diff --git a/.backportrc.json b/.backportrc.json index 20287f0bfc0e6..702113b5600e7 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,10 +1,10 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.x", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.x", "9.0", "8.18", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { - "^v9.0.0$" : "main", - "^v8.18.0$" : "8.x", + "^v9.1.0$" : "main", + "^v8.19.0$" : "8.x", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } -} +} \ No newline at end of file diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml index 9d7cf3c7e0083..75c7a339b8cea 100644 --- a/.buildkite/pipelines/intake.template.yml +++ b/.buildkite/pipelines/intake.template.yml @@ -63,6 +63,27 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: "{{matrix.BWC_VERSION}}" + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" - label: rest-compat command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkRestCompat timeout_in_minutes: 300 @@ -75,7 +96,7 @@ steps: - trigger: elasticsearch-dra-workflow label: Trigger DRA snapshot workflow async: true - branches: "main 8.* 7.17" + branches: "main 9.* 8.* 7.17" build: branch: "$BUILDKITE_BRANCH" commit: "$BUILDKITE_COMMIT" diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 54be022ce236b..5c34e242d62c3 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.5", "8.17.3", "8.18.0", "8.19.0", "9.0.0", "9.1.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -64,6 +64,27 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: "{{matrix.BWC_VERSION}}" + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + 
machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" - label: rest-compat command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkRestCompat timeout_in_minutes: 300 @@ -76,7 +97,7 @@ steps: - trigger: elasticsearch-dra-workflow label: Trigger DRA snapshot workflow async: true - branches: "main 8.* 7.17" + branches: "main 9.* 8.* 7.17" build: branch: "$BUILDKITE_BRANCH" commit: "$BUILDKITE_COMMIT" diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index aded97712d7a5..e2db7579724bd 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -287,8 +287,8 @@ steps: env: BWC_VERSION: 8.15.5 - - label: "{{matrix.image}} / 8.16.4 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.4 + - label: "{{matrix.image}} / 8.16.5 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.5 timeout_in_minutes: 300 matrix: setup: @@ -301,10 +301,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.16.4 + BWC_VERSION: 8.16.5 - - label: "{{matrix.image}} / 8.17.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.2 + - label: "{{matrix.image}} / 8.17.3 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.3 timeout_in_minutes: 300 matrix: setup: @@ -317,7 +317,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.17.2 + BWC_VERSION: 8.17.3 - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0 @@ -335,6 +335,22 @@ steps: env: BWC_VERSION: 8.18.0 + - label: "{{matrix.image}} / 8.19.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.19.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.19.0 + - label: "{{matrix.image}} / 9.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.0 timeout_in_minutes: 300 @@ -351,6 +367,22 @@ steps: env: BWC_VERSION: 9.0.0 + - label: "{{matrix.image}} / 9.1.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.1.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 9.1.0 + - group: packaging-tests-windows steps: - label: "{{matrix.image}} / packaging-tests-windows" diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 201c34058a409..cf14bcdb3c5de 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ 
-78,14 +78,15 @@ steps: BWC_VERSION: "{{matrix.BWC_VERSION}}" - group: java-matrix steps: - - label: "{{matrix.ES_RUNTIME_JAVA}} / {{matrix.GRADLE_TASK}} / java-matrix" - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true $$GRADLE_TASK + - label: "{{matrix.ES_RUNTIME_JAVA}} / entitlements={{matrix.ENTITLEMENTS_ENABLED}} / {{matrix.GRADLE_TASK}} / java-matrix" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline="-Des.entitlements.enabled=$$ENTITLEMENTS_ENABLED" $$GRADLE_TASK timeout_in_minutes: 300 matrix: setup: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 + - openjdk24 GRADLE_TASK: - checkPart1 - checkPart2 @@ -93,6 +94,9 @@ steps: - checkPart4 - checkPart5 - checkRestCompat + ENTITLEMENTS_ENABLED: + - "true" + - "false" agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -101,6 +105,7 @@ steps: env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" GRADLE_TASK: "{{matrix.GRADLE_TASK}}" + ENTITLEMENTS_ENABLED: "{{matrix.ENTITLEMENTS_ENABLED}}" - label: "{{matrix.ES_RUNTIME_JAVA}} / {{matrix.BWC_VERSION}} / java-matrix-bwc" command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$$BWC_VERSION#bwcTest timeout_in_minutes: 300 @@ -201,6 +206,27 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" - label: Upload Snyk Dependency Graph command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH env: diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 64c4d59fd7fbe..90ab4d31f1c3c 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -306,8 +306,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.16.4 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.4#bwcTest + - label: 8.16.5 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.5#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -316,7 +316,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.16.4 + BWC_VERSION: 8.16.5 retry: automatic: - exit_status: "-1" @@ -325,8 +325,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.17.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.2#bwcTest + - label: 8.17.3 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.3#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -335,7 +335,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.17.2 + BWC_VERSION: 8.17.3 retry: automatic: - exit_status: "-1" @@ -363,6 +363,25 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 8.19.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.19.0#bwcTest + 
timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.19.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: 9.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.0#bwcTest timeout_in_minutes: 300 @@ -382,6 +401,25 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 9.1.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.1.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 9.1.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: concurrent-search-tests command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 @@ -448,7 +486,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.5", "8.17.3", "8.18.0", "8.19.0", "9.0.0", "9.1.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -459,14 +497,15 @@ steps: BWC_VERSION: "{{matrix.BWC_VERSION}}" - group: java-matrix steps: - - label: "{{matrix.ES_RUNTIME_JAVA}} / {{matrix.GRADLE_TASK}} / java-matrix" - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true $$GRADLE_TASK + - label: "{{matrix.ES_RUNTIME_JAVA}} / entitlements={{matrix.ENTITLEMENTS_ENABLED}} / {{matrix.GRADLE_TASK}} / java-matrix" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline="-Des.entitlements.enabled=$$ENTITLEMENTS_ENABLED" $$GRADLE_TASK timeout_in_minutes: 300 matrix: setup: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 + - openjdk24 GRADLE_TASK: - checkPart1 - checkPart2 @@ -474,6 +513,9 @@ steps: - checkPart4 - checkPart5 - checkRestCompat + ENTITLEMENTS_ENABLED: + - "true" + - "false" agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -482,6 +524,7 @@ steps: env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" GRADLE_TASK: "{{matrix.GRADLE_TASK}}" + ENTITLEMENTS_ENABLED: "{{matrix.ENTITLEMENTS_ENABLED}}" - label: "{{matrix.ES_RUNTIME_JAVA}} / {{matrix.BWC_VERSION}} / java-matrix-bwc" command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$$BWC_VERSION#bwcTest timeout_in_minutes: 300 @@ -490,7 +533,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.5", "8.17.3", "8.18.0", "8.19.0", "9.0.0", "9.1.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -582,6 +625,27 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + agents: + provider: gcp + 
image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" - label: Upload Snyk Dependency Graph command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH env: diff --git a/.buildkite/scripts/dra-update-staging.sh b/.buildkite/scripts/dra-update-staging.sh index 676361bf1cfcf..e168dbf733ea7 100755 --- a/.buildkite/scripts/dra-update-staging.sh +++ b/.buildkite/scripts/dra-update-staging.sh @@ -36,6 +36,9 @@ for BRANCH in "${BRANCHES[@]}"; do fi if [[ "$SHOULD_TRIGGER" == "true" ]]; then + if [[ "$BRANCH" == "9.0" ]]; then + export VERSION_QUALIFIER="beta1" + fi echo "Triggering DRA staging workflow for $BRANCH" cat << EOF | buildkite-agent pipeline upload steps: @@ -46,6 +49,7 @@ steps: branch: "$BRANCH" env: DRA_WORKFLOW: staging + VERSION_QUALIFIER: ${VERSION_QUALIFIER:-} EOF fi done diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh index d6bc3063fab75..aa3e871b6dc18 100755 --- a/.buildkite/scripts/dra-workflow.sh +++ b/.buildkite/scripts/dra-workflow.sh @@ -6,7 +6,7 @@ WORKFLOW="${DRA_WORKFLOW:-snapshot}" BRANCH="${BUILDKITE_BRANCH:-}" # Don't publish main branch to staging -if [[ "$BRANCH" == *.x && "$WORKFLOW" == "staging" ]]; then +if [[ ("$BRANCH" == "main" || "$BRANCH" == *.x) && "$WORKFLOW" == "staging" ]]; then exit 0 fi diff --git a/.buildkite/scripts/dra-workflow.trigger.sh b/.buildkite/scripts/dra-workflow.trigger.sh index 43c4b42ecf006..71998b06bd3d2 100755 --- a/.buildkite/scripts/dra-workflow.trigger.sh +++ b/.buildkite/scripts/dra-workflow.trigger.sh @@ -7,8 +7,8 @@ echo "steps:" source .buildkite/scripts/branches.sh for BRANCH in "${BRANCHES[@]}"; do - if [[ "$BRANCH" == "main" ]]; then - export VERSION_QUALIFIER="alpha1" + if [[ "$BRANCH" == "9.0" ]]; then + export VERSION_QUALIFIER="beta1" fi INTAKE_PIPELINE_SLUG="elasticsearch-intake" diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 9f4b86ffc7ada..4d27c647bc40a 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -15,7 +15,9 @@ BWC_VERSION: - "8.13.4" - "8.14.3" - "8.15.5" - - "8.16.4" - - "8.17.2" + - "8.16.5" + - "8.17.3" - "8.18.0" + - "8.19.0" - "9.0.0" + - "9.1.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 38104e03edb5f..9920160e4df83 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,7 @@ BWC_VERSION: - - "8.16.4" - - "8.17.2" + - "8.16.5" + - "8.17.3" - "8.18.0" + - "8.19.0" - "9.0.0" + - "9.1.0" diff --git a/.github/workflows/comment-on-asciidoc-changes.yml b/.github/workflows/comment-on-asciidoc-changes.yml new file mode 100644 index 0000000000000..8e5f836b1489a --- /dev/null +++ b/.github/workflows/comment-on-asciidoc-changes.yml @@ -0,0 +1,21 @@ +--- +name: Comment on PR for .asciidoc changes + +on: + # We need to use pull_request_target to be able to comment on PRs from forks + pull_request_target: + types: + - synchronize + - opened + - reopened + branches: + - main + - master + - "9.0" + +jobs: + comment-on-asciidoc-change: + permissions: + contents: read + pull-requests: write + uses: elastic/docs-builder/.github/workflows/comment-on-asciidoc-changes.yml@main diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index 652defa7b39cd..77c70bc3a10f4 100644 --- 
a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -155,7 +155,7 @@ private static Operator operator(DriverContext driverContext, String grouping, S if (grouping.equals("none")) { return new AggregationOperator( - List.of(supplier(op, dataType, filter, 0).aggregatorFactory(AggregatorMode.SINGLE).apply(driverContext)), + List.of(supplier(op, dataType, filter).aggregatorFactory(AggregatorMode.SINGLE, List.of(0)).apply(driverContext)), driverContext ); } @@ -182,33 +182,33 @@ private static Operator operator(DriverContext driverContext, String grouping, S default -> throw new IllegalArgumentException("unsupported grouping [" + grouping + "]"); }; return new HashAggregationOperator( - List.of(supplier(op, dataType, filter, groups.size()).groupingAggregatorFactory(AggregatorMode.SINGLE)), + List.of(supplier(op, dataType, filter).groupingAggregatorFactory(AggregatorMode.SINGLE, List.of(groups.size()))), () -> BlockHash.build(groups, driverContext.blockFactory(), 16 * 1024, false), driverContext ); } - private static AggregatorFunctionSupplier supplier(String op, String dataType, String filter, int dataChannel) { + private static AggregatorFunctionSupplier supplier(String op, String dataType, String filter) { return filtered(switch (op) { - case COUNT -> CountAggregatorFunction.supplier(List.of(dataChannel)); + case COUNT -> CountAggregatorFunction.supplier(); case COUNT_DISTINCT -> switch (dataType) { - case LONGS -> new CountDistinctLongAggregatorFunctionSupplier(List.of(dataChannel), 3000); - case DOUBLES -> new CountDistinctDoubleAggregatorFunctionSupplier(List.of(dataChannel), 3000); + case LONGS -> new CountDistinctLongAggregatorFunctionSupplier(3000); + case DOUBLES -> new CountDistinctDoubleAggregatorFunctionSupplier(3000); default -> throw new IllegalArgumentException("unsupported data type [" + dataType + "]"); }; case MAX -> switch (dataType) { - case LONGS -> new MaxLongAggregatorFunctionSupplier(List.of(dataChannel)); - case DOUBLES -> new MaxDoubleAggregatorFunctionSupplier(List.of(dataChannel)); + case LONGS -> new MaxLongAggregatorFunctionSupplier(); + case DOUBLES -> new MaxDoubleAggregatorFunctionSupplier(); default -> throw new IllegalArgumentException("unsupported data type [" + dataType + "]"); }; case MIN -> switch (dataType) { - case LONGS -> new MinLongAggregatorFunctionSupplier(List.of(dataChannel)); - case DOUBLES -> new MinDoubleAggregatorFunctionSupplier(List.of(dataChannel)); + case LONGS -> new MinLongAggregatorFunctionSupplier(); + case DOUBLES -> new MinDoubleAggregatorFunctionSupplier(); default -> throw new IllegalArgumentException("unsupported data type [" + dataType + "]"); }; case SUM -> switch (dataType) { - case LONGS -> new SumLongAggregatorFunctionSupplier(List.of(dataChannel)); - case DOUBLES -> new SumDoubleAggregatorFunctionSupplier(List.of(dataChannel)); + case LONGS -> new SumLongAggregatorFunctionSupplier(); + case DOUBLES -> new SumDoubleAggregatorFunctionSupplier(); default -> throw new IllegalArgumentException("unsupported data type [" + dataType + "]"); }; default -> throw new IllegalArgumentException("unsupported op [" + op + "]"); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java index 9ed5e1accef59..fba3c752bb239 100644 --- 
a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java @@ -261,7 +261,8 @@ private static BlockLoader numericBlockLoader(WhereAndBaseName w, NumberFieldMap null, false, null, - null + null, + false ).blockLoader(null); } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java index 4085e74d35db6..e61171aeff027 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java @@ -83,7 +83,7 @@ public class ScriptScoreBenchmark { private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList()); private final Map<String, MappedFieldType> fieldTypes = Map.ofEntries( - Map.entry("n", new NumberFieldType("n", NumberType.LONG, false, false, true, true, null, Map.of(), null, false, null, null)) + Map.entry("n", new NumberFieldType("n", NumberType.LONG, false, false, true, true, null, Map.of(), null, false, null, null, false)) ); private final IndexFieldDataCache fieldDataCache = new IndexFieldDataCache.None(); private final CircuitBreakerService breakerService = new NoneCircuitBreakerService(); diff --git a/branches.json b/branches.json index 95fbdb1efd655..81d5a46991445 100644 --- a/branches.json +++ b/branches.json @@ -7,6 +7,12 @@ { "branch": "8.16" }, + { + "branch": "9.0" + }, + { + "branch": "8.18" + }, { "branch": "8.17" }, diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle index e931537fcd6e9..1774ac0b0a112 100644 --- a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle +++ b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle @@ -14,6 +14,7 @@ include ":distribution:bwc:bugfix2" include ":distribution:bwc:minor" include ":distribution:bwc:major" include ":distribution:bwc:staged" +include ":distribution:bwc:staged2" include ":distribution:bwc:maintenance" include ":distribution:archives:darwin-tar" include ":distribution:archives:oss-darwin-tar" diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index 797dc8bd0641b..8702f5a9bf0e9 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -32,7 +32,9 @@ develocity { // Automatically publish scans from Elasticsearch CI if (onCI) { publishing.onlyIf { true } - server = 'https://gradle-enterprise.elastic.co' + if(server.isPresent() == false) { + server = 'https://gradle-enterprise.elastic.co' + } } else if( server.isPresent() == false) { publishing.onlyIf { false } } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 90a4f74b5e9f4..fd973c3b0502e 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -153,6
+153,10 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { doLast { enablePreview('.idea/modules/libs/native/elasticsearch.libs.native.main.iml', 'JDK_21_PREVIEW') enablePreview('.idea/modules/libs/native/elasticsearch.libs.native.test.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/entitlement/elasticsearch.libs.entitlement.main.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/entitlement/elasticsearch.libs.entitlement.test.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/entitlement/bridge/elasticsearch.libs.entitlement.bridge.main.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/entitlement/bridge/elasticsearch.libs.entitlement.bridge.test.iml', 'JDK_21_PREVIEW') } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java index 9f7645349e852..5c1caa08bfaa1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java @@ -166,6 +166,7 @@ private Map<Version, UnreleasedVersionInfo> computeUnreleased(List<String> devel .toList(); boolean existingBugfix = false; + boolean existingStaged = false; for (int i = 0; i < featureFreezeBranches.size(); i++) { String branch = featureFreezeBranches.get(i); Version version = versions.stream() @@ -193,7 +194,9 @@ if (i == featureFreezeBranches.size() - 1) { result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:maintenance")); } else if (version.getRevision() == 0) { // This is the next staged minor - result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:staged")); + String project = existingStaged ? "staged2" : "staged"; + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:" + project)); + existingStaged = true; } else { // This is a bugfix String project = existingBugfix ?
"bugfix2" : "bugfix"; result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:" + project)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 24ba0740cfe26..d461a1bfae617 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -22,7 +22,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:dd66beec64a7f9b19c6c35a1195153b2b630a55e16ec71949ed5187c5947eea1", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:ecd940be9f342ee6173397c48f3df5bb410e95000f8726fd01759b6c39b0beda", "-wolfi", "apk" ), diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java index ee0eb3f6eb2bf..ce779343dfea9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java @@ -58,7 +58,6 @@ public class ElasticsearchJavaBasePlugin implements Plugin<Project> { @Override public void apply(Project project) { - project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); // make sure the global build info plugin is applied to the root project project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index c38ea5b4f0850..b6c36285ca3a7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -86,6 +86,24 @@ public void apply(Project project) { fileSystemOperations ); }); + + // Also set up the "main" project which is just used for arbitrary overrides. See InternalDistributionDownloadPlugin.
+ if (System.getProperty("tests.bwc.main.version") != null) { + configureBwcProject( + project.project(":distribution:bwc:main"), + buildParams, + new BwcVersions.UnreleasedVersionInfo( + Version.fromString(System.getProperty("tests.bwc.main.version")), + "main", + ":distribution:bwc:main" + ), + providerFactory, + objectFactory, + toolChainService, + isCi, + fileSystemOperations + ); + } } private static void configureBwcProject( diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java index ba587aa4bd979..4c0c224aff3f3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -102,6 +102,29 @@ private void registerInternalDistributionResolutions(List<DistributionResolution> resolutions) { + resolutions.add(new DistributionResolution("bwc-main", (project, distribution) -> { + String versionProperty = System.getProperty("tests.bwc.main.version"); + // We use this phony version as a placeholder for the real version + if (distribution.getVersion().equals("0.0.0")) { + BwcVersions.UnreleasedVersionInfo unreleasedVersionInfo = new BwcVersions.UnreleasedVersionInfo( + Version.fromString(versionProperty), + "main", + ":distribution:bwc:main" + ); + String projectConfig = getProjectConfig(distribution, unreleasedVersionInfo); + return new ProjectBasedDistributionDependency( + (config) -> projectDependency(project.getDependencies(), unreleasedVersionInfo.gradleProjectPath(), projectConfig) + ); + } + return null; + })); } private boolean isCurrentVersion(ElasticsearchDistribution distribution) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index b387f019ad386..b835bae815d07 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -163,6 +163,7 @@ private void addJar(Project project, SourceSet sourceSet, int javaVersion) { project.getConfigurations().register("java" + javaVersion); TaskProvider<Jar> jarTask = project.getTasks().register("java" + javaVersion + "Jar", Jar.class, task -> { task.from(sourceSet.getOutput()); + task.getArchiveClassifier().set("java" + javaVersion); }); project.getArtifacts().add("java" + javaVersion, jarTask); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java index 59ba9bae0a57d..0c86a2d030741 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java @@ -60,6 +60,7 @@ import static org.gradle.api.JavaVersion.VERSION_21; import static org.gradle.api.JavaVersion.VERSION_22; import static org.gradle.api.JavaVersion.VERSION_23; +import static org.gradle.api.JavaVersion.VERSION_24; @CacheableTask public abstract class ThirdPartyAuditTask extends DefaultTask { @@ -341,8 +342,12 @@ private String runForbiddenAPIsCli() throws IOException { spec.setExecutable(javaHome.get() + "/bin/java"); } spec.classpath(getForbiddenAPIsClasspath(),
getThirdPartyClasspath()); - // Enable explicitly for each release as appropriate. Just JDK 20/21/22/23 for now, and just the vector module. - if (isJavaVersion(VERSION_20) || isJavaVersion(VERSION_21) || isJavaVersion(VERSION_22) || isJavaVersion(VERSION_23)) { + // Enable explicitly for each release as appropriate. Just JDK 20/21/22/23/24 for now, and just the vector module. + if (isJavaVersion(VERSION_20) + || isJavaVersion(VERSION_21) + || isJavaVersion(VERSION_22) + || isJavaVersion(VERSION_23) + || isJavaVersion(VERSION_24)) { spec.jvmArgs("--add-modules", "jdk.incubator.vector"); } spec.jvmArgs("-Xmx1g"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java index a6ead34b11079..ebd316d7f042a 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java @@ -15,6 +15,7 @@ import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration; import com.github.javaparser.ast.body.FieldDeclaration; import com.github.javaparser.ast.body.VariableDeclarator; +import com.github.javaparser.ast.expr.Expression; import com.github.javaparser.ast.expr.NameExpr; import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter; import com.google.common.annotations.VisibleForTesting; @@ -33,6 +34,7 @@ import java.util.Objects; import java.util.Optional; import java.util.TreeMap; +import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -51,6 +53,8 @@ public class UpdateVersionsTask extends AbstractVersionsTask { private boolean setCurrent; @Nullable private Version removeVersion; + @Nullable + private String addTransportVersion; @Inject public UpdateVersionsTask(BuildLayout layout) { @@ -62,6 +66,11 @@ public void addVersion(String version) { this.addVersion = Version.fromString(version); } + @Option(option = "add-transport-version", description = "Specifies transport version to add") + public void addTransportVersion(String transportVersion) { + this.addTransportVersion = transportVersion; + } + @Option(option = "set-current", description = "Set the 'current' constant to the new version") public void setCurrent(boolean setCurrent) { this.setCurrent = setCurrent; @@ -87,15 +96,18 @@ static Optional<Version> parseVersionField(CharSequence field) { @TaskAction public void executeTask() throws IOException { - if (addVersion == null && removeVersion == null) { + if (addVersion == null && removeVersion == null && addTransportVersion == null) { throw new IllegalArgumentException("No versions to add or remove specified"); } if (setCurrent && addVersion == null) { throw new IllegalArgumentException("No new version added to set as the current version"); } - if (Objects.equals(addVersion, removeVersion)) { + if (addVersion != null && removeVersion != null && Objects.equals(addVersion, removeVersion)) { throw new IllegalArgumentException("Same version specified to add and remove"); } + if (addTransportVersion != null && addTransportVersion.split(":").length != 2) { + throw new IllegalArgumentException("Transport version specified must be in the format '<constant>:<version id>'"); + } Path versionJava = rootDir.resolve(VERSION_FILE_PATH); CompilationUnit file = LexicalPreservingPrinter.setup(StaticJavaParser.parse(versionJava));
@@ -115,6 +127,18 @@ public void executeTask() throws IOException { modifiedFile = removed; } } + if (addTransportVersion != null) { + var constant = addTransportVersion.split(":")[0]; + var versionId = Integer.parseInt(addTransportVersion.split(":")[1]); + LOGGER.lifecycle("Adding transport version constant [{}] with id [{}]", constant, versionId); + + var transportVersionsFile = rootDir.resolve(TRANSPORT_VERSIONS_FILE_PATH); + var transportVersions = LexicalPreservingPrinter.setup(StaticJavaParser.parse(transportVersionsFile)); + var modified = addTransportVersionConstant(transportVersions, constant, versionId); + if (modified.isPresent()) { + writeOutNewContents(transportVersionsFile, modified.get()); + } + } if (modifiedFile.isPresent()) { writeOutNewContents(versionJava, modifiedFile.get()); @@ -161,6 +185,51 @@ static Optional<CompilationUnit> addVersionConstant(CompilationUnit versionJava, return Optional.of(versionJava); } + @VisibleForTesting + static Optional<CompilationUnit> addTransportVersionConstant(CompilationUnit transportVersions, String constant, int versionId) { + ClassOrInterfaceDeclaration transportVersionsClass = transportVersions.getClassByName("TransportVersions").get(); + if (transportVersionsClass.getFieldByName(constant).isPresent()) { + LOGGER.lifecycle("New transport version constant [{}] already present, skipping", constant); + return Optional.empty(); + } + + TreeMap<Integer, FieldDeclaration> versions = transportVersionsClass.getFields() + .stream() + .filter(f -> f.getElementType().asString().equals("TransportVersion")) + .filter( + f -> f.getVariables().stream().limit(1).allMatch(v -> v.getInitializer().filter(Expression::isMethodCallExpr).isPresent()) + ) + .filter(f -> f.getVariable(0).getInitializer().get().asMethodCallExpr().getNameAsString().endsWith("def")) + .collect( + Collectors.toMap( + f -> f.getVariable(0) + .getInitializer() + .get() + .asMethodCallExpr() + .getArgument(0) + .asIntegerLiteralExpr() + .asNumber() + .intValue(), + Function.identity(), + (f1, f2) -> { + throw new IllegalStateException("Duplicate version constant " + f1); + }, + TreeMap::new + ) + ); + + // find the version this should be inserted after + Map.Entry<Integer, FieldDeclaration> previousVersion = versions.lowerEntry(versionId); + if (previousVersion == null) { + throw new IllegalStateException(String.format("Could not find previous version to [%s]", versionId)); + } + + FieldDeclaration newTransportVersion = createNewTransportVersionConstant(previousVersion.getValue(), constant, versionId); + transportVersionsClass.getMembers().addAfter(newTransportVersion, previousVersion.getValue()); + + return Optional.of(transportVersions); + } + private static FieldDeclaration createNewVersionConstant(FieldDeclaration lastVersion, String newName, String newExpr) { return new FieldDeclaration( new NodeList<>(lastVersion.getModifiers()), @@ -172,6 +241,29 @@ private static FieldDeclaration createNewVersionConstant(FieldDeclaration lastVe ); } + private static FieldDeclaration createNewTransportVersionConstant(FieldDeclaration lastVersion, String newName, int newId) { + return new FieldDeclaration( + new NodeList<>(lastVersion.getModifiers()), + new VariableDeclarator( + lastVersion.getCommonType(), + newName, + StaticJavaParser.parseExpression(String.format("def(%s)", formatTransportVersionId(newId))) + ) + ); + } + + private static String formatTransportVersionId(int id) { + String idString = Integer.toString(id); + + return new StringBuilder(idString.substring(idString.length() - 2, idString.length())).insert(0, "_") + .insert(0, idString.substring(idString.length()
- 3, idString.length() - 2)) + .insert(0, "_") + .insert(0, idString.substring(idString.length() - 6, idString.length() - 3)) + .insert(0, "_") + .insert(0, idString.substring(0, idString.length() - 6)) + .toString(); + } + @VisibleForTesting static Optional<CompilationUnit> removeVersionConstant(CompilationUnit versionJava, Version version) { String removeFieldName = toVersionField(version); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 559c0f60abc08..a9f7267cb501c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -223,7 +223,7 @@ public Void call(Object... args) { } Version version = (Version) args[0]; - boolean isReleased = bwcVersions.unreleasedInfo(version) == null; + boolean isReleased = bwcVersions.unreleasedInfo(version) == null && version.toString().equals("0.0.0") == false; String versionString = version.toString(); ElasticsearchDistribution bwcDistro = createDistribution(project, "bwc_" + versionString, versionString); diff --git a/build-tools-internal/src/main/resources/changelog-schema.json b/build-tools-internal/src/main/resources/changelog-schema.json index 9692af7adc5e6..7229571fc8bf4 100644 --- a/build-tools-internal/src/main/resources/changelog-schema.json +++ b/build-tools-internal/src/main/resources/changelog-schema.json @@ -291,6 +291,7 @@ "JVM option", "Java API", "Logging", + "Logs", "Mapping", "Packaging", "Painless", diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index 4d033564a42b4..a662a76db4da7 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -77,6 +77,39 @@ class BwcVersionsSpec extends Specification { bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0')] } + def "current version is next major with two staged minors"() { + given: + addVersion('7.17.10', '8.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.16.2', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('8.18.0', '9.10.0') + addVersion('8.19.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') + + when: + def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.x', '8.18', '8.17', '8.16', '7.17']) + def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } + + then: + unreleased == [ + (v('8.16.2')): new UnreleasedVersionInfo(v('8.16.2'), '8.16', ':distribution:bwc:bugfix2'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:staged2'), + (v('8.19.0')): new UnreleasedVersionInfo(v('8.19.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0',
':distribution:bwc:staged'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), + ] + bwc.wireCompatible == [v('8.19.0'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.16.2'), v('8.17.0'), v('8.17.1'), v('8.18.0'), v('8.19.0'), v('9.0.0'), v('9.1.0')] + } + def "current version is first new minor in major series"() { given: addVersion('7.17.10', '8.9.0') diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java index 9e4f1cd3a913d..d5060a2e62365 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java @@ -239,6 +239,96 @@ public void updateVersionFile_removesCorrectly() throws Exception { assertThat(field.isPresent(), is(false)); } + @Test + public void addTransportVersion() throws Exception { + var transportVersions = """ + public class TransportVersions { + public static final TransportVersion V_1_0_0 = def(1_000_0_00); + public static final TransportVersion V_1_1_0 = def(1_001_0_00); + public static final TransportVersion V_1_2_0 = def(1_002_0_00); + public static final TransportVersion V_1_2_1 = def(1_002_0_01); + public static final TransportVersion V_1_2_2 = def(1_002_0_02); + public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00); + public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00); + public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0; + } + """; + + var expectedTransportVersions = """ + public class TransportVersions { + + public static final TransportVersion V_1_0_0 = def(1_000_0_00); + + public static final TransportVersion V_1_1_0 = def(1_001_0_00); + + public static final TransportVersion V_1_2_0 = def(1_002_0_00); + + public static final TransportVersion V_1_2_1 = def(1_002_0_01); + + public static final TransportVersion V_1_2_2 = def(1_002_0_02); + + public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00); + + public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00); + + public static final TransportVersion NEXT_TRANSPORT_VERSION = def(1_005_0_00); + + public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0; + } + """; + + var unit = StaticJavaParser.parse(transportVersions); + var result = UpdateVersionsTask.addTransportVersionConstant(unit, "NEXT_TRANSPORT_VERSION", 1_005_0_00); + + assertThat(result.isPresent(), is(true)); + assertThat(result.get(), hasToString(expectedTransportVersions)); + } + + @Test + public void addTransportVersionPatch() throws Exception { + var transportVersions = """ + public class TransportVersions { + public static final TransportVersion V_1_0_0 = def(1_000_0_00); + public static final TransportVersion V_1_1_0 = def(1_001_0_00); + public static final TransportVersion V_1_2_0 = def(1_002_0_00); + public static final TransportVersion V_1_2_1 = def(1_002_0_01); + public static final TransportVersion V_1_2_2 = def(1_002_0_02); + public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00); + public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00); + public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0; + } + """; + + var expectedTransportVersions = """ + public 
class TransportVersions { + + public static final TransportVersion V_1_0_0 = def(1_000_0_00); + + public static final TransportVersion V_1_1_0 = def(1_001_0_00); + + public static final TransportVersion V_1_2_0 = def(1_002_0_00); + + public static final TransportVersion V_1_2_1 = def(1_002_0_01); + + public static final TransportVersion V_1_2_2 = def(1_002_0_02); + + public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00); + + public static final TransportVersion PATCH_TRANSPORT_VERSION = def(1_003_0_01); + + public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00); + + public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0; + } + """; + + var unit = StaticJavaParser.parse(transportVersions); + var result = UpdateVersionsTask.addTransportVersionConstant(unit, "PATCH_TRANSPORT_VERSION", 1_003_0_01); + + assertThat(result.isPresent(), is(true)); + assertThat(result.get(), hasToString(expectedTransportVersions)); + } + private static Optional<FieldDeclaration> findFirstField(Node node, String name) { return node.findFirst(FieldDeclaration.class, f -> f.getVariable(0).getName().getIdentifier().equals(name)); } diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 57882fa842b41..a0c663b19a0c6 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ -elasticsearch = 9.0.0 -lucene = 10.0.0 +elasticsearch = 9.1.0 +lucene = 10.1.0 bundled_jdk_vendor = openjdk bundled_jdk = 23+37@3c5b90190c68498b986a97f276efd28a diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 4c7290457e7df..35748459ecac3 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -25,7 +25,9 @@ import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.List; +import java.util.Objects; import javax.inject.Inject; @@ -141,8 +143,9 @@ private void registerDistributionDependencies(Project project, ElasticsearchDist private DistributionDependency resolveDependencyNotation(Project project, ElasticsearchDistribution distro) { return distributionsResolutionStrategies.stream() + .sorted(Comparator.comparing(DistributionResolution::getPriority).reversed()) .map(r -> r.getResolver().resolve(project, distro)) - .filter(d -> d != null) + .filter(Objects::nonNull) .findFirst() .orElseGet(() -> DistributionDependency.of(dependencyNotation(distro))); } diff --git a/catalog-info.yaml b/catalog-info.yaml index b688e043eb2eb..b001eee61e699 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -123,7 +123,7 @@ spec: pipeline_file: .buildkite/pipelines/lucene-snapshot/build-snapshot.yml env: ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true" - SLACK_NOTIFICATIONS_CHANNEL: "#lucene" + SLACK_NOTIFICATIONS_CHANNEL: "#lucene-ci" SLACK_NOTIFICATIONS_ALL_BRANCHES: "true" branch_configuration: lucene_snapshot default_branch: lucene_snapshot @@ -167,7 +167,7 @@ spec: pipeline_file: .buildkite/pipelines/lucene-snapshot/update-branch.yml env: ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true" - SLACK_NOTIFICATIONS_CHANNEL: "#lucene" + SLACK_NOTIFICATIONS_CHANNEL: "#lucene-ci" SLACK_NOTIFICATIONS_ALL_BRANCHES: "true" default_branch: lucene_snapshot teams: @@ -210,7 +210,7 @@ spec: pipeline_file:
.buildkite/pipelines/lucene-snapshot/run-tests.yml env: ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true" - SLACK_NOTIFICATIONS_CHANNEL: "#lucene" + SLACK_NOTIFICATIONS_CHANNEL: "#lucene-ci" SLACK_NOTIFICATIONS_ALL_BRANCHES: "true" branch_configuration: lucene_snapshot default_branch: lucene_snapshot diff --git a/client/test/build.gradle b/client/test/build.gradle index e39b7587b69d5..3b7f62fd8ef58 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -28,9 +28,9 @@ dependencies { api "org.hamcrest:hamcrest:${versions.hamcrest}" // mockito - api 'org.mockito:mockito-core:5.11.0' - api 'org.mockito:mockito-subclass:5.11.0' - api 'net.bytebuddy:byte-buddy:1.14.12' + api 'org.mockito:mockito-core:5.15.2' + api 'org.mockito:mockito-subclass:5.15.2' + api 'net.bytebuddy:byte-buddy:1.15.11' api 'org.objenesis:objenesis:3.3' } diff --git a/server/src/main/java/org/elasticsearch/common/util/Countable.java b/distribution/bwc/main/build.gradle similarity index 80% rename from server/src/main/java/org/elasticsearch/common/util/Countable.java rename to distribution/bwc/main/build.gradle index 27ac9cd0c938c..305e32a58a6ad 100644 --- a/server/src/main/java/org/elasticsearch/common/util/Countable.java +++ b/distribution/bwc/main/build.gradle @@ -7,8 +7,4 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.common.util; - -public interface Countable { - int size(); -} +// This project is used only for overriding bwc distributions. See InternalDistributionDownloadPlugin for details. diff --git a/distribution/bwc/staged2/build.gradle b/distribution/bwc/staged2/build.gradle new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java index cc662bd747575..c6421d76392cf 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommand.java @@ -74,7 +74,7 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment keyStore.setFile(setting, Files.readAllBytes(file)); } - keyStore.save(env.configFile(), getKeyStorePassword().getChars()); + keyStore.save(env.configDir(), getKeyStorePassword().getChars()); } @SuppressForbidden(reason = "file arg for cli") diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java index c01c18418858a..a7ea6dcf7ce74 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommand.java @@ -100,7 +100,7 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment } } - keyStore.save(env.configFile(), getKeyStorePassword().getChars()); + keyStore.save(env.configDir(), getKeyStorePassword().getChars()); } } diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java index 0380018d36cff..a8a75ac23c900 100644 --- 
a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/BaseKeyStoreCommand.java @@ -39,14 +39,14 @@ public BaseKeyStoreCommand(String description, boolean keyStoreMustExist) { @Override public final void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { try { - final Path configFile = env.configFile(); + final Path configFile = env.configDir(); keyStore = KeyStoreWrapper.load(configFile); if (keyStore == null) { if (keyStoreMustExist) { throw new UserException( ExitCodes.DATA_ERROR, "Elasticsearch keystore not found at [" - + KeyStoreWrapper.keystorePath(env.configFile()) + + KeyStoreWrapper.keystorePath(env.configDir()) + "]. Use 'create' command to create one." ); } else if (options.has(forceOption) == false) { diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java index 4dca3d538263a..9e4f70eee559d 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/ChangeKeyStorePasswordCommand.java @@ -31,7 +31,7 @@ class ChangeKeyStorePasswordCommand extends BaseKeyStoreCommand { protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception { try (SecureString newPassword = readPassword(terminal, true)) { final KeyStoreWrapper keyStore = getKeyStore(); - keyStore.save(env.configFile(), newPassword.getChars()); + keyStore.save(env.configDir(), newPassword.getChars()); terminal.println("Elasticsearch keystore password changed successfully."); } catch (SecurityException e) { throw new UserException(ExitCodes.DATA_ERROR, e.getMessage()); diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java index a922c92f5f44b..ef561b08d9a50 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommand.java @@ -40,7 +40,7 @@ class CreateKeyStoreCommand extends KeyStoreAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { try (SecureString password = options.has(passwordOption) ? readPassword(terminal, true) : new SecureString(new char[0])) { - Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile()); + Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir()); if (Files.exists(keystoreFile)) { if (terminal.promptYesNo("An elasticsearch keystore already exists. 
Overwrite?", false) == false) { terminal.println("Exiting without creating keystore."); @@ -48,8 +48,8 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } } KeyStoreWrapper keystore = KeyStoreWrapper.create(); - keystore.save(env.configFile(), password.getChars()); - terminal.println("Created elasticsearch keystore in " + KeyStoreWrapper.keystorePath(env.configFile())); + keystore.save(env.configDir(), password.getChars()); + terminal.println("Created elasticsearch keystore in " + KeyStoreWrapper.keystorePath(env.configDir())); } catch (SecurityException e) { throw new UserException(ExitCodes.IO_ERROR, "Error creating the elasticsearch keystore."); } diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java index 0428d5dcf7df8..f0eaca1648b96 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java @@ -32,7 +32,7 @@ public class HasPasswordKeyStoreCommand extends KeyStoreAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - final Path configFile = env.configFile(); + final Path configFile = env.configDir(); final KeyStoreWrapper keyStore = KeyStoreWrapper.load(configFile); // We handle error printing here so we can respect the "--silent" flag diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java index 8a973c6d67f7d..fb1a2ad1df7f3 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/RemoveSettingKeyStoreCommand.java @@ -45,6 +45,6 @@ protected void executeCommand(Terminal terminal, OptionSet options, Environment } keyStore.remove(setting); } - keyStore.save(env.configFile(), getKeyStorePassword().getChars()); + keyStore.save(env.configDir(), getKeyStorePassword().getChars()); } } diff --git a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java index b7061d6153b80..bbbfbf81f7ed9 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java +++ b/distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommand.java @@ -26,7 +26,7 @@ public class UpgradeKeyStoreCommand extends BaseKeyStoreCommand { @Override protected void executeCommand(final Terminal terminal, final OptionSet options, final Environment env) throws Exception { - KeyStoreWrapper.upgrade(getKeyStore(), env.configFile(), getKeyStorePassword().getChars()); + KeyStoreWrapper.upgrade(getKeyStore(), env.configDir(), getKeyStorePassword().getChars()); } } diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java 
index edd70e4e52f55..56706dd44f0c3 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddFileKeyStoreCommandTests.java @@ -46,14 +46,14 @@ private Path createRandomFile() throws IOException { for (int i = 0; i < length; ++i) { bytes[i] = randomByte(); } - Path file = env.configFile().resolve(randomAlphaOfLength(16)); + Path file = env.configDir().resolve(randomAlphaOfLength(16)); Files.write(file, bytes); return file; } private void addFile(KeyStoreWrapper keystore, String setting, Path file, String password) throws Exception { keystore.setFile(setting, Files.readAllBytes(file)); - keystore.save(env.configFile(), password.toCharArray()); + keystore.save(env.configDir(), password.toCharArray()); } public void testMissingCreateWithEmptyPasswordWhenPrompted() throws Exception { @@ -77,7 +77,7 @@ public void testMissingNoCreate() throws Exception { terminal.addSecretInput(randomFrom("", "keystorepassword")); terminal.addTextInput("n"); // explicit no execute("foo"); - assertNull(KeyStoreWrapper.load(env.configFile())); + assertNull(KeyStoreWrapper.load(env.configDir())); } public void testOverwritePromptDefault() throws Exception { diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java index 3de18e094104f..412624be1d506 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/AddStringKeyStoreCommandTests.java @@ -83,7 +83,7 @@ public void testMissingPromptCreateWithoutPasswordWithoutPromptIfForced() throws public void testMissingNoCreate() throws Exception { terminal.addTextInput("n"); // explicit no execute("foo"); - assertNull(KeyStoreWrapper.load(env.configFile())); + assertNull(KeyStoreWrapper.load(env.configDir())); } public void testOverwritePromptDefault() throws Exception { @@ -143,7 +143,7 @@ public void testForceNonExistent() throws Exception { public void testPromptForValue() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); terminal.addSecretInput("secret value"); execute("foo"); @@ -152,7 +152,7 @@ public void testPromptForValue() throws Exception { public void testPromptForMultipleValues() throws Exception { final String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); terminal.addSecretInput("bar1"); terminal.addSecretInput("bar2"); @@ -165,7 +165,7 @@ public void testPromptForMultipleValues() throws Exception { public void testStdinShort() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("secret value 1"); execute("-x", "foo"); @@ -174,7 +174,7 @@ public void testStdinShort() throws Exception { public void testStdinLong() throws Exception { String 
password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("secret value 2"); execute("--stdin", "foo"); @@ -183,7 +183,7 @@ public void testStdinLong() throws Exception { public void testStdinNoInput() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput(""); execute("-x", "foo"); @@ -192,7 +192,7 @@ public void testStdinNoInput() throws Exception { public void testStdinInputWithLineBreaks() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("Typedthisandhitenter\n"); execute("-x", "foo"); @@ -201,7 +201,7 @@ public void testStdinInputWithLineBreaks() throws Exception { public void testStdinInputWithCarriageReturn() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("Typedthisandhitenter\r"); execute("-x", "foo"); @@ -210,7 +210,7 @@ public void testStdinInputWithCarriageReturn() throws Exception { public void testStdinWithMultipleValues() throws Exception { final String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); setInput("bar1\nbar2\nbar3"); execute(randomFrom("-x", "--stdin"), "foo1", "foo2", "foo3"); @@ -221,7 +221,7 @@ public void testStdinWithMultipleValues() throws Exception { public void testAddUtf8String() throws Exception { String password = "keystorepassword"; - KeyStoreWrapper.create().save(env.configFile(), password.toCharArray()); + KeyStoreWrapper.create().save(env.configDir(), password.toCharArray()); terminal.addSecretInput(password); final int stringSize = randomIntBetween(8, 16); try (CharArrayWriter secretChars = new CharArrayWriter(stringSize)) { diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java index 0fc76943f9d05..d93bc2466ed7b 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/BootstrapTests.java @@ -42,7 +42,7 @@ public void setupEnv() throws IOException { public void testLoadSecureSettings() throws Exception { final char[] password = KeyStoreWrapperTests.getPossibleKeystorePassword(); - final Path configPath = env.configFile(); + final Path configPath = env.configDir(); final SecureString seed; try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { seed = KeyStoreWrapper.SEED_SETTING.get(Settings.builder().setSecureSettings(keyStoreWrapper).build()); diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java 
b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java index 72a83a48b6344..74b8c634939fd 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java @@ -48,7 +48,7 @@ public void testNotMatchingPasswords() throws Exception { public void testDefaultNotPromptForPassword() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); execute(); - Path configDir = env.configFile(); + Path configDir = env.configDir(); assertNotNull(KeyStoreWrapper.load(configDir)); } @@ -63,7 +63,7 @@ public void testPosix() throws Exception { } else { execute(); } - Path configDir = env.configFile(); + Path configDir = env.configDir(); assertNotNull(KeyStoreWrapper.load(configDir)); } @@ -79,13 +79,13 @@ public void testNotPosix() throws Exception { } else { execute(); } - Path configDir = env.configFile(); + Path configDir = env.configDir(); assertNotNull(KeyStoreWrapper.load(configDir)); } public void testOverwrite() throws Exception { String password = getPossibleKeystorePassword(); - Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile()); + Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir()); byte[] content = "not a keystore".getBytes(StandardCharsets.UTF_8); Files.write(keystoreFile, content); @@ -110,6 +110,6 @@ public void testOverwrite() throws Exception { } else { execute(); } - assertNotNull(KeyStoreWrapper.load(env.configFile())); + assertNotNull(KeyStoreWrapper.load(env.configDir())); } } diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java index 80edce4a20796..fcbe7b2226296 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreCommandTestCase.java @@ -77,11 +77,11 @@ KeyStoreWrapper createKeystore(String password, String... 
settings) throws Excep } void saveKeystore(KeyStoreWrapper keystore, String password) throws Exception { - keystore.save(env.configFile(), password.toCharArray()); + keystore.save(env.configDir(), password.toCharArray()); } KeyStoreWrapper loadKeystore(String password) throws Exception { - KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile()); + KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configDir()); keystore.decrypt(password.toCharArray()); return keystore; } diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java index 5ab27bac3998a..ee3a53d5c3df7 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java @@ -84,8 +84,8 @@ public void testFileSettingExhaustiveBytes() throws Exception { bytes[i] = (byte) i; } keystore.setFile("foo", bytes); - keystore.save(env.configFile(), password); - keystore = KeyStoreWrapper.load(env.configFile()); + keystore.save(env.configDir(), password); + keystore = KeyStoreWrapper.load(env.configDir()); keystore.decrypt(password); try (InputStream stream = keystore.getFile("foo")) { for (int i = 0; i < 256; ++i) { @@ -114,8 +114,8 @@ public void testDecryptKeyStoreWithWrongPassword() throws Exception { invalidPassword[realPassword.length] = '#'; } KeyStoreWrapper keystore = KeyStoreWrapper.create(); - keystore.save(env.configFile(), realPassword); - final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configFile()); + keystore.save(env.configDir(), realPassword); + final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configDir()); final SecurityException exception = expectThrows(SecurityException.class, () -> loadedkeystore.decrypt(invalidPassword)); if (inFipsJvm()) { assertThat( @@ -133,8 +133,8 @@ public void testDecryptKeyStoreWithWrongPassword() throws Exception { public void testDecryptKeyStoreWithShortPasswordInFips() throws Exception { assumeTrue("This should run only in FIPS mode", inFipsJvm()); KeyStoreWrapper keystore = KeyStoreWrapper.create(); - keystore.save(env.configFile(), "alongenoughpassword".toCharArray()); - final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configFile()); + keystore.save(env.configDir(), "alongenoughpassword".toCharArray()); + final KeyStoreWrapper loadedkeystore = KeyStoreWrapper.load(env.configDir()); final GeneralSecurityException exception = expectThrows( GeneralSecurityException.class, () -> loadedkeystore.decrypt("shortpwd".toCharArray()) // shorter than 14 characters @@ -147,7 +147,7 @@ public void testCreateKeyStoreWithShortPasswordInFips() throws Exception { KeyStoreWrapper keystore = KeyStoreWrapper.create(); final GeneralSecurityException exception = expectThrows( GeneralSecurityException.class, - () -> keystore.save(env.configFile(), "shortpwd".toCharArray()) // shorter than 14 characters + () -> keystore.save(env.configDir(), "shortpwd".toCharArray()) // shorter than 14 characters ); assertThat(exception.getMessage(), containsString("Error generating an encryption key from the provided password")); } @@ -192,18 +192,18 @@ public void testUpgradeNoop() throws Exception { final char[] password = getPossibleKeystorePassword(); KeyStoreWrapper keystore = KeyStoreWrapper.create(); SecureString seed = 
keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()); - keystore.save(env.configFile(), password); + keystore.save(env.configDir(), password); // upgrade does not overwrite seed - KeyStoreWrapper.upgrade(keystore, env.configFile(), password); + KeyStoreWrapper.upgrade(keystore, env.configDir(), password); assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString()); - keystore = KeyStoreWrapper.load(env.configFile()); + keystore = KeyStoreWrapper.load(env.configDir()); keystore.decrypt(password); assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString()); } public void testFailWhenCannotConsumeSecretStream() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -234,7 +234,7 @@ public void testFailWhenCannotConsumeSecretStream() throws Exception { public void testFailWhenCannotConsumeEncryptedBytesStream() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -266,7 +266,7 @@ public void testFailWhenCannotConsumeEncryptedBytesStream() throws Exception { public void testFailWhenSecretStreamNotConsumed() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -296,7 +296,7 @@ public void testFailWhenSecretStreamNotConsumed() throws Exception { public void testFailWhenEncryptedBytesStreamIsNotConsumed() throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); - Path configDir = env.configFile(); + Path configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -359,11 +359,11 @@ public void testUpgradeAddsSeed() throws Exception { final char[] password = getPossibleKeystorePassword(); KeyStoreWrapper keystore = KeyStoreWrapper.create(); keystore.remove(KeyStoreWrapper.SEED_SETTING.getKey()); - keystore.save(env.configFile(), password); - KeyStoreWrapper.upgrade(keystore, env.configFile(), password); + keystore.save(env.configDir(), password); + KeyStoreWrapper.upgrade(keystore, env.configDir(), password); SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()); assertNotNull(seed); - keystore = KeyStoreWrapper.load(env.configFile()); + keystore = KeyStoreWrapper.load(env.configDir()); keystore.decrypt(password); assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString()); } @@ -380,7 +380,7 @@ public void testIllegalSettingName() throws Exception { public void testBackcompatV4() throws Exception { assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm()); - Path configDir = env.configFile(); + Path 
configDir = env.configDir(); try ( Directory directory = newFSDirectory(configDir); IndexOutput indexOutput = EndiannessReverserUtil.createOutput(directory, "elasticsearch.keystore", IOContext.DEFAULT) @@ -421,10 +421,10 @@ public void testStringAndFileDistinction() throws Exception { final Path temp = createTempDir(); Files.writeString(temp.resolve("file_setting"), "file_value", StandardCharsets.UTF_8); wrapper.setFile("file_setting", Files.readAllBytes(temp.resolve("file_setting"))); - wrapper.save(env.configFile(), password); + wrapper.save(env.configDir(), password); wrapper.close(); - final KeyStoreWrapper afterSave = KeyStoreWrapper.load(env.configFile()); + final KeyStoreWrapper afterSave = KeyStoreWrapper.load(env.configDir()); assertNotNull(afterSave); afterSave.decrypt(password); assertThat(afterSave.getSettingNames(), equalTo(Set.of("keystore.seed", "string_setting", "file_setting"))); @@ -510,8 +510,8 @@ public void testSerializationWhenLoadedFromFile() throws Exception { // testing with password and raw dataBytes[] final char[] password = getPossibleKeystorePassword(); - wrapper.save(env.configFile(), password); - final KeyStoreWrapper fromFile = KeyStoreWrapper.load(env.configFile()); + wrapper.save(env.configDir(), password); + final KeyStoreWrapper fromFile = KeyStoreWrapper.load(env.configDir()); fromFile.decrypt(password); assertThat(fromFile.getSettingNames(), hasSize(2)); diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java index bb533f32c7ac2..894b9d215a47f 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java @@ -62,11 +62,11 @@ private void assertKeystoreUpgradeWithPassword(String file, int version) throws } private void assertKeystoreUpgrade(String file, int version, @Nullable String password) throws Exception { - final Path keystore = KeyStoreWrapper.keystorePath(env.configFile()); + final Path keystore = KeyStoreWrapper.keystorePath(env.configDir()); try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream(file); OutputStream os = Files.newOutputStream(keystore)) { is.transferTo(os); } - try (KeyStoreWrapper beforeUpgrade = KeyStoreWrapper.load(env.configFile())) { + try (KeyStoreWrapper beforeUpgrade = KeyStoreWrapper.load(env.configDir())) { assertNotNull(beforeUpgrade); assertThat(beforeUpgrade.getFormatVersion(), equalTo(version)); } @@ -77,7 +77,7 @@ private void assertKeystoreUpgrade(String file, int version, @Nullable String pa execute(); terminal.reset(); - try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configFile())) { + try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configDir())) { assertNotNull(afterUpgrade); assertThat(afterUpgrade.getFormatVersion(), equalTo(KeyStoreWrapper.CURRENT_VERSION)); afterUpgrade.decrypt(password != null ? 
password.toCharArray() : new char[0]); @@ -87,6 +87,6 @@ private void assertKeystoreUpgrade(String file, int version, @Nullable String pa public void testKeystoreDoesNotExist() { final UserException e = expectThrows(UserException.class, this::execute); - assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configFile()) + "]"))); + assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configDir()) + "]"))); } } diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java index d443cf5e1e181..0803d24c3914f 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.env.Environment; import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.plugin.scanner.ClassReaders; import org.elasticsearch.plugin.scanner.NamedComponentScanner; import org.elasticsearch.plugins.Platforms; @@ -249,8 +250,8 @@ public void execute(List<InstallablePlugin> plugins) throws Exception { final List<Path> deleteOnFailure = new ArrayList<>(); deleteOnFailures.put(pluginId, deleteOnFailure); - final Path pluginZip = download(plugin, env.tmpFile()); - final Path extractedZip = unzip(pluginZip, env.pluginsFile()); + final Path pluginZip = download(plugin, env.tmpDir()); + final Path extractedZip = unzip(pluginZip, env.pluginsDir()); deleteOnFailure.add(extractedZip); final PluginDescriptor pluginDescriptor = installPlugin(plugin, extractedZip, deleteOnFailure); terminal.println(logPrefix + "Installed " + pluginDescriptor.getName()); @@ -868,14 +869,14 @@ private PluginDescriptor loadPluginInfo(Path pluginRoot) throws Exception { PluginsUtils.verifyCompatibility(info); // checking for existing version of the plugin - verifyPluginName(env.pluginsFile(), info.getName()); + verifyPluginName(env.pluginsDir(), info.getName()); - PluginsUtils.checkForFailedPluginRemovals(env.pluginsFile()); + PluginsUtils.checkForFailedPluginRemovals(env.pluginsDir()); terminal.println(VERBOSE, info.toString()); // check for jar hell before any copying - jarHellCheck(info, pluginRoot, env.pluginsFile(), env.modulesFile()); + jarHellCheck(info, pluginRoot, env.pluginsDir(), env.modulesDir()); if (info.isStable() && hasNamedComponentFile(pluginRoot) == false) { generateNameComponentFile(pluginRoot); @@ -922,10 +923,12 @@ void jarHellCheck(PluginDescriptor candidateInfo, Path candidateDir, Path plugin */ private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoot, List<Path> deleteOnFailure) throws Exception { final PluginDescriptor info = loadPluginInfo(tmpRoot); - PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpFile()); - if (pluginPolicy != null) { - Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpFile()); - PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch); + if (RuntimeVersionFeature.isSecurityManagerAvailable()) { + PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir()); + if (pluginPolicy != null) { + Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir()); +
PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch); + } } // Validate that the downloaded plugin's ID matches what we expect from the descriptor. The @@ -938,14 +941,14 @@ private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoo ); } - final Path destination = env.pluginsFile().resolve(info.getName()); + final Path destination = env.pluginsDir().resolve(info.getName()); deleteOnFailure.add(destination); installPluginSupportFiles( info, tmpRoot, - env.binFile().resolve(info.getName()), - env.configFile().resolve(info.getName()), + env.binDir().resolve(info.getName()), + env.configDir().resolve(info.getName()), deleteOnFailure ); movePlugin(tmpRoot, destination); diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java index fc578c81b24c9..f51a478fe2135 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/ListPluginsCommand.java @@ -40,13 +40,13 @@ class ListPluginsCommand extends EnvironmentAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - if (Files.exists(env.pluginsFile()) == false) { - throw new IOException("Plugins directory missing: " + env.pluginsFile()); + if (Files.exists(env.pluginsDir()) == false) { + throw new IOException("Plugins directory missing: " + env.pluginsDir()); } - terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsFile()); + terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsDir()); final List<Path> plugins = new ArrayList<>(); - try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsDir())) { for (Path path : paths) { if (path.getFileName().toString().equals(ELASTICSEARCH_PLUGINS_YML_CACHE) == false) { plugins.add(path); @@ -61,7 +61,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce private static void printPlugin(Environment env, Terminal terminal, Path plugin, String prefix) throws IOException { terminal.println(Terminal.Verbosity.SILENT, prefix + plugin.getFileName().toString()); - PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsFile().resolve(plugin)); + PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsDir().resolve(plugin)); terminal.println(Terminal.Verbosity.VERBOSE, info.toString(prefix)); // When PluginDescriptor#getElasticsearchVersion returns a string, we can revisit the need diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java index a8f9e746a24e1..ac9c2b21788c6 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/RemovePluginAction.java @@ -93,7 +93,7 @@ private void ensurePluginsNotUsedByOtherPlugins(List<InstallablePlugin> plugins) // We build a new map where the keys are plugins that extend plugins // we want to remove and the values are the plugins we can't remove // because of this dependency - Map<String, List<String>> pluginDependencyMap =
PluginsUtils.getDependencyMapView(env.pluginsFile()); + Map<String, List<String>> pluginDependencyMap = PluginsUtils.getDependencyMapView(env.pluginsDir()); for (Map.Entry<String, List<String>> entry : pluginDependencyMap.entrySet()) { for (String extendedPlugin : entry.getValue()) { for (InstallablePlugin plugin : plugins) { @@ -121,9 +121,9 @@ private void ensurePluginsNotUsedByOtherPlugins(List<InstallablePlugin> plugins) private void checkCanRemove(InstallablePlugin plugin) throws UserException { String pluginId = plugin.getId(); - final Path pluginDir = env.pluginsFile().resolve(pluginId); - final Path pluginConfigDir = env.configFile().resolve(pluginId); - final Path removing = env.pluginsFile().resolve(".removing-" + pluginId); + final Path pluginDir = env.pluginsDir().resolve(pluginId); + final Path pluginConfigDir = env.configDir().resolve(pluginId); + final Path removing = env.pluginsDir().resolve(".removing-" + pluginId); /* * If the plugin does not exist and the plugin config does not exist, fail to the user that the plugin is not found, unless there's @@ -147,7 +147,7 @@ private void checkCanRemove(InstallablePlugin plugin) throws UserException { } } - final Path pluginBinDir = env.binFile().resolve(pluginId); + final Path pluginBinDir = env.binDir().resolve(pluginId); if (Files.exists(pluginBinDir)) { if (Files.isDirectory(pluginBinDir) == false) { throw new UserException(ExitCodes.IO_ERROR, "bin dir for " + pluginId + " is not a directory"); @@ -157,9 +157,9 @@ private void checkCanRemove(InstallablePlugin plugin) throws UserException { private void removePlugin(InstallablePlugin plugin) throws IOException { final String pluginId = plugin.getId(); - final Path pluginDir = env.pluginsFile().resolve(pluginId); - final Path pluginConfigDir = env.configFile().resolve(pluginId); - final Path removing = env.pluginsFile().resolve(".removing-" + pluginId); + final Path pluginDir = env.pluginsDir().resolve(pluginId); + final Path pluginConfigDir = env.configDir().resolve(pluginId); + final Path removing = env.pluginsDir().resolve(".removing-" + pluginId); terminal.println("-> removing [" + pluginId + "]..."); @@ -176,7 +176,7 @@ private void removePlugin(InstallablePlugin plugin) throws IOException { terminal.println(VERBOSE, "removing [" + pluginDir + "]"); } - final Path pluginBinDir = env.binFile().resolve(pluginId); + final Path pluginBinDir = env.binDir().resolve(pluginId); if (Files.exists(pluginBinDir)) { try (Stream<Path> paths = Files.list(pluginBinDir)) { pluginPaths.addAll(paths.toList()); diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java index d6d0619422770..6d77437bd71d5 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java @@ -61,7 +61,7 @@ public SyncPluginsAction(Terminal terminal, Environment env) { * @throws UserException if a plugins config file is found.
*/ public static void ensureNoConfigFile(Environment env) throws UserException { - final Path pluginsConfig = env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML); + final Path pluginsConfig = env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML); if (Files.exists(pluginsConfig)) { throw new UserException( ExitCodes.USAGE, @@ -79,16 +79,16 @@ public static void ensureNoConfigFile(Environment env) throws UserException { * @throws Exception if anything goes wrong */ public void execute() throws Exception { - final Path configPath = this.env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML); - final Path previousConfigPath = this.env.pluginsFile().resolve(ELASTICSEARCH_PLUGINS_YML_CACHE); + final Path configPath = this.env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML); + final Path previousConfigPath = this.env.pluginsDir().resolve(ELASTICSEARCH_PLUGINS_YML_CACHE); if (Files.exists(configPath) == false) { // The `PluginsManager` will have checked that this file exists before invoking the action. throw new PluginSyncException("Plugins config does not exist: " + configPath.toAbsolutePath()); } - if (Files.exists(env.pluginsFile()) == false) { - throw new PluginSyncException("Plugins directory missing: " + env.pluginsFile()); + if (Files.exists(env.pluginsDir()) == false) { + throw new PluginSyncException("Plugins directory missing: " + env.pluginsDir()); } // Parse descriptor file @@ -267,14 +267,14 @@ private List<PluginDescriptor> getExistingPlugins() throws PluginSyncException { final List<PluginDescriptor> plugins = new ArrayList<>(); try { - try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> paths = Files.newDirectoryStream(env.pluginsDir())) { for (Path pluginPath : paths) { String filename = pluginPath.getFileName().toString(); if (filename.startsWith(".")) { continue; } - PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsFile().resolve(pluginPath)); + PluginDescriptor info = PluginDescriptor.readFromProperties(env.pluginsDir().resolve(pluginPath)); plugins.add(info); // Check for a version mismatch, unless it's an official plugin since we can upgrade them.
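// The recurring change in the keystore-cli and plugin-cli hunks above is a rename of
// Environment's path accessors from *File() to *Dir(): configFile() -> configDir(),
// pluginsFile() -> pluginsDir(), binFile() -> binDir(), modulesFile() -> modulesDir(),
// tmpFile() -> tmpDir(), logsFile() -> logsDir(). The returned values are unchanged;
// each of these methods already resolved a directory, which the old names obscured.
// A minimal before/after sketch (illustration only, not part of the patch itself;
// assumes an org.elasticsearch.env.Environment `env` and a char[] `password`, both as
// used in the keystore commands above):
//
//     KeyStoreWrapper keyStore = KeyStoreWrapper.load(env.configDir());  // was env.configFile()
//     if (keyStore == null) {
//         keyStore = KeyStoreWrapper.create();                           // no keystore yet: start a fresh one
//     }
//     keyStore.save(env.configDir(), password);                          // save() signature unchanged,
//                                                                        // only the accessor name moved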
diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java index 88b24ab9ae614..a5dacebec69bc 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsCliProvider.java @@ -37,7 +37,7 @@ public Command create() { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { var action = new SyncPluginsAction(terminal, env); - if (Files.exists(env.configFile().resolve(ELASTICSEARCH_PLUGINS_YML)) == false) { + if (Files.exists(env.configDir().resolve(ELASTICSEARCH_PLUGINS_YML)) == false) { return; } if (Build.current().type() != Build.Type.DOCKER) { diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java index d638534943ecd..d2c8d4adb4d1b 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java @@ -354,7 +354,7 @@ void installPlugins(final List<InstallablePlugin> plugins, final Path home, fina } void assertPlugin(String name, Path original, Environment environment) throws IOException { - assertPluginInternal(name, environment.pluginsFile(), original); + assertPluginInternal(name, environment.pluginsDir(), original); assertConfigAndBin(name, original, environment); assertInstallCleaned(environment); } @@ -395,7 +395,7 @@ void assertPluginInternal(String name, Path pluginsFile, Path originalPlugin) th void assertConfigAndBin(String name, Path original, Environment environment) throws IOException { if (Files.exists(original.resolve("bin"))) { - Path binDir = environment.binFile().resolve(name); + Path binDir = environment.binDir().resolve(name); assertTrue("bin dir exists", Files.exists(binDir)); assertTrue("bin is a dir", Files.isDirectory(binDir)); try (DirectoryStream<Path> stream = Files.newDirectoryStream(binDir)) { @@ -409,7 +409,7 @@ void assertConfigAndBin(String name, Path original, Environment environment) thr } } if (Files.exists(original.resolve("config"))) { - Path configDir = environment.configFile().resolve(name); + Path configDir = environment.configDir().resolve(name); assertTrue("config dir exists", Files.exists(configDir)); assertTrue("config is a dir", Files.isDirectory(configDir)); @@ -417,7 +417,7 @@ void assertConfigAndBin(String name, Path original, Environment environment) thr GroupPrincipal group = null; if (isPosix) { - PosixFileAttributes configAttributes = Files.getFileAttributeView(environment.configFile(), PosixFileAttributeView.class) + PosixFileAttributes configAttributes = Files.getFileAttributeView(environment.configDir(), PosixFileAttributeView.class) .readAttributes(); user = configAttributes.owner(); group = configAttributes.group(); @@ -446,7 +446,7 @@ void assertConfigAndBin(String name, Path original, Environment environment) thr } void assertInstallCleaned(Environment environment) throws IOException { - try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) { + try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsDir())) { for
(Path file : stream) { if (file.getFileName().toString().startsWith(".installing")) { fail("Installation dir still exists, " + file); @@ -549,7 +549,7 @@ public void testTransaction() throws Exception { () -> installPlugins(List.of(pluginZip, nonexistentPluginZip), env.v1()) ); assertThat(e.getMessage(), containsString("does-not-exist")); - final Path fakeInstallPath = env.v2().pluginsFile().resolve("fake"); + final Path fakeInstallPath = env.v2().pluginsDir().resolve("fake"); // fake should have been removed when the file not found exception occurred assertFalse(Files.exists(fakeInstallPath)); assertInstallCleaned(env.v2()); @@ -557,7 +557,7 @@ public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception { InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); - final Path removing = env.v2().pluginsFile().resolve(".removing-failed"); + final Path removing = env.v2().pluginsDir().resolve(".removing-failed"); Files.createDirectory(removing); final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip)); final String expected = Strings.format( @@ -603,11 +603,11 @@ public void testUnknownPlugin() { public void testPluginsDirReadOnly() throws Exception { assumeTrue("posix and filesystem", isPosix && isReal); - try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) { + try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsDir())) { pluginsAttrs.setPermissions(new HashSet<>()); InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip)); - assertThat(e.getMessage(), containsString(env.v2().pluginsFile().toString())); + assertThat(e.getMessage(), containsString(env.v2().pluginsDir().toString())); } assertInstallCleaned(env.v2()); } @@ -694,7 +694,7 @@ public void testBinConflict() throws Exception { Files.createFile(binDir.resolve("somescript")); InstallablePlugin pluginZip = createPluginZip("elasticsearch", pluginDir); FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip)); - assertThat(e.getMessage(), containsString(env.v2().binFile().resolve("elasticsearch").toString())); + assertThat(e.getMessage(), containsString(env.v2().binDir().resolve("elasticsearch").toString())); assertInstallCleaned(env.v2()); } @@ -704,7 +704,7 @@ public void testBinPermissions() throws Exception { Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); InstallablePlugin pluginZip = createPluginZip("fake", pluginDir); - try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) { + try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binDir())) { Set<PosixFilePermission> perms = binAttrs.getCopyPermissions(); // make sure at least one execute perm is missing, so we know we forced it during installation perms.remove(PosixFilePermission.GROUP_EXECUTE); @@ -734,7 +734,7 @@ public void testPluginPermissions() throws Exception { installPlugin(pluginZip); assertPlugin("fake", tempPluginDir, env.v2()); - final Path fake = env.v2().pluginsFile().resolve("fake"); + final Path fake = env.v2().pluginsDir().resolve("fake"); final Path resources = fake.resolve("resources"); final Path platform = fake.resolve("platform"); final Path platformName = platform.resolve("linux-x86_64"); @@ -784,7 +784,7 @@ public void testConfig() throws
Exception { } public void testExistingConfig() throws Exception { - Path envConfigDir = env.v2().configFile().resolve("fake"); + Path envConfigDir = env.v2().configDir().resolve("fake"); Files.createDirectories(envConfigDir); Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8)); Path configDir = pluginDir.resolve("config"); @@ -921,7 +921,7 @@ public void testPluginAlreadyInstalled() throws Exception { e.getMessage(), equalTo( "plugin directory [" - + env.v2().pluginsFile().resolve("fake") + + env.v2().pluginsDir().resolve("fake") + "] already exists; " + "if you need to update the plugin, uninstall it first using command 'remove fake'" ) @@ -1499,7 +1499,7 @@ private void assertPolicyConfirmation(Tuple<Path, Environment> pathEnvironmentTu assertThat(e.getMessage(), containsString("installation aborted by user")); assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning)); - try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsFile())) { + try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsDir())) { assertThat(fileStream.collect(Collectors.toList()), empty()); } @@ -1512,7 +1512,7 @@ private void assertPolicyConfirmation(Tuple<Path, Environment> pathEnvironmentTu e = expectThrows(UserException.class, () -> installPlugin(pluginZip)); assertThat(e.getMessage(), containsString("installation aborted by user")); assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning)); - try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsFile())) { + try (Stream<Path> fileStream = Files.list(pathEnvironmentTuple.v2().pluginsDir())) { assertThat(fileStream.collect(Collectors.toList()), empty()); } } @@ -1566,7 +1566,7 @@ public void testStablePluginWithNamedComponentsFile() throws Exception { InstallablePlugin stablePluginZip = createStablePlugin("stable1", pluginDir, true); installPlugins(List.of(stablePluginZip), env.v1()); assertPlugin("stable1", pluginDir, env.v2()); - assertNamedComponentFile("stable1", env.v2().pluginsFile(), namedComponentsJSON()); + assertNamedComponentFile("stable1", env.v2().pluginsDir(), namedComponentsJSON()); } @SuppressWarnings("unchecked") @@ -1577,7 +1577,7 @@ public void testStablePluginWithoutNamedComponentsFile() throws Exception { installPlugins(List.of(stablePluginZip), env.v1()); assertPlugin("stable1", pluginDir, env.v2()); - assertNamedComponentFile("stable1", env.v2().pluginsFile(), namedComponentsJSON()); + assertNamedComponentFile("stable1", env.v2().pluginsDir(), namedComponentsJSON()); } public void testGetSemanticVersion() { diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java index 0064b8c4bc513..5249aeefc2f2d 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ListPluginsCommandTests.java @@ -65,7 +65,7 @@ private static void buildFakePlugin( final boolean hasNativeController ) throws IOException { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve(name), + env.pluginsDir().resolve(name), "description", description, "name", @@ -84,9 +84,9 @@ private static void buildFakePlugin( } public void testPluginsDirMissing() throws Exception { - Files.delete(env.pluginsFile()); + Files.delete(env.pluginsDir()); IOException e = expectThrows(IOException.class, ()
-> execute()); - assertEquals("Plugins directory missing: " + env.pluginsFile(), e.getMessage()); + assertEquals("Plugins directory missing: " + env.pluginsDir(), e.getMessage()); } public void testNoPlugins() throws Exception { @@ -112,7 +112,7 @@ public void testPluginWithVerbose() throws Exception { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin", "- Plugin information:", "Name: fake_plugin", @@ -134,7 +134,7 @@ public void testPluginWithNativeController() throws Exception { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", @@ -157,7 +157,7 @@ public void testPluginWithVerboseMultiplePlugins() throws Exception { execute("-v"); assertEquals( buildMultiline( - "Plugins directory: " + env.pluginsFile(), + "Plugins directory: " + env.pluginsDir(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", @@ -193,14 +193,14 @@ public void testPluginWithoutVerboseMultiplePlugins() throws Exception { } public void testPluginWithoutDescriptorFile() throws Exception { - final Path pluginDir = env.pluginsFile().resolve("fake1"); + final Path pluginDir = env.pluginsDir().resolve("fake1"); Files.createDirectories(pluginDir); var e = expectThrows(IllegalStateException.class, () -> execute()); assertThat(e.getMessage(), equalTo("Plugin [fake1] is missing a descriptor properties file.")); } public void testPluginWithWrongDescriptorFile() throws Exception { - final Path pluginDir = env.pluginsFile().resolve("fake1"); + final Path pluginDir = env.pluginsDir().resolve("fake1"); PluginTestUtil.writePluginProperties(pluginDir, "description", "fake desc"); var e = expectThrows(IllegalArgumentException.class, () -> execute()); assertThat(e.getMessage(), startsWith("property [name] is missing for plugin")); @@ -208,7 +208,7 @@ public void testPluginWithWrongDescriptorFile() throws Exception { public void testExistingIncompatiblePlugin() throws Exception { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve("fake_plugin1"), + env.pluginsDir().resolve("fake_plugin1"), "description", "fake desc 1", "name", diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java index aabdd4aaceb9e..8338c395e5e4c 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/RemovePluginActionTests.java @@ -58,11 +58,11 @@ public void setUp() throws Exception { } void createPlugin(String name) throws IOException { - createPlugin(env.pluginsFile(), name, Version.CURRENT); + createPlugin(env.pluginsDir(), name, Version.CURRENT); } void createPlugin(String name, Version version) throws IOException { - createPlugin(env.pluginsFile(), name, version); + createPlugin(env.pluginsDir(), name, version); } void createPlugin(Path path, String name, Version version) throws IOException { @@ -98,7 +98,7 @@ static MockTerminal removePlugin(List<String> pluginIds, Path home, boolean purg } static void assertRemoveCleaned(Environment env) throws IOException { - try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) { + try (DirectoryStream<Path> stream =
Files.newDirectoryStream(env.pluginsDir())) { for (Path file : stream) { if (file.getFileName().toString().startsWith(".removing")) { fail("Removal dir still exists, " + file); @@ -115,84 +115,84 @@ public void testMissing() throws Exception { public void testBasic() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("fake").resolve("subdir")); createPlugin("other"); removePlugin("fake", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertTrue(Files.exists(env.pluginsFile().resolve("other"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertTrue(Files.exists(env.pluginsDir().resolve("other"))); assertRemoveCleaned(env); } /** Check that multiple plugins can be removed at the same time. */ public void testRemoveMultiple() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("fake").resolve("subdir")); createPlugin("other"); - Files.createFile(env.pluginsFile().resolve("other").resolve("plugin.jar")); - Files.createDirectory(env.pluginsFile().resolve("other").resolve("subdir")); + Files.createFile(env.pluginsDir().resolve("other").resolve("plugin.jar")); + Files.createDirectory(env.pluginsDir().resolve("other").resolve("subdir")); removePlugin("fake", home, randomBoolean()); removePlugin("other", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertFalse(Files.exists(env.pluginsFile().resolve("other"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertFalse(Files.exists(env.pluginsDir().resolve("other"))); assertRemoveCleaned(env); } public void testBin() throws Exception { createPlugin("fake"); - Path binDir = env.binFile().resolve("fake"); + Path binDir = env.binDir().resolve("fake"); Files.createDirectories(binDir); Files.createFile(binDir.resolve("somescript")); removePlugin("fake", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); - assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); + assertFalse(Files.exists(env.pluginsDir().resolve("fake"))); + assertTrue(Files.exists(env.binDir().resolve("elasticsearch"))); assertFalse(Files.exists(binDir)); assertRemoveCleaned(env); } public void testBinNotDir() throws Exception { createPlugin("fake"); - Files.createFile(env.binFile().resolve("fake")); + Files.createFile(env.binDir().resolve("fake")); UserException e = expectThrows(UserException.class, () -> removePlugin("fake", home, randomBoolean())); assertThat(e.getMessage(), containsString("not a directory")); - assertTrue(Files.exists(env.pluginsFile().resolve("fake"))); // did not remove - assertTrue(Files.exists(env.binFile().resolve("fake"))); + assertTrue(Files.exists(env.pluginsDir().resolve("fake"))); // did not remove + assertTrue(Files.exists(env.binDir().resolve("fake"))); assertRemoveCleaned(env); } public void testConfigDirPreserved() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = 
env.configDir().resolve("fake"); Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); final MockTerminal terminal = removePlugin("fake", home, false); - assertTrue(Files.exists(env.configFile().resolve("fake"))); + assertTrue(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), containsString(expectedConfigDirPreservedMessage(configDir))); assertRemoveCleaned(env); } public void testPurgePluginExists() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); if (randomBoolean()) { Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); } final MockTerminal terminal = removePlugin("fake", home, true); - assertFalse(Files.exists(env.configFile().resolve("fake"))); + assertFalse(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); assertRemoveCleaned(env); } public void testPurgePluginDoesNotExist() throws Exception { - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); Files.createDirectories(configDir); Files.createFile(configDir.resolve("fake.yml")); final MockTerminal terminal = removePlugin("fake", home, true); - assertFalse(Files.exists(env.configFile().resolve("fake"))); + assertFalse(Files.exists(env.configDir().resolve("fake"))); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); assertRemoveCleaned(env); } @@ -203,8 +203,8 @@ public void testPurgeNothingExists() throws Exception { } public void testPurgeOnlyMarkerFileExists() throws Exception { - final Path configDir = env.configFile().resolve("fake"); - final Path removing = env.pluginsFile().resolve(".removing-fake"); + final Path configDir = env.configDir().resolve("fake"); + final Path removing = env.pluginsDir().resolve(".removing-fake"); Files.createFile(removing); final MockTerminal terminal = removePlugin("fake", home, randomBoolean()); assertFalse(Files.exists(removing)); @@ -213,7 +213,7 @@ public void testPurgeOnlyMarkerFileExists() throws Exception { public void testNoConfigDirPreserved() throws Exception { createPlugin("fake"); - final Path configDir = env.configFile().resolve("fake"); + final Path configDir = env.configDir().resolve("fake"); final MockTerminal terminal = removePlugin("fake", home, randomBoolean()); assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir)))); } @@ -250,8 +250,8 @@ public void testMissingPluginName() { public void testRemoveWhenRemovingMarker() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); - Files.createFile(env.pluginsFile().resolve(".removing-fake")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve(".removing-fake")); removePlugin("fake", home, randomBoolean()); } @@ -262,10 +262,10 @@ public void testRemoveWhenRemovingMarker() throws Exception { public void testRemoveMigratedPluginsWhenInstalled() throws Exception { for (String id : List.of("repository-azure", "repository-gcs", "repository-s3")) { createPlugin(id); - Files.createFile(env.pluginsFile().resolve(id).resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve(id).resolve("plugin.jar")); final MockTerminal terminal = removePlugin(id, home, 
randomBoolean()); - assertThat(Files.exists(env.pluginsFile().resolve(id)), is(false)); + assertThat(Files.exists(env.pluginsDir().resolve(id)), is(false)); // This message shouldn't be printed if plugin was actually installed. assertThat(terminal.getErrorOutput(), not(containsString("plugin [" + id + "] is no longer a plugin"))); } @@ -288,11 +288,11 @@ public void testRemoveMigratedPluginsWhenNotInstalled() throws Exception { */ public void testRemoveRegularInstalledPluginAndMigratedUninstalledPlugin() throws Exception { createPlugin("fake"); - Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); + Files.createFile(env.pluginsDir().resolve("fake").resolve("plugin.jar")); final MockTerminal terminal = removePlugin(List.of("fake", "repository-s3"), home, randomBoolean()); - assertThat(Files.exists(env.pluginsFile().resolve("fake")), is(false)); + assertThat(Files.exists(env.pluginsDir().resolve("fake")), is(false)); assertThat(terminal.getErrorOutput(), containsString("plugin [repository-s3] is no longer a plugin")); } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java index 2d2336428a0a5..1a09736160956 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/SyncPluginsActionTests.java @@ -55,10 +55,10 @@ public void setUp() throws Exception { Path home = createTempDir(); Settings settings = Settings.builder().put("path.home", home).build(); env = TestEnvironment.newEnvironment(settings); - Files.createDirectories(env.binFile()); - Files.createFile(env.binFile().resolve("elasticsearch")); - Files.createDirectories(env.configFile()); - Files.createDirectories(env.pluginsFile()); + Files.createDirectories(env.binDir()); + Files.createFile(env.binDir().resolve("elasticsearch")); + Files.createDirectories(env.configDir()); + Files.createDirectories(env.pluginsDir()); terminal = MockTerminal.create(); action = new SyncPluginsAction(terminal, env); @@ -78,7 +78,7 @@ public void test_ensureNoConfigFile_withoutConfig_doesNothing() throws Exception * then an exception is thrown. 
*/ public void test_ensureNoConfigFile_withConfig_throwsException() throws Exception { - Files.createFile(env.configFile().resolve("elasticsearch-plugins.yml")); + Files.createFile(env.configDir().resolve("elasticsearch-plugins.yml")); final UserException e = expectThrows(UserException.class, () -> SyncPluginsAction.ensureNoConfigFile(env)); assertThat(e.getMessage(), Matchers.matchesPattern("^Plugins config \\[.*] exists.*$")); @@ -354,7 +354,7 @@ private void createPlugin(String name) throws IOException { private void createPlugin(String name, String version) throws IOException { PluginTestUtil.writePluginProperties( - env.pluginsFile().resolve(name), + env.pluginsDir().resolve(name), "description", "dummy", "name", diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java index 9430cb598cf02..2ae58040437af 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeyStoreLoader.java @@ -24,7 +24,7 @@ public class KeyStoreLoader implements SecureSettingsLoader { @Override public LoadedSecrets load(Environment environment, Terminal terminal) throws Exception { // See if we have a keystore already present - KeyStoreWrapper secureSettings = KeyStoreWrapper.load(environment.configFile()); + KeyStoreWrapper secureSettings = KeyStoreWrapper.load(environment.configDir()); // If there's no keystore or the keystore has no password, set an empty password var password = (secureSettings == null || secureSettings.hasPassword() == false) ? new SecureString(new char[0]) @@ -35,7 +35,7 @@ public LoadedSecrets load(Environment environment, Terminal terminal) throws Exc @Override public SecureSettings bootstrap(Environment environment, SecureString password) throws Exception { - return KeyStoreWrapper.bootstrap(environment.configFile(), () -> password); + return KeyStoreWrapper.bootstrap(environment.configDir(), () -> password); } @Override diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 22b62972befe4..be454350133eb 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -150,7 +150,7 @@ private void validateConfig(OptionSet options, Environment env) throws UserExcep throw new UserException(ExitCodes.USAGE, "Multiple --enrollment-token parameters are not allowed"); } - Path log4jConfig = env.configFile().resolve("log4j2.properties"); + Path log4jConfig = env.configDir().resolve("log4j2.properties"); if (Files.exists(log4jConfig) == false) { throw new UserException(ExitCodes.CONFIG, "Missing logging config file at " + log4jConfig); } @@ -239,7 +239,7 @@ private ServerArgs createArgs(OptionSet options, Environment env, SecureSettings } validatePidFile(pidFile); } - return new ServerArgs(daemonize, quiet, pidFile, secrets, env.settings(), env.configFile(), env.logsFile()); + return new ServerArgs(daemonize, quiet, pidFile, secrets, env.settings(), env.configDir(), env.logsDir()); } @Override diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java 
b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index 66ee712fcce95..2854d76c110d1 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -43,8 +43,8 @@ class WindowsServiceDaemon extends EnvironmentAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { // the Windows service daemon doesn't support secure settings implementations other than the keystore - try (var loadedSecrets = KeyStoreWrapper.bootstrap(env.configFile(), () -> new SecureString(new char[0]))) { - var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configFile(), env.logsFile()); + try (var loadedSecrets = KeyStoreWrapper.bootstrap(env.configDir(), () -> new SecureString(new char[0]))) { + var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configDir(), env.logsDir()); var tempDir = ServerProcessUtils.setupTempDir(processInfo); var jvmOptions = JvmOptionsParser.determineJvmOptions(args, processInfo, tempDir, new MachineDependentHeap()); var serverProcessBuilder = new ServerProcessBuilder().withTerminal(terminal) diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index f2e61861bd3a6..c2e14a399b70e 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 10.0.0 -:lucene_version_path: 10_0_0 +:lucene_version: 10.1.0 +:lucene_version_path: 10_1_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/build.gradle b/docs/build.gradle index 3a1070b3fc356..505bf2fb1ddfb 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -120,7 +120,6 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0 systemProperty 'es.transport.cname_in_publish_address', 'true' - systemProperty 'es.queryable_built_in_roles_enabled', 'false' requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.12.0") @@ -856,6 +855,9 @@ buildRestTests.setups['library'] = ''' ''' buildRestTests.setups['sensor_rollup_job'] = ''' + - requires: + test_runner_features: [ "allowed_warnings" ] + - do: indices.create: index: dummy-rollup-index @@ -886,9 +888,10 @@ buildRestTests.setups['sensor_rollup_job'] = ''' node: type: keyword - do: - raw: - method: PUT - path: _rollup/job/sensor + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." + rollup.put_job: + id: sensor body: > { "index_pattern": "sensor-*", @@ -918,6 +921,9 @@ buildRestTests.setups['sensor_rollup_job'] = ''' } ''' buildRestTests.setups['sensor_started_rollup_job'] = ''' + - requires: + test_runner_features: [ "allowed_warnings" ] + - do: indices.create: index: dummy-rollup-index @@ -967,9 +973,10 @@ buildRestTests.setups['sensor_started_rollup_job'] = ''' {"timestamp": 1516297294000, "temperature": 202, "voltage": 4.0, "node": "c"} - do: - raw: - method: PUT - path: _rollup/job/sensor + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
+ rollup.put_job: + id: sensor body: > { "index_pattern": "sensor-*", @@ -998,9 +1005,10 @@ buildRestTests.setups['sensor_started_rollup_job'] = ''' ] } - do: - raw: - method: POST - path: _rollup/job/sensor/_start + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." + rollup.start_job: + id: sensor ''' buildRestTests.setups['sensor_index'] = ''' diff --git a/docs/changelog/113131.yaml b/docs/changelog/113131.yaml new file mode 100644 index 0000000000000..684ec2e64fb5b --- /dev/null +++ b/docs/changelog/113131.yaml @@ -0,0 +1,10 @@ +pr: 113131 +summary: Emit deprecation warning when executing one of the rollup APIs +area: Rollup +type: deprecation +issues: [] +deprecation: + title: Emit deprecation warning when executing one of the rollup APIs + area: Rollup + details: Rollup has been deprecated via documentation since 8.11.0, and since 8.15.0 it is no longer possible to create new rollup jobs in clusters without rollup usage. This change updates the rollup APIs to emit a deprecation warning. + impact: A deprecation warning is returned when using one of the rollup APIs. diff --git a/docs/changelog/116026.yaml b/docs/changelog/116026.yaml new file mode 100644 index 0000000000000..9beb6a1891bb8 --- /dev/null +++ b/docs/changelog/116026.yaml @@ -0,0 +1,13 @@ +pr: 116026 +summary: Change Elasticsearch timeouts to 429 response instead of 5xx +area: Infra/Core +type: breaking +issues: [] +breaking: + title: Change most Elasticsearch timeouts to 429 response instead of 5xx + area: REST API + details: When a timeout occurs in most REST requests, whether via a per-request timeout or a system default, the + request would return a 5xx response code. The response code from those APIs when a timeout occurs is now 429. + impact: Adjust any code relying on retrying on 5xx responses for timeouts to look for a 429 response code and + inspect the response to determine whether a timeout occurred.
+ notable: false diff --git a/docs/changelog/117176.yaml b/docs/changelog/117176.yaml new file mode 100644 index 0000000000000..26e0d3635bc9e --- /dev/null +++ b/docs/changelog/117176.yaml @@ -0,0 +1,5 @@ +pr: 117176 +summary: Integrate IBM watsonx with the Inference API for re-ranking tasks +area: Experiences +type: enhancement +issues: [] diff --git a/docs/changelog/117201.yaml b/docs/changelog/117201.yaml deleted file mode 100644 index f8a2be35c70a3..0000000000000 --- a/docs/changelog/117201.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117201 -summary: "Use `field_caps` native nested fields filtering" -area: ES|QL -type: bug -issues: - - 117054 diff --git a/docs/changelog/117504.yaml b/docs/changelog/117504.yaml deleted file mode 100644 index 91a62c61b88f2..0000000000000 --- a/docs/changelog/117504.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117504 -summary: Fix NPE caused by race condition in async search when minimise round trips - is true -area: Search -type: bug -issues: [] diff --git a/docs/changelog/119308.yaml b/docs/changelog/119308.yaml new file mode 100644 index 0000000000000..bb47cac148a82 --- /dev/null +++ b/docs/changelog/119308.yaml @@ -0,0 +1,5 @@ +pr: 119308 +summary: Upgrade to Lucene 10.1.0 +area: Search +type: upgrade +issues: [] diff --git a/docs/changelog/119546.yaml b/docs/changelog/119546.yaml new file mode 100644 index 0000000000000..017bbb845c0a6 --- /dev/null +++ b/docs/changelog/119546.yaml @@ -0,0 +1,5 @@ +pr: 119546 +summary: Introduce `FallbackSyntheticSourceBlockLoader` and apply it to keyword fields +area: Mapping +type: enhancement +issues: [] diff --git a/docs/changelog/119748.yaml b/docs/changelog/119748.yaml deleted file mode 100644 index 8b29fb7c1a647..0000000000000 --- a/docs/changelog/119748.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 119748 -summary: Issue S3 web identity token refresh call with sufficient permissions -area: Snapshot/Restore -type: bug -issues: - - 119747 diff --git a/docs/changelog/120014.yaml b/docs/changelog/120014.yaml deleted file mode 100644 index bef1f3ba49939..0000000000000 --- a/docs/changelog/120014.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120014 -summary: Fix potential file leak in ES816BinaryQuantizedVectorsWriter -area: Search -type: bug -issues: - - 119981 diff --git a/docs/changelog/120062.yaml b/docs/changelog/120062.yaml deleted file mode 100644 index 42e8d97f17444..0000000000000 --- a/docs/changelog/120062.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120062 -summary: Update Text Similarity Reranker to Properly Handle Aliases -area: Ranking -type: bug -issues: - - 119617 diff --git a/docs/changelog/120064.yaml b/docs/changelog/120064.yaml new file mode 100644 index 0000000000000..8874b2ffe9588 --- /dev/null +++ b/docs/changelog/120064.yaml @@ -0,0 +1,5 @@ +pr: 120064 +summary: Change the auditor to write via an alias +area: Machine Learning +type: upgrade +issues: [] diff --git a/docs/changelog/120133.yaml b/docs/changelog/120133.yaml deleted file mode 100644 index 4ec88267a1bf8..0000000000000 --- a/docs/changelog/120133.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120133 -summary: Use approximation to advance matched queries -area: Search -type: bug -issues: - - 120130 diff --git a/docs/changelog/120222.yaml b/docs/changelog/120222.yaml new file mode 100644 index 0000000000000..c9ded878ac031 --- /dev/null +++ b/docs/changelog/120222.yaml @@ -0,0 +1,5 @@ +pr: 120222 +summary: Adding linear retriever to support weighted sums of sub-retrievers +area: "Search" +type: enhancement +issues: [] diff --git a/docs/changelog/120256.yaml
b/docs/changelog/120256.yaml deleted file mode 100644 index c4ee5ab1705c5..0000000000000 --- a/docs/changelog/120256.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 120256 -summary: Improve memory aspects of enrich cache -area: Ingest Node -type: enhancement -issues: - - 96050 - - 120021 diff --git a/docs/changelog/120340.yaml b/docs/changelog/120340.yaml new file mode 100644 index 0000000000000..3c2200170c0c4 --- /dev/null +++ b/docs/changelog/120340.yaml @@ -0,0 +1,5 @@ +pr: 120340 +summary: Add support for `extended_stats` +area: Transform +type: enhancement +issues: [] diff --git a/docs/changelog/120343.yaml b/docs/changelog/120343.yaml new file mode 100644 index 0000000000000..f33bd215877c7 --- /dev/null +++ b/docs/changelog/120343.yaml @@ -0,0 +1,6 @@ +pr: 120343 +summary: Support some stats on aggregate_metric_double +area: "ES|QL" +type: enhancement +issues: + - 110649 diff --git a/docs/changelog/120355.yaml b/docs/changelog/120355.yaml new file mode 100644 index 0000000000000..fd335fe1f5892 --- /dev/null +++ b/docs/changelog/120355.yaml @@ -0,0 +1,5 @@ +pr: 120355 +summary: Ensure cluster string can be quoted +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/120400.yaml b/docs/changelog/120400.yaml new file mode 100644 index 0000000000000..57d40730e0c8d --- /dev/null +++ b/docs/changelog/120400.yaml @@ -0,0 +1,5 @@ +pr: 120400 +summary: "[Inference API] Add node-local rate limiting for the inference API" +area: Machine Learning +type: feature +issues: [] diff --git a/docs/changelog/120483.yaml b/docs/changelog/120483.yaml deleted file mode 100644 index 20da3b9ab4e8d..0000000000000 --- a/docs/changelog/120483.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120483 -summary: Fix NPE on disabled API auth key cache -area: Authentication -type: bug -issues: [] diff --git a/docs/changelog/120542.yaml b/docs/changelog/120542.yaml new file mode 100644 index 0000000000000..9e91146fc3366 --- /dev/null +++ b/docs/changelog/120542.yaml @@ -0,0 +1,6 @@ +pr: 120542 +summary: "Feat: add a user-configurable timeout parameter to the `_resolve/cluster`\ + \ API" +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/120546.yaml b/docs/changelog/120546.yaml new file mode 100644 index 0000000000000..ec89cb1830311 --- /dev/null +++ b/docs/changelog/120546.yaml @@ -0,0 +1,5 @@ +pr: 120546 +summary: Restrict agent entitlements to the system classloader unnamed module +area: Infra/Plugins +type: bug +issues: [] diff --git a/docs/changelog/120551.yaml b/docs/changelog/120551.yaml new file mode 100644 index 0000000000000..171d639be3e89 --- /dev/null +++ b/docs/changelog/120551.yaml @@ -0,0 +1,5 @@ +pr: 120551 +summary: Set default reranker for text similarity reranker to Elastic reranker +area: Ranking +type: enhancement +issues: [] diff --git a/docs/changelog/120573.yaml b/docs/changelog/120573.yaml new file mode 100644 index 0000000000000..33ced06ddf996 --- /dev/null +++ b/docs/changelog/120573.yaml @@ -0,0 +1,5 @@ +pr: 120573 +summary: Optimize `IngestDocument` `FieldPath` allocation +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/120590.yaml b/docs/changelog/120590.yaml deleted file mode 100644 index 56abe44fbce1e..0000000000000 --- a/docs/changelog/120590.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120590 -summary: Map `scope.name` as a dimension -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/120717.yaml b/docs/changelog/120717.yaml deleted file mode 100644 index c5609e7e3df5f..0000000000000 --- a/docs/changelog/120717.yaml +++
/dev/null @@ -1,6 +0,0 @@ -pr: 120717 -summary: Fix LTR rescorer throws 'local model reference is null' on multi-shards index when explained is enabled -area: Ranking -type: bug -issues: - - 120739 diff --git a/docs/changelog/120751.yaml b/docs/changelog/120751.yaml new file mode 100644 index 0000000000000..0c1dffc0e527b --- /dev/null +++ b/docs/changelog/120751.yaml @@ -0,0 +1,5 @@ +pr: 120751 +summary: Adding support for binary embedding type to Cohere service embedding type +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/120774.yaml b/docs/changelog/120774.yaml new file mode 100644 index 0000000000000..8157e1725be83 --- /dev/null +++ b/docs/changelog/120774.yaml @@ -0,0 +1,5 @@ +pr: 120774 +summary: Retry ES|QL node requests on shard level failures +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/120781.yaml b/docs/changelog/120781.yaml deleted file mode 100644 index 67c7d90528d6e..0000000000000 --- a/docs/changelog/120781.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120781 -summary: Add back `keep_alive` to `async_search.submit` rest-api-spec -area: Search -type: bug -issues: [] diff --git a/docs/changelog/120806.yaml b/docs/changelog/120806.yaml new file mode 100644 index 0000000000000..7605e2ba534d9 --- /dev/null +++ b/docs/changelog/120806.yaml @@ -0,0 +1,20 @@ +pr: 120806 +summary: Deprecate certificate based remote cluster security model +area: Security +type: deprecation +issues: [] +deprecation: + title: Deprecate certificate based remote cluster security model + area: Authorization + details: |- + <> is deprecated and will be removed + in a future major version. + Users are encouraged to <>. + The <> is the preferred way to configure remote clusters, + as it allows you to follow security best practices when setting up remote cluster connections + and defining fine-grained access control. + impact: |- + If you have configured remote clusters with the certificate-based security model, you should + <>. + Configuring a remote cluster using <> + generates a warning in the deprecation logs.
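For reference, the migration path this deprecation points to: under the API key based security model, the remote cluster exposes a dedicated remote cluster server interface, and the local cluster stores a cross-cluster API key as a secure setting. A minimal sketch, assuming the documented setting names and a placeholder `my_remote` cluster alias:

[source,yaml]
----
# elasticsearch.yml on the remote cluster: enable the dedicated remote
# cluster server interface (9443 is the documented default port)
remote_cluster_server.enabled: true
remote_cluster.port: 9443
----

On the local cluster, the cross-cluster API key is then added to the keystore (for example, `bin/elasticsearch-keystore add cluster.remote.my_remote.credentials`) before the usual `cluster.remote.my_remote.*` connection settings are configured.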
diff --git a/docs/changelog/120809.yaml b/docs/changelog/120809.yaml deleted file mode 100644 index 30a3736dc93a4..0000000000000 --- a/docs/changelog/120809.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120809 -summary: LTR sometines throw `NullPointerException:` Cannot read field "approximation" - because "top" is null -area: Ranking -type: bug -issues: [] diff --git a/docs/changelog/120821.yaml b/docs/changelog/120821.yaml new file mode 100644 index 0000000000000..403c186d9f102 --- /dev/null +++ b/docs/changelog/120821.yaml @@ -0,0 +1,5 @@ +pr: 120821 +summary: "[Deprecation] Add `transform_ids` to outdated index" +area: Transform +type: enhancement +issues: [] diff --git a/docs/changelog/120824.yaml b/docs/changelog/120824.yaml new file mode 100644 index 0000000000000..603b49338ff69 --- /dev/null +++ b/docs/changelog/120824.yaml @@ -0,0 +1,5 @@ +pr: 120824 +summary: Optimize some per-document hot paths in the geoip processor +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/120842.yaml b/docs/changelog/120842.yaml new file mode 100644 index 0000000000000..98227cf399b56 --- /dev/null +++ b/docs/changelog/120842.yaml @@ -0,0 +1,5 @@ +pr: 120842 +summary: Remove Elastic Inference Service feature flag and deprecated setting +area: Inference +type: enhancement +issues: [] diff --git a/docs/changelog/120852.yaml b/docs/changelog/120852.yaml new file mode 100644 index 0000000000000..90a05aa860f3f --- /dev/null +++ b/docs/changelog/120852.yaml @@ -0,0 +1,5 @@ +pr: 120852 +summary: Correct line and column numbers of missing named parameters +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/120913.yaml b/docs/changelog/120913.yaml new file mode 100644 index 0000000000000..69db6027caa69 --- /dev/null +++ b/docs/changelog/120913.yaml @@ -0,0 +1,5 @@ +pr: 120913 +summary: Automatically rollover legacy .ml-anomalies indices +area: Machine Learning +type: upgrade +issues: [] diff --git a/docs/changelog/120930.yaml b/docs/changelog/120930.yaml deleted file mode 100644 index 376edb7632a0b..0000000000000 --- a/docs/changelog/120930.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120930 -summary: Normalize negative scores for `text_similarity_reranker` retriever -area: Ranking -type: bug -issues: - - 120201 diff --git a/docs/changelog/120937.yaml b/docs/changelog/120937.yaml new file mode 100644 index 0000000000000..6a25690136987 --- /dev/null +++ b/docs/changelog/120937.yaml @@ -0,0 +1,6 @@ +pr: 120937 +summary: Use the system index descriptor in the snapshot blob cache cleanup task +area: Snapshot/Restore +type: bug +issues: + - 120518 diff --git a/docs/changelog/120957.yaml b/docs/changelog/120957.yaml new file mode 100644 index 0000000000000..841ef945ce7ef --- /dev/null +++ b/docs/changelog/120957.yaml @@ -0,0 +1,5 @@ +pr: 120957 +summary: Introduce `AllocationBalancingRoundSummaryService` +area: Allocation +type: enhancement +issues: [] diff --git a/docs/changelog/120974.yaml b/docs/changelog/120974.yaml new file mode 100644 index 0000000000000..ed52eefd9f5f8 --- /dev/null +++ b/docs/changelog/120974.yaml @@ -0,0 +1,6 @@ +pr: 120974 +summary: Tweak `copy_to` handling in synthetic `_source` to account for nested objects +area: Mapping +type: bug +issues: + - 120831 diff --git a/docs/changelog/120997.yaml b/docs/changelog/120997.yaml new file mode 100644 index 0000000000000..6b56578404371 --- /dev/null +++ b/docs/changelog/120997.yaml @@ -0,0 +1,5 @@ +pr: 120997 +summary: Allow `SSHA-256` for API key credential hash +area: Authentication +type: enhancement +issues: [] diff 
--git a/docs/changelog/120998.yaml b/docs/changelog/120998.yaml new file mode 100644 index 0000000000000..4d9a3aa3eb1c4 --- /dev/null +++ b/docs/changelog/120998.yaml @@ -0,0 +1,5 @@ +pr: 120998 +summary: ES|QL `change_point` processing command +area: Machine Learning +type: feature +issues: [] diff --git a/docs/changelog/121048.yaml b/docs/changelog/121048.yaml new file mode 100644 index 0000000000000..e1a9d665315ff --- /dev/null +++ b/docs/changelog/121048.yaml @@ -0,0 +1,5 @@ +pr: 121048 +summary: Updating Inference Update API documentation to have the correct PUT method +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/121049.yaml b/docs/changelog/121049.yaml new file mode 100644 index 0000000000000..760deb62e149b --- /dev/null +++ b/docs/changelog/121049.yaml @@ -0,0 +1,19 @@ +pr: 121049 +summary: Conditionally enable logsdb by default for data streams matching the logs-*-* + pattern. +area: Logs +type: breaking +issues: + - 106489 +breaking: + title: Conditionally enable logsdb by default + area: Logs + details: |- + Logsdb will be enabled by default for data streams matching the logs-*-* pattern. + If upgrading from 8.x to 9.x and data streams matching logs-*-* already exist, + then Logsdb will not be enabled by default. + impact: |- + Logsdb reduces the storage footprint of logs in Elasticsearch, but there are side effects + to take into account, described in the Logsdb docs: + https://www.elastic.co/guide/en/elasticsearch/reference/current/logs-data-stream.html#upgrade-to-logsdb-notes + notable: true diff --git a/docs/changelog/121074.yaml b/docs/changelog/121074.yaml new file mode 100644 index 0000000000000..6e07ab295ea66 --- /dev/null +++ b/docs/changelog/121074.yaml @@ -0,0 +1,5 @@ +pr: 121074 +summary: Implement a `MetricsAware` interface +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/121105.yaml b/docs/changelog/121105.yaml new file mode 100644 index 0000000000000..925d3a036e5c2 --- /dev/null +++ b/docs/changelog/121105.yaml @@ -0,0 +1,5 @@ +pr: 121105 +summary: Mark bbq indices as GA and add rolling upgrade integration tests +area: Vector Search +type: feature +issues: [] diff --git a/docs/changelog/121109.yaml b/docs/changelog/121109.yaml new file mode 100644 index 0000000000000..6492eccbf975a --- /dev/null +++ b/docs/changelog/121109.yaml @@ -0,0 +1,6 @@ +pr: 121109 +summary: Fix propagation of dynamic mapping parameter when applying `copy_to` +area: Mapping +type: bug +issues: + - 113049 diff --git a/docs/changelog/121124.yaml b/docs/changelog/121124.yaml new file mode 100644 index 0000000000000..066145386ecb4 --- /dev/null +++ b/docs/changelog/121124.yaml @@ -0,0 +1,5 @@ +pr: 121124 +summary: Run `TransportGetEnrichPolicyAction` on local node +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/121156.yaml b/docs/changelog/121156.yaml new file mode 100644 index 0000000000000..8f9c5ccdb38c9 --- /dev/null +++ b/docs/changelog/121156.yaml @@ -0,0 +1,5 @@ +pr: 121156 +summary: Remove redundant sorts from execution plan +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/121193.yaml b/docs/changelog/121193.yaml new file mode 100644 index 0000000000000..af45b0656265f --- /dev/null +++ b/docs/changelog/121193.yaml @@ -0,0 +1,18 @@ +pr: 121193 +summary: Enable LOOKUP JOIN in non-snapshot builds +area: ES|QL +type: enhancement +issues: + - 121185 +highlight: + title: Enable LOOKUP JOIN in non-snapshot builds + body: |- + This effectively releases LOOKUP JOIN into tech preview.
Docs will + follow in a separate PR. + + - Enable the lexing/grammar for LOOKUP JOIN in non-snapshot builds. + - Remove the grammar for the unsupported `| JOIN ...` command (without `LOOKUP` as first keyword). Because of the way the lexer modes work, we'd otherwise also have to enable `| JOIN ...` syntax on non-snapshot builds and add additional validation to provide appropriate error messages. + - Remove grammar for `LOOKUP JOIN index AS ...` because qualifiers are not yet supported. Otherwise we'd have to put in additional validation as well to prevent such queries. + + Also fix https://github.com/elastic/elasticsearch/issues/121185 + notable: true diff --git a/docs/changelog/121196.yaml b/docs/changelog/121196.yaml new file mode 100644 index 0000000000000..f5168b1ea436c --- /dev/null +++ b/docs/changelog/121196.yaml @@ -0,0 +1,5 @@ +pr: 121196 +summary: Fix geoip databases index access after system feature migration +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/121207.yaml b/docs/changelog/121207.yaml new file mode 100644 index 0000000000000..ebb9d44d89366 --- /dev/null +++ b/docs/changelog/121207.yaml @@ -0,0 +1,5 @@ +pr: 121207 +summary: "[Inference API] Put back legacy EIS URL setting" +area: Inference +type: bug +issues: [] diff --git a/docs/changelog/121240.yaml b/docs/changelog/121240.yaml new file mode 100644 index 0000000000000..b0ca8e5e614db --- /dev/null +++ b/docs/changelog/121240.yaml @@ -0,0 +1,5 @@ +pr: 121240 +summary: Implement runtime skip_unavailable=true +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/121256.yaml b/docs/changelog/121256.yaml new file mode 100644 index 0000000000000..b4ba7fb3d0149 --- /dev/null +++ b/docs/changelog/121256.yaml @@ -0,0 +1,5 @@ +pr: 121256 +summary: Run `TransportEnrichStatsAction` on local node +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/121260.yaml b/docs/changelog/121260.yaml new file mode 100644 index 0000000000000..40c7487f29b12 --- /dev/null +++ b/docs/changelog/121260.yaml @@ -0,0 +1,5 @@ +pr: 121260 +summary: Introduce a pre-mapping logical plan processing step +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/121324.yaml b/docs/changelog/121324.yaml new file mode 100644 index 0000000000000..d105ea0b46b4c --- /dev/null +++ b/docs/changelog/121324.yaml @@ -0,0 +1,6 @@ +pr: 121324 +summary: Support duplicate suggestions in completion field +area: Suggesters +type: bug +issues: + - 82432 diff --git a/docs/changelog/121325.yaml b/docs/changelog/121325.yaml new file mode 100644 index 0000000000000..9a9edc67d19fa --- /dev/null +++ b/docs/changelog/121325.yaml @@ -0,0 +1,5 @@ +pr: 121325 +summary: '`ReindexDataStreamIndex` bug in assertion caused by reference equality' +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/121327.yaml b/docs/changelog/121327.yaml new file mode 100644 index 0000000000000..fc684ea504f3d --- /dev/null +++ b/docs/changelog/121327.yaml @@ -0,0 +1,5 @@ +pr: 121327 +summary: Reduce Data Loss in System Indices Migration +area: Infra/Core +type: bug +issues: [] diff --git a/docs/changelog/121396.yaml b/docs/changelog/121396.yaml new file mode 100644 index 0000000000000..1d77a8fbb0079 --- /dev/null +++ b/docs/changelog/121396.yaml @@ -0,0 +1,5 @@ +pr: 121396 +summary: Change format for Unified Chat +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/121552.yaml b/docs/changelog/121552.yaml new file mode 100644 index 0000000000000..c12e7615d1245 --- /dev/null +++
b/docs/changelog/121552.yaml @@ -0,0 +1,5 @@ +pr: 121552 +summary: Fix a bug in TOP +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/121556.yaml b/docs/changelog/121556.yaml new file mode 100644 index 0000000000000..d5fad5f37e5c8 --- /dev/null +++ b/docs/changelog/121556.yaml @@ -0,0 +1,5 @@ +pr: 121556 +summary: Enable New Semantic Text Format Only On Newly Created Indices +area: Mapping +type: bug +issues: [] diff --git a/docs/changelog/121568.yaml b/docs/changelog/121568.yaml new file mode 100644 index 0000000000000..80d769967dc2d --- /dev/null +++ b/docs/changelog/121568.yaml @@ -0,0 +1,6 @@ +pr: 121568 +summary: Analyze API to return 400 for wrong custom analyzer +area: Analysis +type: bug +issues: + - 121443 diff --git a/docs/changelog/121667.yaml b/docs/changelog/121667.yaml new file mode 100644 index 0000000000000..2467036832a3e --- /dev/null +++ b/docs/changelog/121667.yaml @@ -0,0 +1,11 @@ +pr: 121667 +summary: Add deprecation warning for flush API +area: Machine Learning +type: deprecation +issues: + - 121506 +deprecation: + title: Add deprecation warning for flush API + area: REST API + details: The anomaly detection job flush API is deprecated since it is only required for the post data API, which has been deprecated since 7.11.0. + impact: This should have a minimal impact on users as the flush API is only required for the post data API, which has been deprecated since 7.11.0. diff --git a/docs/changelog/121720.yaml b/docs/changelog/121720.yaml new file mode 100644 index 0000000000000..40dcfaeb770dd --- /dev/null +++ b/docs/changelog/121720.yaml @@ -0,0 +1,5 @@ +pr: 121720 +summary: Skip fetching _inference_fields field in legacy semantic_text format +area: Search +type: bug +issues: [] diff --git a/docs/changelog/121727.yaml b/docs/changelog/121727.yaml new file mode 100644 index 0000000000000..80c0a5eae4335 --- /dev/null +++ b/docs/changelog/121727.yaml @@ -0,0 +1,7 @@ +pr: 121727 +summary: Copy metrics and `default_metric` properties when downsampling `aggregate_metric_double` +area: Downsampling +type: bug +issues: + - 119696 + - 96076 diff --git a/docs/changelog/121731.yaml b/docs/changelog/121731.yaml new file mode 100644 index 0000000000000..9fc117610a490 --- /dev/null +++ b/docs/changelog/121731.yaml @@ -0,0 +1,21 @@ +pr: 121731 +summary: Remove TLSv1.1 from default protocols +area: TLS +type: breaking +issues: [] +breaking: + title: Remove TLSv1.1 from default protocols + area: Cluster and node setting + details: "TLSv1.1 is no longer enabled by default. Prior to version 9.0, Elasticsearch\ \ would attempt to enable TLSv1.1 if the JDK supported it. In most cases, including\ \ all cases where Elasticsearch 8 was running with the bundled JDK, the JDK would\ \ not support TLSv1.1, so that protocol would not be available in Elasticsearch.\ \ However, if Elasticsearch was running on an old JDK or a JDK that has been\ \ reconfigured to support TLSv1.1, then the protocol would automatically be available\ \ within Elasticsearch. As of Elasticsearch 9.0, this is no longer true. If you\ \ wish to enable TLSv1.1 then you must enable it within the JDK and also enable\ \ it within Elasticsearch by using the `ssl.supported_protocols` setting." + impact: "Most users will not be impacted.
If your Elasticsearch 8 cluster was using\ \ a custom JDK and you relied on TLSv1.1, then you will need to explicitly enable\ \ TLSv1.1 within Elasticsearch (as well as enabling it within your JDK)" + notable: false diff --git a/docs/changelog/121805.yaml b/docs/changelog/121805.yaml new file mode 100644 index 0000000000000..7d0f3a96221ae --- /dev/null +++ b/docs/changelog/121805.yaml @@ -0,0 +1,5 @@ +pr: 121805 +summary: Support subset of metrics in aggregate metric double +area: "ES|QL" +type: enhancement +issues: [] diff --git a/docs/changelog/121821.yaml b/docs/changelog/121821.yaml new file mode 100644 index 0000000000000..1e8edd09dcd9a --- /dev/null +++ b/docs/changelog/121821.yaml @@ -0,0 +1,6 @@ +pr: 121821 +summary: Fix get all inference endpoints not returning multiple endpoints sharing a model + deployment +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/121843.yaml b/docs/changelog/121843.yaml new file mode 100644 index 0000000000000..85b19e317a09c --- /dev/null +++ b/docs/changelog/121843.yaml @@ -0,0 +1,6 @@ +pr: 121843 +summary: Fix async stop sometimes not properly collecting result +area: ES|QL +type: bug +issues: + - 121249 diff --git a/docs/changelog/121850.yaml b/docs/changelog/121850.yaml new file mode 100644 index 0000000000000..b6c5ba2e03fe8 --- /dev/null +++ b/docs/changelog/121850.yaml @@ -0,0 +1,5 @@ +pr: 121850 +summary: Take named parameters for identifier and pattern out of snapshot +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/121911.yaml b/docs/changelog/121911.yaml new file mode 100644 index 0000000000000..d30e96abc21c2 --- /dev/null +++ b/docs/changelog/121911.yaml @@ -0,0 +1,5 @@ +pr: 121911 +summary: Fix ENRICH validation for use of wildcards +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/122011.yaml b/docs/changelog/122011.yaml new file mode 100644 index 0000000000000..2cc9a37b01616 --- /dev/null +++ b/docs/changelog/122011.yaml @@ -0,0 +1,5 @@ +pr: 122011 +summary: Fix JOIN command validation (not supported) +area: EQL +type: bug +issues: [] diff --git a/docs/changelog/122066.yaml b/docs/changelog/122066.yaml new file mode 100644 index 0000000000000..79a9129bd542a --- /dev/null +++ b/docs/changelog/122066.yaml @@ -0,0 +1,5 @@ +pr: 122066 +summary: Adding elser default endpoint for EIS +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/122074.yaml b/docs/changelog/122074.yaml new file mode 100644 index 0000000000000..21e171d0eb5e2 --- /dev/null +++ b/docs/changelog/122074.yaml @@ -0,0 +1,8 @@ +pr: 122074 +summary: If the Transform is configured to write to an alias as its destination index + and the delete_dest_index parameter is set to true, the Delete API will now + delete the write index backing the alias +area: Transform +type: bug +issues: + - 121913 diff --git a/docs/changelog/122199.yaml b/docs/changelog/122199.yaml new file mode 100644 index 0000000000000..172ae900bdabb --- /dev/null +++ b/docs/changelog/122199.yaml @@ -0,0 +1,5 @@ +pr: 122199 +summary: Fix issues that prevent using search-only snapshots for indices that use index sorting. This includes Logsdb and time series indices.
+area: Logs +type: bug +issues: [] diff --git a/docs/changelog/122224.yaml b/docs/changelog/122224.yaml new file mode 100644 index 0000000000000..41ae8c6578600 --- /dev/null +++ b/docs/changelog/122224.yaml @@ -0,0 +1,6 @@ +pr: 122224 +summary: Enable the use of nested field type with index.mode=time_series +area: Mapping +type: enhancement +issues: + - 120874 diff --git a/docs/changelog/122246.yaml b/docs/changelog/122246.yaml new file mode 100644 index 0000000000000..c1e90f3423117 --- /dev/null +++ b/docs/changelog/122246.yaml @@ -0,0 +1,5 @@ +pr: 122246 +summary: Ensure removal of index blocks does not leave key with null value +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/122257.yaml b/docs/changelog/122257.yaml new file mode 100644 index 0000000000000..24078170eb6b6 --- /dev/null +++ b/docs/changelog/122257.yaml @@ -0,0 +1,5 @@ +pr: 122257 +summary: Revive inlinestats +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/122272.yaml b/docs/changelog/122272.yaml new file mode 100644 index 0000000000000..62e5769179402 --- /dev/null +++ b/docs/changelog/122272.yaml @@ -0,0 +1,6 @@ +pr: 122272 +summary: "[Inference API] Rename `model_id` prop to model in EIS sparse inference\ + \ request body" +area: Inference +type: enhancement +issues: [] diff --git a/docs/changelog/122278.yaml b/docs/changelog/122278.yaml new file mode 100644 index 0000000000000..529d17c729c7f --- /dev/null +++ b/docs/changelog/122278.yaml @@ -0,0 +1,5 @@ +pr: 122278 +summary: Fix serialising the inference update request +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/122280.yaml b/docs/changelog/122280.yaml new file mode 100644 index 0000000000000..93a7e4e1aaf57 --- /dev/null +++ b/docs/changelog/122280.yaml @@ -0,0 +1,5 @@ +pr: 122280 +summary: Use `FallbackSyntheticSourceBlockLoader` for number fields +area: Mapping +type: enhancement +issues: [] diff --git a/docs/changelog/122326.yaml b/docs/changelog/122326.yaml new file mode 100644 index 0000000000000..91c71041d58fc --- /dev/null +++ b/docs/changelog/122326.yaml @@ -0,0 +1,5 @@ +pr: 122326 +summary: System Index Migration Failure Results in a Non-Recoverable State +area: Infra/Core +type: bug +issues: [] diff --git a/docs/changelog/122357.yaml b/docs/changelog/122357.yaml new file mode 100644 index 0000000000000..7648002c9356f --- /dev/null +++ b/docs/changelog/122357.yaml @@ -0,0 +1,6 @@ +pr: 122357 +summary: Handle search timeout in `SuggestPhase` +area: Search +type: bug +issues: + - 122186 diff --git a/docs/changelog/122365.yaml b/docs/changelog/122365.yaml new file mode 100644 index 0000000000000..1229cd8754ca6 --- /dev/null +++ b/docs/changelog/122365.yaml @@ -0,0 +1,5 @@ +pr: 122365 +summary: Fix handling of auto expand replicas for stateless indices +area: "Search" +type: bug +issues: [] diff --git a/docs/changelog/122417.yaml b/docs/changelog/122417.yaml new file mode 100644 index 0000000000000..f9e33df2a523c --- /dev/null +++ b/docs/changelog/122417.yaml @@ -0,0 +1,6 @@ +pr: 122417 +summary: Fix listener leak in exchange service +area: ES|QL +type: bug +issues: + - 122271 diff --git a/docs/changelog/122425.yaml b/docs/changelog/122425.yaml new file mode 100644 index 0000000000000..a0e590dcdc36c --- /dev/null +++ b/docs/changelog/122425.yaml @@ -0,0 +1,5 @@ +pr: 122425 +summary: Fix synthetic source bug that would mishandle nested `dense_vector` fields +area: Mapping +type: bug +issues: [] diff --git a/docs/changelog/122427.yaml b/docs/changelog/122427.yaml new file mode 100644 index 
0000000000000..2444a0ec894ab --- /dev/null +++ b/docs/changelog/122427.yaml @@ -0,0 +1,5 @@ +pr: 122427 +summary: Improve size limiting string message +area: Infra/Core +type: enhancement +issues: [] diff --git a/docs/changelog/122496.yaml b/docs/changelog/122496.yaml new file mode 100644 index 0000000000000..37ce70977112b --- /dev/null +++ b/docs/changelog/122496.yaml @@ -0,0 +1,5 @@ +pr: 122496 +summary: Deduplicate `IngestStats` and `IngestStats.Stats` identity records when deserializing +area: Ingest Node +type: bug +issues: [] diff --git a/docs/community-clients/index.asciidoc b/docs/community-clients/index.asciidoc index ebde04b02f18a..cb8d7f58c612f 100644 --- a/docs/community-clients/index.asciidoc +++ b/docs/community-clients/index.asciidoc @@ -205,10 +205,6 @@ client]. Also see the {client}/ruby-api/current/index.html[official Elasticsearch Ruby client]. -* https://github.com/printercu/elastics-rb[elastics]: - Tiny client with built-in zero-downtime migrations and ActiveRecord integration. - **- Last commit more than a year ago** - * https://github.com/toptal/chewy[chewy]: An ODM and wrapper for the official Elasticsearch client. @@ -218,6 +214,13 @@ Also see the {client}/ruby-api/current/index.html[official Elasticsearch Ruby cl * https://github.com/artsy/estella[Estella]: Make your Ruby models searchable. +* https://github.com/mrkamel/search_flip[SearchFlip]: + Full-Featured Elasticsearch Ruby Client with a Chainable DSL. + +* https://github.com/printercu/elastics-rb[elastics]: + Tiny client with built-in zero-downtime migrations and ActiveRecord integration. + **- Last commit more than a year ago** + [[rust]] == Rust @@ -267,4 +270,4 @@ client]. * https://github.com/reactiverse/elasticsearch-client[elasticsearch-client]: An Elasticsearch client for Eclipse Vert.x - **- Last commit more than a year ago** \ No newline at end of file + **- Last commit more than a year ago** diff --git a/docs/internal/Versioning.md b/docs/internal/Versioning.md index f0f730f618259..474278e873922 100644 --- a/docs/internal/Versioning.md +++ b/docs/internal/Versioning.md @@ -35,19 +35,19 @@ Every change to the transport protocol is represented by a new transport version higher than all previous transport versions, which then becomes the highest version recognized by that build of Elasticsearch. The version ids are stored as constants in the `TransportVersions` class. -Each id has a standard pattern `M_NNN_SS_P`, where: +Each id has a standard pattern `M_NNN_S_PP`, where: * `M` is the major version * `NNN` is an incrementing id -* `SS` is used in subsidiary repos amending the default transport protocol -* `P` is used for patches and backports +* `S` is used in subsidiary repos amending the default transport protocol +* `PP` is used for patches and backports When you make a change to the serialization form of any object, you need to create a new sequential constant in `TransportVersions`, introduced in the same PR that adds the change, that increments the `NNN` component from the previous highest version, with other components set to zero. -For example, if the previous version number is `8_413_00_1`, -the next version number should be `8_414_00_0`. +For example, if the previous version number is `8_413_0_01`, +the next version number should be `8_414_0_00`. Once you have defined your constant, you then need to use it in serialization code. If the transport version is at or above the new id, @@ -166,7 +166,7 @@ also has that change, and knows about the patch backport ids and what they mean. 
Index version is a single incrementing version number for the index data format, metadata, and associated mappings. It is declared the same way as the -transport version - with the pattern `M_NNN_SS_P`, for the major version, version id, +transport version - with the pattern `M_NNN_S_PP`, for the major version, version id, subsidiary version id, and patch number respectively. Index version is stored in index metadata when an index is created, diff --git a/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc b/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc index 1fb527cd645f0..86e8355b69882 100644 --- a/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc @@ -6,9 +6,14 @@ preview::[] -The time series aggregation queries data created using a time series index. This is typically data such as metrics +The time series aggregation queries data created using a <>. This is typically data such as metrics or other data streams with a time component, and requires creating an index using the time series mode. +[NOTE] +==== +Refer to the <> to learn more about the key differences from regular data streams. +==== + ////////////////////////// Creating a time series mapping diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index f676644c4ec48..3f8553c3b96d9 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -2,12 +2,14 @@ [[aliases]] = Aliases -An alias is a secondary name for a group of data streams or indices. Most {es} +An alias points to one or more indices or data streams. Most {es} APIs accept an alias in place of a data stream or index name. -You can change the data streams or indices of an alias at any time. If you use -aliases in your application's {es} requests, you can reindex data with no -downtime or changes to your app's code. +Aliases enable you to: + +* Query multiple indices/data streams together with a single name +* Change which indices/data streams your application uses in real time +* <> data without downtime [discrete] [[alias-types]] diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 28933eb75050d..545b50df009d3 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -254,6 +254,16 @@ as they contain data essential to the operation of the system. IMPORTANT: Direct access to system indices is deprecated and will no longer be allowed in a future major version. +To view system indices within the cluster: + +[source,console] +-------------------------------------------------- +GET _cluster/state/metadata?filter_path=metadata.indices.*.system +-------------------------------------------------- + +WARNING: When overwriting the current cluster state, system indices should be restored +as part of their {ref}/snapshot-restore.html#feature-state[feature state]. + [discrete] [[api-conventions-parameters]] === Parameters diff --git a/docs/reference/cluster/remote-info.asciidoc b/docs/reference/cluster/remote-info.asciidoc index 691acafd8ddbe..e91ccc4d8f4a1 100644 --- a/docs/reference/cluster/remote-info.asciidoc +++ b/docs/reference/cluster/remote-info.asciidoc @@ -26,10 +26,18 @@ Returns configured remote cluster information. [[cluster-remote-info-api-desc]] ==== {api-description-title} -The cluster remote info API allows you to retrieve all of the configured -remote cluster information.
It returns connection and endpoint information keyed +The cluster remote info API allows you to retrieve information about configured +remote clusters. It returns connection and endpoint information keyed by the configured remote cluster alias. +TIP: This API returns information that reflects current state on the local cluster. +The `connected` field does not necessarily reflect whether a remote cluster is +down or unavailable, only whether there is currently an open connection to it. +Elasticsearch does not spontaneously try to reconnect to a disconnected remote +cluster. To trigger a reconnection, attempt a <>, +<>, or try the +<> endpoint. + [[cluster-remote-info-api-response-body]] ==== {api-response-body-title} @@ -39,7 +47,10 @@ by the configured remote cluster alias. `proxy`. `connected`:: - True if there is at least one connection to the remote cluster. + True if there is at least one open connection to the remote cluster. When + false, it means that the cluster no longer has an open connection to the + remote cluster. It does not necessarily mean that the remote cluster is + down or unavailable, just that at some point a connection was lost. `initial_connect_timeout`:: The initial connect timeout for remote cluster connections. diff --git a/docs/reference/data-streams/logs.asciidoc b/docs/reference/data-streams/logs.asciidoc index 7058cfe51496f..797efb7bef945 100644 --- a/docs/reference/data-streams/logs.asciidoc +++ b/docs/reference/data-streams/logs.asciidoc @@ -237,3 +237,9 @@ The `logsdb` index mode uses the following settings: * **`index.mapping.ignore_above`**: `8191` * **`index.mapping.total_fields.ignore_dynamic_beyond_limit`**: `true` + +[discrete] +[[upgrade-to-logsdb-notes]] +=== Notes about upgrading to Logsdb + +TODO: add notes. diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index ae9ae8fe73fc6..62201f5748b7d 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -307,23 +307,19 @@ POST test/_update/1 [discrete] ===== Upsert -If the document does not already exist, the contents of the `upsert` element -are inserted as a new document. If the document exists, the -`script` is executed: +An upsert operation lets you update an existing document or insert a new one if it doesn't exist, in a single request. + +In this example, if the product with ID `1` exists, its price will be updated to `100`. If the product does not exist, a new document with ID `1` and a price of `50` will be inserted. [source,console] ---- -POST test/_update/1 +POST /test/_update/1 { - "script": { - "source": "ctx._source.counter += params.count", - "lang": "painless", - "params": { - "count": 4 - } + "doc": { + "product_price": 100 }, "upsert": { - "counter": 1 + "product_price": 50 } } ---- diff --git a/docs/reference/esql/esql-across-clusters.asciidoc b/docs/reference/esql/esql-across-clusters.asciidoc index c12865bad6162..91e1ff5255034 100644 --- a/docs/reference/esql/esql-across-clusters.asciidoc +++ b/docs/reference/esql/esql-across-clusters.asciidoc @@ -39,6 +39,8 @@ If you're using the API key authentication method, you'll see the `"cluster_cred [[esql-ccs-security-model-certificate]] ===== TLS certificate authentication +deprecated::[9.0.0, "Use <> instead."] + TLS certificate authentication secures remote clusters with mutual TLS. This could be the preferred model when a single administrator has full control over both clusters. We generally recommend that roles and their privileges be identical in both clusters. 
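The reconnection behavior described in the remote info notes above can be checked directly with the cluster remote info API; a minimal illustration using the standard endpoint:

[source,console]
----
GET /_remote/info
----

In the response, `connected: false` only indicates that no connection is currently open, not that the remote cluster is down or unavailable; issuing a cross-cluster search or calling the resolve cluster endpoint triggers a reconnection attempt.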
diff --git a/docs/reference/esql/esql-async-query-stop-api.asciidoc b/docs/reference/esql/esql-async-query-stop-api.asciidoc index dba5282d224ed..1eb6711ae5840 100644 --- a/docs/reference/esql/esql-async-query-stop-api.asciidoc +++ b/docs/reference/esql/esql-async-query-stop-api.asciidoc @@ -23,7 +23,7 @@ field set to `true`. [source,console] ---- -POST /query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=/stop +POST /_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=/stop ---- // TEST[skip: no access to query ID] diff --git a/docs/reference/esql/functions/kibana/definition/abs.json b/docs/reference/esql/functions/kibana/definition/abs.json index e875d8200715d..888ebf6386401 100644 --- a/docs/reference/esql/functions/kibana/definition/abs.json +++ b/docs/reference/esql/functions/kibana/definition/abs.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "abs", "description" : "Returns the absolute value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/acos.json b/docs/reference/esql/functions/kibana/definition/acos.json index f86b1a5552a6f..5b6a5b023f48d 100644 --- a/docs/reference/esql/functions/kibana/definition/acos.json +++ b/docs/reference/esql/functions/kibana/definition/acos.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "acos", "description" : "Returns the arccosine of `n` as an angle, expressed in radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/asin.json b/docs/reference/esql/functions/kibana/definition/asin.json index 4063ffcba6140..293a66a21ab28 100644 --- a/docs/reference/esql/functions/kibana/definition/asin.json +++ b/docs/reference/esql/functions/kibana/definition/asin.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "asin", "description" : "Returns the arcsine of the input\nnumeric expression as an angle, expressed in radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/atan.json b/docs/reference/esql/functions/kibana/definition/atan.json index 1b2a3a1860bc2..afa380ca1d7fa 100644 --- a/docs/reference/esql/functions/kibana/definition/atan.json +++ b/docs/reference/esql/functions/kibana/definition/atan.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "atan", "description" : "Returns the arctangent of the input\nnumeric expression as an angle, expressed in radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/atan2.json b/docs/reference/esql/functions/kibana/definition/atan2.json index 9b67c07d8d73b..8e12198cb60ee 100644 --- a/docs/reference/esql/functions/kibana/definition/atan2.json +++ b/docs/reference/esql/functions/kibana/definition/atan2.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "atan2", "description" : "The angle between the positive x-axis and the ray from the\norigin to the point (x , y) in the Cartesian plane, expressed in radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/bit_length.json b/docs/reference/esql/functions/kibana/definition/bit_length.json index 0c75b76cdbbfb..25a032044ec9c 100644 --- a/docs/reference/esql/functions/kibana/definition/bit_length.json +++ b/docs/reference/esql/functions/kibana/definition/bit_length.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "bit_length", "description" : "Returns the bit length of a string.", "note" : "All strings are in UTF-8, so a single character can use multiple bytes.", diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json index f9c7f2f27d6f9..990f102bac16f 100644 --- a/docs/reference/esql/functions/kibana/definition/bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "grouping", "name" : "bucket", "description" : "Creates groups of values - buckets - out of a datetime or numeric input.\nThe size of the buckets can either be provided directly, or chosen based on a recommended count and values range.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/byte_length.json b/docs/reference/esql/functions/kibana/definition/byte_length.json index 60f439b9d8133..6d1a91813221b 100644 --- a/docs/reference/esql/functions/kibana/definition/byte_length.json +++ b/docs/reference/esql/functions/kibana/definition/byte_length.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "byte_length", "description" : "Returns the byte length of a string.", "note" : "All strings are in UTF-8, so a single character can use multiple bytes.", diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index 51693d9d30660..4e2d4187712e3 100644 --- a/docs/reference/esql/functions/kibana/definition/case.json +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "case", "description" : "Accepts pairs of conditions and values. The function returns the value that\nbelongs to the first condition that evaluates to `true`.\n\nIf the number of arguments is odd, the last argument is the default value which\nis returned when no condition matches. 
If the number of arguments is even, and\nno condition matches, the function returns `null`.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/categorize.json b/docs/reference/esql/functions/kibana/definition/categorize.json index ed5fa15232b85..1b7f20405756b 100644 --- a/docs/reference/esql/functions/kibana/definition/categorize.json +++ b/docs/reference/esql/functions/kibana/definition/categorize.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "grouping", "name" : "categorize", "description" : "Groups text messages into categories of similarly formatted text values.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/cbrt.json b/docs/reference/esql/functions/kibana/definition/cbrt.json index ce23551f75e5f..f146a864929a8 100644 --- a/docs/reference/esql/functions/kibana/definition/cbrt.json +++ b/docs/reference/esql/functions/kibana/definition/cbrt.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "cbrt", "description" : "Returns the cube root of a number. The input can be any numeric value, the return value is always a double.\nCube roots of infinities are null.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ceil.json b/docs/reference/esql/functions/kibana/definition/ceil.json index 15e7bafd1fa5d..4a9e24e9094e8 100644 --- a/docs/reference/esql/functions/kibana/definition/ceil.json +++ b/docs/reference/esql/functions/kibana/definition/ceil.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "ceil", "description" : "Round a number up to the nearest integer.", "note" : "This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to Math.ceil.", diff --git a/docs/reference/esql/functions/kibana/definition/cidr_match.json b/docs/reference/esql/functions/kibana/definition/cidr_match.json index dc5d6773f69e6..9e988623c0fd6 100644 --- a/docs/reference/esql/functions/kibana/definition/cidr_match.json +++ b/docs/reference/esql/functions/kibana/definition/cidr_match.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "cidr_match", "description" : "Returns true if the provided IP is contained in one of the provided CIDR blocks.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index c929323397c9b..a507aea3fd0d9 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "coalesce", "description" : "Returns the first of its arguments that is not null. 
If all arguments are null, it returns `null`.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/concat.json b/docs/reference/esql/functions/kibana/definition/concat.json index d3e9f0852bc13..b41ebba40b58e 100644 --- a/docs/reference/esql/functions/kibana/definition/concat.json +++ b/docs/reference/esql/functions/kibana/definition/concat.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "concat", "description" : "Concatenates two or more strings.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/cos.json b/docs/reference/esql/functions/kibana/definition/cos.json index 1b9ca2f1d4867..8922dd6db555c 100644 --- a/docs/reference/esql/functions/kibana/definition/cos.json +++ b/docs/reference/esql/functions/kibana/definition/cos.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "cos", "description" : "Returns the cosine of an angle.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/cosh.json b/docs/reference/esql/functions/kibana/definition/cosh.json index 77d9853d34e44..458f4ecf04554 100644 --- a/docs/reference/esql/functions/kibana/definition/cosh.json +++ b/docs/reference/esql/functions/kibana/definition/cosh.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "cosh", "description" : "Returns the hyperbolic cosine of a number.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_diff.json b/docs/reference/esql/functions/kibana/definition/date_diff.json index 2738ec8390226..dab7f7e0ef2e3 100644 --- a/docs/reference/esql/functions/kibana/definition/date_diff.json +++ b/docs/reference/esql/functions/kibana/definition/date_diff.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_diff", "description" : "Subtracts the `startTimestamp` from the `endTimestamp` and returns the difference in multiples of `unit`.\nIf `startTimestamp` is later than the `endTimestamp`, negative values are returned.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_extract.json b/docs/reference/esql/functions/kibana/definition/date_extract.json index 0ababf80d9137..1778d99dcf342 100644 --- a/docs/reference/esql/functions/kibana/definition/date_extract.json +++ b/docs/reference/esql/functions/kibana/definition/date_extract.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
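
COALESCE's null-skipping behavior in a self-contained ES|QL example (values chosen for illustration):

  ROW a = null, b = "b"
  | EVAL first = COALESCE(a, b, "fallback")

first evaluates to "b", the leftmost non-null argument; it would be "fallback" only if both a and b were null.
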
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_extract", "description" : "Extracts parts of a date, like year, month, day, hour.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_format.json b/docs/reference/esql/functions/kibana/definition/date_format.json index f6f48e9df82b0..633e20444d771 100644 --- a/docs/reference/esql/functions/kibana/definition/date_format.json +++ b/docs/reference/esql/functions/kibana/definition/date_format.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_format", "description" : "Returns a string representation of a date, in the provided format.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_parse.json b/docs/reference/esql/functions/kibana/definition/date_parse.json index 77247b332c6f6..8e2f15efce829 100644 --- a/docs/reference/esql/functions/kibana/definition/date_parse.json +++ b/docs/reference/esql/functions/kibana/definition/date_parse.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_parse", "description" : "Returns a date by parsing the second argument using the format specified in the first argument.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json index cdda984a0ce7e..023298d0f8b53 100644 --- a/docs/reference/esql/functions/kibana/definition/date_trunc.json +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "date_trunc", "description" : "Rounds down a date to the closest interval.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/e.json b/docs/reference/esql/functions/kibana/definition/e.json index e5941b50c00a6..15bcb0572e2a8 100644 --- a/docs/reference/esql/functions/kibana/definition/e.json +++ b/docs/reference/esql/functions/kibana/definition/e.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "e", "description" : "Returns Euler's number.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ends_with.json b/docs/reference/esql/functions/kibana/definition/ends_with.json index 5c2f721c33442..eb2e0268214ad 100644 --- a/docs/reference/esql/functions/kibana/definition/ends_with.json +++ b/docs/reference/esql/functions/kibana/definition/ends_with.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
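
DATE_TRUNC rounds a timestamp down to the start of its interval, which makes it a common companion to STATS. An illustrative query, assuming a hypothetical events index with an @timestamp field:

  FROM events
  | EVAL day = DATE_TRUNC(1 day, @timestamp)
  | STATS events_per_day = COUNT(*) BY day
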
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "ends_with", "description" : "Returns a boolean that indicates whether a keyword string ends with another string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/exp.json b/docs/reference/esql/functions/kibana/definition/exp.json index 4df40ce467d80..186cde72c7999 100644 --- a/docs/reference/esql/functions/kibana/definition/exp.json +++ b/docs/reference/esql/functions/kibana/definition/exp.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "exp", "description" : "Returns the value of e raised to the power of the given number.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/floor.json b/docs/reference/esql/functions/kibana/definition/floor.json index 1d1c961bfe2b2..6e2deb3dce567 100644 --- a/docs/reference/esql/functions/kibana/definition/floor.json +++ b/docs/reference/esql/functions/kibana/definition/floor.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "floor", "description" : "Round a number down to the nearest integer.", "note" : "This is a noop for `long` (including unsigned) and `integer`.\nFor `double` this picks the closest `double` value to the integer\nsimilar to Math.floor.", diff --git a/docs/reference/esql/functions/kibana/definition/from_base64.json b/docs/reference/esql/functions/kibana/definition/from_base64.json index 7580b817031a5..a94f4a338cb08 100644 --- a/docs/reference/esql/functions/kibana/definition/from_base64.json +++ b/docs/reference/esql/functions/kibana/definition/from_base64.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "from_base64", "description" : "Decode a base64 string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index 077100317dfca..af78ca3d02b5d 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "greatest", "description" : "Returns the maximum value from multiple columns. This is similar to <>\nexcept it is intended to run on multiple columns at once.", "note" : "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`.", diff --git a/docs/reference/esql/functions/kibana/definition/hash.json b/docs/reference/esql/functions/kibana/definition/hash.json index dbf4a2542afc5..93ee99bf3e9c9 100644 --- a/docs/reference/esql/functions/kibana/definition/hash.json +++ b/docs/reference/esql/functions/kibana/definition/hash.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
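
GREATEST compares values across columns within each row, which is what the note about keyword and boolean behavior refers to. A self-contained ES|QL sketch:

  ROW a = 10, b = 20
  | EVAL g = GREATEST(a, b)

g is 20 here; on keyword columns the same call would return the string that sorts last alphabetically.
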
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "hash", "description" : "Computes the hash of the input using various algorithms such as MD5, SHA, SHA-224, SHA-256, SHA-384, SHA-512.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/hypot.json b/docs/reference/esql/functions/kibana/definition/hypot.json index 06971f07a3585..a71f318a286b1 100644 --- a/docs/reference/esql/functions/kibana/definition/hypot.json +++ b/docs/reference/esql/functions/kibana/definition/hypot.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "hypot", "description" : "Returns the hypotenuse of two numbers. The input can be any numeric values, the return value is always a double.\nHypotenuses of infinities are null.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ip_prefix.json b/docs/reference/esql/functions/kibana/definition/ip_prefix.json index bbf0702b325e9..0d039a784a7bb 100644 --- a/docs/reference/esql/functions/kibana/definition/ip_prefix.json +++ b/docs/reference/esql/functions/kibana/definition/ip_prefix.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "ip_prefix", "description" : "Truncates an IP to a given prefix length.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/kql.json b/docs/reference/esql/functions/kibana/definition/kql.json index 440786ec63e77..ff0f854f20fa5 100644 --- a/docs/reference/esql/functions/kibana/definition/kql.json +++ b/docs/reference/esql/functions/kibana/definition/kql.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "kql", "description" : "Performs a KQL query. Returns true if the provided KQL query string matches the row.", "signatures" : [ @@ -30,7 +30,7 @@ } ], "examples" : [ - "FROM books \n| WHERE KQL(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" + "FROM books \n| WHERE KQL(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5" ], "preview" : true, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 18ec65c60f475..30f27b9be0b07 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "least", "description" : "Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/left.json b/docs/reference/esql/functions/kibana/definition/left.json index 9e226d6cde59b..b367dc9655ec5 100644 --- a/docs/reference/esql/functions/kibana/definition/left.json +++ b/docs/reference/esql/functions/kibana/definition/left.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
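
IP_PREFIX takes separate prefix lengths for IPv4 and IPv6 values, since a single ip column may hold both. A self-contained ES|QL sketch:

  ROW ip = TO_IP("1.2.3.4")
  | EVAL subnet = IP_PREFIX(ip, 24, 0)

subnet evaluates to 1.2.3.0; the second and third arguments are the prefix lengths applied to IPv4 and IPv6 inputs respectively.
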
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "left", "description" : "Returns the substring that extracts 'length' chars from 'string' starting from the left.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/length.json b/docs/reference/esql/functions/kibana/definition/length.json index bc26acde744f5..c2eca63e4e1dd 100644 --- a/docs/reference/esql/functions/kibana/definition/length.json +++ b/docs/reference/esql/functions/kibana/definition/length.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "length", "description" : "Returns the character length of a string.", "note" : "All strings are in UTF-8, so a single character can use multiple bytes.", diff --git a/docs/reference/esql/functions/kibana/definition/locate.json b/docs/reference/esql/functions/kibana/definition/locate.json index 6278cb5e33a00..0322ec1945aa7 100644 --- a/docs/reference/esql/functions/kibana/definition/locate.json +++ b/docs/reference/esql/functions/kibana/definition/locate.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "locate", "description" : "Returns an integer that indicates the position of a keyword substring within another string.\nReturns `0` if the substring cannot be found.\nNote that string positions start from `1`.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/log.json b/docs/reference/esql/functions/kibana/definition/log.json index 369503c1b8e4a..c75349a89630d 100644 --- a/docs/reference/esql/functions/kibana/definition/log.json +++ b/docs/reference/esql/functions/kibana/definition/log.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "log", "description" : "Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double.\n\nLogs of zero, negative numbers, and base of one return `null` as well as a warning.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/log10.json b/docs/reference/esql/functions/kibana/definition/log10.json index a958e5caec8a8..6cc089ae49c4f 100644 --- a/docs/reference/esql/functions/kibana/definition/log10.json +++ b/docs/reference/esql/functions/kibana/definition/log10.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "log10", "description" : "Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double.\n\nLogs of 0 and negative numbers return `null` as well as a warning.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/ltrim.json b/docs/reference/esql/functions/kibana/definition/ltrim.json index 6d992b9db7b2c..f0a58dd4f9aea 100644 --- a/docs/reference/esql/functions/kibana/definition/ltrim.json +++ b/docs/reference/esql/functions/kibana/definition/ltrim.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
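
LOCATE's 1-based indexing, described above, in a self-contained ES|QL sketch:

  ROW s = "hello world"
  | EVAL pos = LOCATE(s, "world")

pos is 7; it would be 0 if the substring were absent.
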
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "ltrim", "description" : "Removes leading whitespaces from a string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json index 23a81ba34e387..4844382fe04cf 100644 --- a/docs/reference/esql/functions/kibana/definition/match.json +++ b/docs/reference/esql/functions/kibana/definition/match.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "match", "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on fields from the text family like <> and <>,\nas well as other field types like keyword, boolean, dates, and numeric types.\n\nMatch can use <> to specify additional options for the match query.\nAll <> are supported.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", "signatures" : [ @@ -20,10 +20,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
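
MATCH's optional options argument, typed here as function_named_parameters, is passed as a trailing map literal. An illustrative query against the same books index used by the KQL example earlier, combining the fuzziness and operator options listed in the mapParams (exact option support varies by version, so treat this as a sketch):

  FROM books
  | WHERE MATCH(author, "Faulkner", {"fuzziness": "AUTO", "operator": "AND"})
  | KEEP book_no, author
  | LIMIT 5
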
} ], "variadic" : false, @@ -45,10 +45,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -70,10 +70,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -95,10 +95,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -120,10 +120,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -145,10 +145,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -170,10 +170,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -195,10 +195,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -220,10 +220,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -245,10 +245,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -270,10 +270,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -295,10 +295,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -320,10 +320,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -345,10 +345,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -370,10 +370,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -395,10 +395,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
} ], "variadic" : false, @@ -420,10 +420,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -445,10 +445,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -470,10 +470,10 @@ }, { "name" : "options", - "type" : "function named parameters", + "type" : "function_named_parameters", "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", "optional" : true, - "description" : "Match additional options as <>. See <> for more information." + "description" : "(Optional) Match additional options as <>. See <> for more information." 
         }
       ],
       "variadic" : false,
@@ -495,10 +495,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -520,10 +520,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -545,10 +545,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -570,10 +570,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -595,10 +595,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -620,10 +620,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -645,10 +645,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -670,10 +670,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -695,10 +695,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -720,10 +720,10 @@
         },
         {
           "name" : "options",
-          "type" : "function named parameters",
+          "type" : "function_named_parameters",
           "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}",
           "optional" : true,
-          "description" : "Match additional options as <>. See <> for more information."
+          "description" : "(Optional) Match additional options as <>. See <> for more information."
         }
       ],
       "variadic" : false,
@@ -731,7 +731,7 @@
     }
   ],
   "examples" : [
-    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;",
+    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5",
     "FROM books \n| WHERE MATCH(title, \"Hobbit Back Again\", {\"operator\": \"AND\"})\n| KEEP title;"
   ],
   "preview" : true,
diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json
index a67c6b0e45c4a..98f1a8d73d35e 100644
--- a/docs/reference/esql/functions/kibana/definition/match_operator.json
+++ b/docs/reference/esql/functions/kibana/definition/match_operator.json
@@ -529,7 +529,7 @@
     }
   ],
   "examples" : [
-    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
+    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5"
   ],
   "preview" : true,
   "snapshot_only" : false
diff --git a/docs/reference/esql/functions/kibana/definition/md5.json b/docs/reference/esql/functions/kibana/definition/md5.json
index 4d3a88e123ff4..b631f7816cb5f 100644
--- a/docs/reference/esql/functions/kibana/definition/md5.json
+++ b/docs/reference/esql/functions/kibana/definition/md5.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "md5",
   "description" : "Computes the MD5 hash of the input.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_append.json b/docs/reference/esql/functions/kibana/definition/mv_append.json
index 043625d9ea1e7..7cbcc678464c7 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_append.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_append.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_append",
   "description" : "Concatenates values of two multi-value fields.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_avg.json b/docs/reference/esql/functions/kibana/definition/mv_avg.json
index a25d38c909e66..65a32cba133ef 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_avg.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_avg.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_avg",
   "description" : "Converts a multivalued field into a single valued field containing the average of all of the values.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_concat.json b/docs/reference/esql/functions/kibana/definition/mv_concat.json
index 88b1107a9f401..6855525abfba5 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_concat.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_concat.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_concat",
   "description" : "Converts a multivalued string expression into a single valued column containing the concatenation of all values separated by a delimiter.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json
index 90ace2525f710..b82b7b382409d 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_count.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_count.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_count",
   "description" : "Converts a multivalued expression into a single valued column containing a count of the number of values.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
index 2fb5b9c61727f..fbce83189ef2b 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_dedupe",
   "description" : "Remove duplicate values from a multivalued field.",
   "note" : "`MV_DEDUPE` may, but won't always, sort the values in the column.",
diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json
index 552f568c9b171..32525a7c124f3 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_first.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_first.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_first",
   "description" : "Converts a multivalued expression into a single valued column containing the\nfirst value. This is most useful when reading from a function that emits\nmultivalued columns in a known order like <>.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json
index 78d7b348a6042..f11482b69824c 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_last.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_last.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_last",
   "description" : "Converts a multivalue expression into a single valued column containing the last\nvalue. This is most useful when reading from a function that emits multivalued\ncolumns in a known order like <>.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json
index a1e55c58cff70..65b8d801edabd 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_max.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_max.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_max",
   "description" : "Converts a multivalued expression into a single valued column containing the maximum value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_median.json b/docs/reference/esql/functions/kibana/definition/mv_median.json
index fe95e1999f6a9..3ba870023cb47 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_median.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_median.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_median",
   "description" : "Converts a multivalued field into a single valued field containing the median value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_median_absolute_deviation.json b/docs/reference/esql/functions/kibana/definition/mv_median_absolute_deviation.json
index af16e9ab0d3bb..e4beb343cd20d 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_median_absolute_deviation.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_median_absolute_deviation.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_median_absolute_deviation",
   "description" : "Converts a multivalued field into a single valued field containing the median absolute deviation.\n\nIt is calculated as the median of each data point's deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`.",
   "note" : "If the field has an even number of values, the medians will be calculated as the average of the middle two values. If the value is not a floating point number, the averages are rounded towards 0.",
diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json
index 7998ca4eda94e..ef36cee912d5c 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_min.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_min.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_min",
   "description" : "Converts a multivalued expression into a single valued column containing the minimum value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_percentile.json b/docs/reference/esql/functions/kibana/definition/mv_percentile.json
index 8e4cec705f48d..7835241ed68ba 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_percentile.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_percentile.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_percentile",
   "description" : "Converts a multivalued field into a single valued field containing the value at which a certain percentage of observed values occur.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_pseries_weighted_sum.json b/docs/reference/esql/functions/kibana/definition/mv_pseries_weighted_sum.json
index f96a6dfb5a43a..7935afe3338e3 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_pseries_weighted_sum.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_pseries_weighted_sum.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_pseries_weighted_sum",
   "description" : "Converts a multivalued expression into a single-valued column by multiplying every element on the input list by its corresponding term in P-Series and computing the sum.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json
index 5ad8f588cdc2b..f23c0d089d93b 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_slice.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_slice",
   "description" : "Returns a subset of the multivalued field using the start and end index values.\nThis is most useful when reading from a function that emits multivalued columns\nin a known order like <> or <>.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json
index 072c05743af33..17a8fb426755c 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_sort.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_sort",
   "description" : "Sorts a multivalued field in lexicographical order.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_sum.json b/docs/reference/esql/functions/kibana/definition/mv_sum.json
index 6ccbcec3c61b4..31b0e5e420b70 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_sum.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_sum.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_sum",
   "description" : "Converts a multivalued field into a single valued field containing the sum of all of the values.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/mv_zip.json b/docs/reference/esql/functions/kibana/definition/mv_zip.json
index e292cd27ee738..fc573834054b7 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_zip.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_zip.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "mv_zip",
   "description" : "Combines the values from two multivalued fields with a delimiter that joins them together.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/now.json b/docs/reference/esql/functions/kibana/definition/now.json
index 26ef362995185..42831c2faf497 100644
--- a/docs/reference/esql/functions/kibana/definition/now.json
+++ b/docs/reference/esql/functions/kibana/definition/now.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "now",
   "description" : "Returns current date and time.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/pi.json b/docs/reference/esql/functions/kibana/definition/pi.json
index 98d9451a2b929..0dd720dd69cb6 100644
--- a/docs/reference/esql/functions/kibana/definition/pi.json
+++ b/docs/reference/esql/functions/kibana/definition/pi.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "pi",
   "description" : "Returns Pi, the ratio of a circle's circumference to its diameter.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/pow.json b/docs/reference/esql/functions/kibana/definition/pow.json
index 35fe080d82f79..93dda492f087b 100644
--- a/docs/reference/esql/functions/kibana/definition/pow.json
+++ b/docs/reference/esql/functions/kibana/definition/pow.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "pow",
   "description" : "Returns the value of `base` raised to the power of `exponent`.",
   "note" : "It is still possible to overflow a double result here; in that case, null will be returned.",
diff --git a/docs/reference/esql/functions/kibana/definition/qstr.json b/docs/reference/esql/functions/kibana/definition/qstr.json
index 3b091bfe2e13b..b617f9f9246c6 100644
--- a/docs/reference/esql/functions/kibana/definition/qstr.json
+++ b/docs/reference/esql/functions/kibana/definition/qstr.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "qstr",
   "description" : "Performs a <>. Returns true if the provided query string matches the row.",
   "signatures" : [
@@ -30,7 +30,7 @@
     }
   ],
   "examples" : [
-    "FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
+    "FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5"
   ],
   "preview" : true,
   "snapshot_only" : false
diff --git a/docs/reference/esql/functions/kibana/definition/repeat.json b/docs/reference/esql/functions/kibana/definition/repeat.json
index 201484cf7aa6f..2eb739f0d0bc0 100644
--- a/docs/reference/esql/functions/kibana/definition/repeat.json
+++ b/docs/reference/esql/functions/kibana/definition/repeat.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "repeat",
   "description" : "Returns a string constructed by concatenating `string` with itself the specified `number` of times.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/replace.json b/docs/reference/esql/functions/kibana/definition/replace.json
index b5d1a1af7e8f4..b512019a9951a 100644
--- a/docs/reference/esql/functions/kibana/definition/replace.json
+++ b/docs/reference/esql/functions/kibana/definition/replace.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "replace",
   "description" : "The function substitutes in the string `str` any match of the regular expression `regex`\nwith the replacement string `newStr`.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/reverse.json b/docs/reference/esql/functions/kibana/definition/reverse.json
index 0652d9cfa6b15..2ebf63fa5787c 100644
--- a/docs/reference/esql/functions/kibana/definition/reverse.json
+++ b/docs/reference/esql/functions/kibana/definition/reverse.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "reverse",
   "description" : "Returns a new string representing the input string in reverse order.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/right.json b/docs/reference/esql/functions/kibana/definition/right.json
index 1211877a2f902..84408f082db5a 100644
--- a/docs/reference/esql/functions/kibana/definition/right.json
+++ b/docs/reference/esql/functions/kibana/definition/right.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "right",
   "description" : "Return the substring that extracts 'length' chars from 'str' starting from the right.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/round.json b/docs/reference/esql/functions/kibana/definition/round.json
index 4ef20aa162b42..a364dbbed0b5a 100644
--- a/docs/reference/esql/functions/kibana/definition/round.json
+++ b/docs/reference/esql/functions/kibana/definition/round.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "round",
   "description" : "Rounds a number to the specified number of decimal places.\nDefaults to 0, which returns the nearest integer. If the\nprecision is a negative number, rounds to the number of digits left\nof the decimal point.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/rtrim.json b/docs/reference/esql/functions/kibana/definition/rtrim.json
index 9c8a7578ed789..ef6c93f9fa172 100644
--- a/docs/reference/esql/functions/kibana/definition/rtrim.json
+++ b/docs/reference/esql/functions/kibana/definition/rtrim.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "rtrim",
   "description" : "Removes trailing whitespaces from a string.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sha1.json b/docs/reference/esql/functions/kibana/definition/sha1.json
index a6abb31368bb3..18fa2c33ae0d5 100644
--- a/docs/reference/esql/functions/kibana/definition/sha1.json
+++ b/docs/reference/esql/functions/kibana/definition/sha1.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sha1",
   "description" : "Computes the SHA1 hash of the input.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sha256.json b/docs/reference/esql/functions/kibana/definition/sha256.json
index 700425d485b61..7ad0e2c5500da 100644
--- a/docs/reference/esql/functions/kibana/definition/sha256.json
+++ b/docs/reference/esql/functions/kibana/definition/sha256.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sha256",
   "description" : "Computes the SHA256 hash of the input.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/signum.json b/docs/reference/esql/functions/kibana/definition/signum.json
index 0902826651eff..20667b8cb683e 100644
--- a/docs/reference/esql/functions/kibana/definition/signum.json
+++ b/docs/reference/esql/functions/kibana/definition/signum.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "signum",
   "description" : "Returns the sign of the given number.\nIt returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sin.json b/docs/reference/esql/functions/kibana/definition/sin.json
index 0d4f4e5c5cf13..efdf96a3b8d38 100644
--- a/docs/reference/esql/functions/kibana/definition/sin.json
+++ b/docs/reference/esql/functions/kibana/definition/sin.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sin",
   "description" : "Returns the sine of an angle.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sinh.json b/docs/reference/esql/functions/kibana/definition/sinh.json
index b20db3328775b..3ed3eaa23c9ca 100644
--- a/docs/reference/esql/functions/kibana/definition/sinh.json
+++ b/docs/reference/esql/functions/kibana/definition/sinh.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sinh",
   "description" : "Returns the hyperbolic sine of a number.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/space.json b/docs/reference/esql/functions/kibana/definition/space.json
index 7a819d3ccd213..cc1223de2cdf2 100644
--- a/docs/reference/esql/functions/kibana/definition/space.json
+++ b/docs/reference/esql/functions/kibana/definition/space.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "space",
   "description" : "Returns a string made of `number` spaces.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/split.json b/docs/reference/esql/functions/kibana/definition/split.json
index 862cf7a09f19c..87a7b0a0f87c6 100644
--- a/docs/reference/esql/functions/kibana/definition/split.json
+++ b/docs/reference/esql/functions/kibana/definition/split.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "split",
   "description" : "Split a single valued string into multiple strings.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/sqrt.json b/docs/reference/esql/functions/kibana/definition/sqrt.json
index 11620a465c45f..6985cb20043c7 100644
--- a/docs/reference/esql/functions/kibana/definition/sqrt.json
+++ b/docs/reference/esql/functions/kibana/definition/sqrt.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "sqrt",
   "description" : "Returns the square root of a number. The input can be any numeric value, the return value is always a double.\nSquare roots of negative numbers and infinities are null.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json
index 0c1b61c563402..7d78518f91da1 100644
--- a/docs/reference/esql/functions/kibana/definition/st_contains.json
+++ b/docs/reference/esql/functions/kibana/definition/st_contains.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_contains",
   "description" : "Returns whether the first geometry contains the second geometry.\nThis is the inverse of the <> function.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json
index 0c4f6e7c36f53..f0da69103ea68 100644
--- a/docs/reference/esql/functions/kibana/definition/st_disjoint.json
+++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_disjoint",
   "description" : "Returns whether the two geometries or geometry columns are disjoint.\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_distance.json b/docs/reference/esql/functions/kibana/definition/st_distance.json
index 2fb8f341d12f4..74492b6e30742 100644
--- a/docs/reference/esql/functions/kibana/definition/st_distance.json
+++ b/docs/reference/esql/functions/kibana/definition/st_distance.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_distance",
   "description" : "Computes the distance between two points.\nFor cartesian geometries, this is the pythagorean distance in the same units as the original coordinates.\nFor geographic geometries, this is the circular distance along the great circle in meters.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_envelope.json b/docs/reference/esql/functions/kibana/definition/st_envelope.json
index 6c00dda265ac7..83f4bad5b826f 100644
--- a/docs/reference/esql/functions/kibana/definition/st_envelope.json
+++ b/docs/reference/esql/functions/kibana/definition/st_envelope.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_envelope",
   "description" : "Determines the minimum bounding box of the supplied geometry.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json
index 51237a0b1cee1..b35df6711b338 100644
--- a/docs/reference/esql/functions/kibana/definition/st_intersects.json
+++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_intersects",
   "description" : "Returns true if two geometries intersect.\nThey intersect if they have any point in common, including their interior points\n(points along lines or within polygons).\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json
index d877cf4e8dfbc..3ab419683f021 100644
--- a/docs/reference/esql/functions/kibana/definition/st_within.json
+++ b/docs/reference/esql/functions/kibana/definition/st_within.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_within",
   "description" : "Returns whether the first geometry is within the second geometry.\nThis is the inverse of the <> function.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json
index f434f1467c0bf..27378edd25ff8 100644
--- a/docs/reference/esql/functions/kibana/definition/st_x.json
+++ b/docs/reference/esql/functions/kibana/definition/st_x.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_x",
   "description" : "Extracts the `x` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `longitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_xmax.json b/docs/reference/esql/functions/kibana/definition/st_xmax.json
index 7be22617c0992..c1223ecae7349 100644
--- a/docs/reference/esql/functions/kibana/definition/st_xmax.json
+++ b/docs/reference/esql/functions/kibana/definition/st_xmax.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_xmax",
   "description" : "Extracts the maximum value of the `x` coordinates from the supplied geometry.\nIf the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the maximum `longitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_xmin.json b/docs/reference/esql/functions/kibana/definition/st_xmin.json
index 8052fdb861cea..20c4bd8cca79f 100644
--- a/docs/reference/esql/functions/kibana/definition/st_xmin.json
+++ b/docs/reference/esql/functions/kibana/definition/st_xmin.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_xmin",
   "description" : "Extracts the minimum value of the `x` coordinates from the supplied geometry.\nIf the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the minimum `longitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json
index ca7d817a36ff0..42f37d0346b1f 100644
--- a/docs/reference/esql/functions/kibana/definition/st_y.json
+++ b/docs/reference/esql/functions/kibana/definition/st_y.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_y",
   "description" : "Extracts the `y` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `latitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_ymax.json b/docs/reference/esql/functions/kibana/definition/st_ymax.json
index 1a53f7388ea56..83a8070834e1a 100644
--- a/docs/reference/esql/functions/kibana/definition/st_ymax.json
+++ b/docs/reference/esql/functions/kibana/definition/st_ymax.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_ymax",
   "description" : "Extracts the maximum value of the `y` coordinates from the supplied geometry.\nIf the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the maximum `latitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/st_ymin.json b/docs/reference/esql/functions/kibana/definition/st_ymin.json
index e11722a8f9c07..21051d02cb6d3 100644
--- a/docs/reference/esql/functions/kibana/definition/st_ymin.json
+++ b/docs/reference/esql/functions/kibana/definition/st_ymin.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "st_ymin",
   "description" : "Extracts the minimum value of the `y` coordinates from the supplied geometry.\nIf the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the minimum `latitude` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/starts_with.json b/docs/reference/esql/functions/kibana/definition/starts_with.json
index 0e5dc076a5689..bad7fa1d34b69 100644
--- a/docs/reference/esql/functions/kibana/definition/starts_with.json
+++ b/docs/reference/esql/functions/kibana/definition/starts_with.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "starts_with",
   "description" : "Returns a boolean that indicates whether a keyword string starts with another string.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/substring.json b/docs/reference/esql/functions/kibana/definition/substring.json
index 1998eb0428482..c6260c5040e58 100644
--- a/docs/reference/esql/functions/kibana/definition/substring.json
+++ b/docs/reference/esql/functions/kibana/definition/substring.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "substring",
   "description" : "Returns a substring of a string, specified by a start position and an optional length.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/tan.json b/docs/reference/esql/functions/kibana/definition/tan.json
index 76fab2ff6ab1e..af28eace4c591 100644
--- a/docs/reference/esql/functions/kibana/definition/tan.json
+++ b/docs/reference/esql/functions/kibana/definition/tan.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "tan",
   "description" : "Returns the tangent of an angle.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/tanh.json b/docs/reference/esql/functions/kibana/definition/tanh.json
index f56e053a3d4ee..a36edcc1e88f4 100644
--- a/docs/reference/esql/functions/kibana/definition/tanh.json
+++ b/docs/reference/esql/functions/kibana/definition/tanh.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "tanh",
   "description" : "Returns the hyperbolic tangent of a number.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/tau.json b/docs/reference/esql/functions/kibana/definition/tau.json
index 1dbb2e892ee60..b5090e8a39a81 100644
--- a/docs/reference/esql/functions/kibana/definition/tau.json
+++ b/docs/reference/esql/functions/kibana/definition/tau.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "tau",
   "description" : "Returns the ratio of a circle's circumference to its radius.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/term.json b/docs/reference/esql/functions/kibana/definition/term.json
index b0f129afd239c..1a0ea7bf8a4ee 100644
--- a/docs/reference/esql/functions/kibana/definition/term.json
+++ b/docs/reference/esql/functions/kibana/definition/term.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "term",
   "description" : "Performs a Term query on the specified field. Returns true if the provided term matches the row.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_base64.json b/docs/reference/esql/functions/kibana/definition/to_base64.json
index 84ace22073ab7..39014ceb9a5c2 100644
--- a/docs/reference/esql/functions/kibana/definition/to_base64.json
+++ b/docs/reference/esql/functions/kibana/definition/to_base64.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_base64",
   "description" : "Encode a string to a base64 string.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_boolean.json b/docs/reference/esql/functions/kibana/definition/to_boolean.json
index 3a403caaace6c..d295c826f5767 100644
--- a/docs/reference/esql/functions/kibana/definition/to_boolean.json
+++ b/docs/reference/esql/functions/kibana/definition/to_boolean.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_boolean",
   "description" : "Converts an input value to a boolean value.\nA string value of *true* will be case-insensitive converted to the Boolean *true*.\nFor anything else, including the empty string, the function will return *false*.\nThe numerical value of *0* will be converted to *false*, anything else will be converted to *true*.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json
index 7f6e99f88b777..f7303457120f4 100644
--- a/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json
+++ b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_cartesianpoint",
   "description" : "Converts an input value to a `cartesian_point` value.\nA string will only be successfully converted if it respects WKT Point format.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json
index 284288068b415..6a08d531523ad 100644
--- a/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json
+++ b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_cartesianshape",
   "description" : "Converts an input value to a `cartesian_shape` value.\nA string will only be successfully converted if it respects WKT format.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json
index 210b9608f9eff..bc74476a4867c 100644
--- a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json
+++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_date_nanos",
   "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).",
   "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z, attepting to convertvalues outside of that range will result in null with a warning.. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.",
diff --git a/docs/reference/esql/functions/kibana/definition/to_dateperiod.json b/docs/reference/esql/functions/kibana/definition/to_dateperiod.json
index 2940a08a7a100..790c7ec92401c 100644
--- a/docs/reference/esql/functions/kibana/definition/to_dateperiod.json
+++ b/docs/reference/esql/functions/kibana/definition/to_dateperiod.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_dateperiod",
   "description" : "Converts an input value into a `date_period` value.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json
index 8f9ecbd139d32..90c683cf3767f 100644
--- a/docs/reference/esql/functions/kibana/definition/to_datetime.json
+++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_datetime",
   "description" : "Converts an input value to a date value.\nA string will only be successfully converted if it's respecting the format `yyyy-MM-dd'T'HH:mm:ss.SSS'Z'`.\nTo convert dates in other formats, use <>.",
   "note" : "Note that when converting from nanosecond resolution to millisecond resolution with this function, the nanosecond date is truncated, not rounded.",
diff --git a/docs/reference/esql/functions/kibana/definition/to_degrees.json b/docs/reference/esql/functions/kibana/definition/to_degrees.json
index 6aefca5c5d4bc..7b1cc51809704 100644
--- a/docs/reference/esql/functions/kibana/definition/to_degrees.json
+++ b/docs/reference/esql/functions/kibana/definition/to_degrees.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_degrees",
   "description" : "Converts a number in radians to degrees.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_double.json b/docs/reference/esql/functions/kibana/definition/to_double.json
index 27565e616d6ed..09d5341a62a1d 100644
--- a/docs/reference/esql/functions/kibana/definition/to_double.json
+++ b/docs/reference/esql/functions/kibana/definition/to_double.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_double",
   "description" : "Converts an input value to a double value. If the input parameter is of a date type,\nits value will be interpreted as milliseconds since the Unix epoch,\nconverted to double. Boolean *true* will be converted to double *1.0*, *false* to *0.0*.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_geopoint.json b/docs/reference/esql/functions/kibana/definition/to_geopoint.json
index e2ba492e39e9e..2c465fcfc2f8d 100644
--- a/docs/reference/esql/functions/kibana/definition/to_geopoint.json
+++ b/docs/reference/esql/functions/kibana/definition/to_geopoint.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_geopoint",
   "description" : "Converts an input value to a `geo_point` value.\nA string will only be successfully converted if it respects WKT Point format.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_geoshape.json b/docs/reference/esql/functions/kibana/definition/to_geoshape.json
index 53316cc0f7b84..dc05f12e6ee3e 100644
--- a/docs/reference/esql/functions/kibana/definition/to_geoshape.json
+++ b/docs/reference/esql/functions/kibana/definition/to_geoshape.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
-  "type" : "eval",
+  "type" : "scalar",
   "name" : "to_geoshape",
   "description" : "Converts an input value to a `geo_shape` value.\nA string will only be successfully converted if it respects WKT format.",
   "signatures" : [
diff --git a/docs/reference/esql/functions/kibana/definition/to_integer.json b/docs/reference/esql/functions/kibana/definition/to_integer.json
index 93691ac4e94ef..0228d6baaf507 100644
--- a/docs/reference/esql/functions/kibana/definition/to_integer.json
+++ b/docs/reference/esql/functions/kibana/definition/to_integer.json
@@ -1,6 +1,6 @@
 {
   "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it.
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_integer", "description" : "Converts an input value to an integer value.\nIf the input parameter is of a date type, its value will be interpreted as milliseconds\nsince the Unix epoch, converted to integer.\nBoolean *true* will be converted to integer *1*, *false* to *0*.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_ip.json b/docs/reference/esql/functions/kibana/definition/to_ip.json index dfc8e97d283f9..4ec424442c2c1 100644 --- a/docs/reference/esql/functions/kibana/definition/to_ip.json +++ b/docs/reference/esql/functions/kibana/definition/to_ip.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_ip", "description" : "Converts an input string to an IP value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json index eb1ce7220c3f9..5cd920092473f 100644 --- a/docs/reference/esql/functions/kibana/definition/to_long.json +++ b/docs/reference/esql/functions/kibana/definition/to_long.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_long", "description" : "Converts an input value to a long value. If the input parameter is of a date type,\nits value will be interpreted as milliseconds since the Unix epoch, converted to long.\nBoolean *true* will be converted to long *1*, *false* to *0*.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_lower.json b/docs/reference/esql/functions/kibana/definition/to_lower.json index 07bb057fe080d..22fd8f4fc1b11 100644 --- a/docs/reference/esql/functions/kibana/definition/to_lower.json +++ b/docs/reference/esql/functions/kibana/definition/to_lower.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_lower", "description" : "Returns a new string representing the input string converted to lower case.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_radians.json b/docs/reference/esql/functions/kibana/definition/to_radians.json index 1b13ef22e23f0..638ef8ec13e8c 100644 --- a/docs/reference/esql/functions/kibana/definition/to_radians.json +++ b/docs/reference/esql/functions/kibana/definition/to_radians.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_radians", "description" : "Converts a number in degrees to radians.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json index 1c86e81b31136..40e9588b03f85 100644 --- a/docs/reference/esql/functions/kibana/definition/to_string.json +++ b/docs/reference/esql/functions/kibana/definition/to_string.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_string", "description" : "Converts an input value into a string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_timeduration.json b/docs/reference/esql/functions/kibana/definition/to_timeduration.json index eb7d4e0d5ccec..923aa2024f335 100644 --- a/docs/reference/esql/functions/kibana/definition/to_timeduration.json +++ b/docs/reference/esql/functions/kibana/definition/to_timeduration.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_timeduration", "description" : "Converts an input value into a `time_duration` value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json index 4a215b1ea97f3..f7725311b298a 100644 --- a/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json +++ b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_unsigned_long", "description" : "Converts an input value to an unsigned long value. If the input parameter is of a date type,\nits value will be interpreted as milliseconds since the Unix epoch, converted to unsigned long.\nBoolean *true* will be converted to unsigned long *1*, *false* to *0*.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_upper.json b/docs/reference/esql/functions/kibana/definition/to_upper.json index caa9d563b08b1..ac0494b1fb9ec 100644 --- a/docs/reference/esql/functions/kibana/definition/to_upper.json +++ b/docs/reference/esql/functions/kibana/definition/to_upper.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_upper", "description" : "Returns a new string representing the input string converted to upper case.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/to_version.json b/docs/reference/esql/functions/kibana/definition/to_version.json index a77fc250c3a36..41ad00dc20c9e 100644 --- a/docs/reference/esql/functions/kibana/definition/to_version.json +++ b/docs/reference/esql/functions/kibana/definition/to_version.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "to_version", "description" : "Converts an input string to a version value.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/trim.json b/docs/reference/esql/functions/kibana/definition/trim.json index 45805b3bfb054..eb72d5d041d0f 100644 --- a/docs/reference/esql/functions/kibana/definition/trim.json +++ b/docs/reference/esql/functions/kibana/definition/trim.json @@ -1,6 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "scalar", "name" : "trim", "description" : "Removes leading and trailing whitespaces from a string.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/docs/kql.md b/docs/reference/esql/functions/kibana/docs/kql.md index 0ba419c1cd032..14c914d57af91 100644 --- a/docs/reference/esql/functions/kibana/docs/kql.md +++ b/docs/reference/esql/functions/kibana/docs/kql.md @@ -10,5 +10,5 @@ FROM books | WHERE KQL("author: Faulkner") | KEEP book_no, author | SORT book_no -| LIMIT 5; +| LIMIT 5 ``` diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md index 6526d9e84168e..72132533ea82d 100644 --- a/docs/reference/esql/functions/kibana/docs/match.md +++ b/docs/reference/esql/functions/kibana/docs/match.md @@ -21,5 +21,5 @@ FROM books | WHERE MATCH(author, "Faulkner") | KEEP book_no, author | SORT book_no -| LIMIT 5; +| LIMIT 5 ``` diff --git a/docs/reference/esql/functions/kibana/docs/match_operator.md b/docs/reference/esql/functions/kibana/docs/match_operator.md index 0624329182f3a..59662b36b804f 100644 --- a/docs/reference/esql/functions/kibana/docs/match_operator.md +++ b/docs/reference/esql/functions/kibana/docs/match_operator.md @@ -18,5 +18,5 @@ FROM books | WHERE MATCH(author, "Faulkner") | KEEP book_no, author | SORT book_no -| LIMIT 5; +| LIMIT 5 ``` diff --git a/docs/reference/esql/functions/kibana/docs/qstr.md b/docs/reference/esql/functions/kibana/docs/qstr.md index 7df5a2fe08a9d..374854b805fee 100644 --- a/docs/reference/esql/functions/kibana/docs/qstr.md +++ b/docs/reference/esql/functions/kibana/docs/qstr.md @@ -10,5 +10,5 @@ FROM books | WHERE QSTR("author: Faulkner") | KEEP book_no, author | SORT book_no -| LIMIT 5; +| LIMIT 5 ``` diff --git a/docs/reference/indices/resolve-cluster.asciidoc b/docs/reference/indices/resolve-cluster.asciidoc index b1d379e50557c..195cbb997adb1 100644 --- a/docs/reference/indices/resolve-cluster.asciidoc +++ b/docs/reference/indices/resolve-cluster.asciidoc @@ -24,7 +24,9 @@ with this endpoint. For each cluster in scope, information is returned about: -1. whether the querying ("local") cluster is currently connected to it +1. whether the querying ("local") cluster was able to connect to each remote cluster + specified in the index expression. Note that this endpoint actively attempts to + contact the remote clusters, unlike the <> endpoint. 2. whether each remote cluster is configured with `skip_unavailable` as `true` or `false` 3. whether there are any indices, aliases or data streams on that cluster that match the index expression (if one provided) @@ -42,11 +44,11 @@ Once the proper security permissions are obtained, then you can rely on the `con in the response to determine whether the remote cluster is available and ready for querying. ==== -NOTE: When querying older clusters that do not support the _resolve/cluster endpoint -without an index expression, the local cluster will send the index expression `dummy*` -to those remote clusters, so if an errors occur, you may see a reference to that index -expression even though you didn't request it. If it causes a problem, you can instead -include an index expression like `*:*` to this endpoint to bypass the issue. +NOTE: The ability to query without an index expression was added in 8.18, so when +querying remote clusters older than that, the local cluster will send the index +expression `dummy*` to those remote clusters. 
Thus, if an error occurs, you may see a reference +to that index expression even though you didn't request it. If it causes a problem, you can +instead include an index expression like `*:*` to bypass the issue. //// [source,console] ---- @@ -83,7 +85,8 @@ GET /_resolve/cluster ---- // TEST[continued] -Returns information about all remote clusters configured on the local cluster. +Returns information about all remote clusters configured on the local cluster +without doing any index matching. [source,console] ---- @@ -122,6 +125,15 @@ Resources on <> can be specified using the [[resolve-cluster-api-query-params]] ==== {api-query-parms-title} +`timeout`:: +(Optional, TimeValue) Specify a max wait time for remote clusters to respond. +If a remote cluster does not respond within this timeout period, the API response +will show the cluster as not connected and include an error message that the +request timed out. The default timeout is unset and the query can take +as long as the networking layer is configured to wait for remote clusters that are +not responding (typically 30 seconds). ++ + include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -150,11 +162,25 @@ that takes no index expression. [discrete] [[usecases-for-resolve-cluster]] +=== Test availability of remote clusters + +The <> endpoint is commonly used to test whether the "local" +cluster (the cluster being queried) is connected to its remote clusters, but it does not +necessarily reflect whether the remote cluster is available or not. The remote cluster may +be available, while the local cluster is not currently connected to it. + +You can use the resolve-cluster API to attempt to reconnect to remote clusters +(for example with `GET _resolve/cluster` or `GET _resolve/cluster/*:*`). +The `connected` field in the response will indicate whether it was successful. +If a connection was (re-)established, this will also cause the +<> endpoint to now indicate a connected status. + + === Advantages of using this endpoint before a {ccs} You may want to exclude a cluster or index from a search when: -1. A remote cluster is not currently connected and is configured with `skip_unavailable`=`false`. +1. A remote cluster could not be connected to and is configured with `skip_unavailable`=`false`. Executing a {ccs} under those conditions will cause <>. @@ -238,11 +264,12 @@ The API returns the following response: ==== Identifying potential problems with your {ccs} The following request shows several examples of how modifying your query can -prevent search failures. +prevent search failures. Note also that a `timeout` of 5 seconds is sent, which +sets the maximum time the query will wait for remote clusters to respond.
[source,console] -------------------------------------------------- -GET /_resolve/cluster/not-present,clust*:my-index*,oldcluster:*?ignore_unavailable=false +GET /_resolve/cluster/not-present,clust*:my-index*,oldcluster:*?ignore_unavailable=false&timeout=5s -------------------------------------------------- // TEST[continued] // TEST[s/,oldcluster:*//] @@ -268,16 +295,14 @@ GET /_resolve/cluster/not-present,clust*:my-index*,oldcluster:*?ignore_unavailab }, "cluster_two": { "connected": false, <3> + "skip_unavailable": false + }, + "cluster_three": { + "connected": false, "skip_unavailable": false, - "matching_indices": true, - "version": { - "number": "8.13.0", - "build_flavor": "default", - "minimum_wire_compatibility_version": "7.17.0", - "minimum_index_compatibility_version": "7.0.0" - } + "error": "Request timed out before receiving a response from the remote cluster" <4> }, - "oldcluster": { <4> + "oldcluster": { <5> "connected": true, "skip_unavailable": false, "matching_indices": true @@ -299,7 +324,10 @@ could be closed. (You can check this by using the failed). Since this cluster is marked as `skip_unavailable=false`, you should probably exclude this cluster from the search by adding `-cluster_two:*` to the search index expression. -<4> The `oldcluster` remote cluster shows that it has matching indices, but no +<4> For `cluster_three`, the error message indicates that this remote cluster did +not respond within the 5-second timeout window specified, so it is also marked as +not connected. +<5> The `oldcluster` remote cluster shows that it has matching indices, but no version information is included. This indicates that the cluster version predates the introduction of the `_resolve/cluster` API in 8.13.0., so you may want to exclude it from your {ccs}. 
(Note: the endpoint was able to tell there were diff --git a/docs/reference/indices/shard-stores.asciidoc b/docs/reference/indices/shard-stores.asciidoc index 35f6a0915caa0..941c1ce379078 100644 --- a/docs/reference/indices/shard-stores.asciidoc +++ b/docs/reference/indices/shard-stores.asciidoc @@ -198,10 +198,8 @@ The API returns the following response: // TESTRESPONSE[s/"attributes": \{[^}]*\}/"attributes": $body.$_path/] // TESTRESPONSE[s/"roles": \[[^]]*\]/"roles": $body.$_path/] // TESTRESPONSE[s/"8.10.0"/\$node_version/] -// TESTRESPONSE[s/"min_index_version": 7000099/"min_index_version": $body.$_path/] -// TESTRESPONSE[s/"max_index_version": 8100099/"max_index_version": $body.$_path/] - - +// TESTRESPONSE[s/"min_index_version": [0-9]+/"min_index_version": $body.$_path/] +// TESTRESPONSE[s/"max_index_version": [0-9]+/"max_index_version": $body.$_path/] <1> The key is the corresponding shard id for the store information <2> A list of store information for all copies of the shard diff --git a/docs/reference/inference/elastic-infer-service.asciidoc b/docs/reference/inference/elastic-infer-service.asciidoc index f78bfa967cceb..24ae7e20deec6 100644 --- a/docs/reference/inference/elastic-infer-service.asciidoc +++ b/docs/reference/inference/elastic-infer-service.asciidoc @@ -49,7 +49,7 @@ include::inference-shared.asciidoc[tag=chat-completion-docs] ==== {api-request-body-title} -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/inference-shared.asciidoc b/docs/reference/inference/inference-shared.asciidoc index b133c54082810..9f595bc7b0491 100644 --- a/docs/reference/inference/inference-shared.asciidoc +++ b/docs/reference/inference/inference-shared.asciidoc @@ -48,7 +48,7 @@ tag::chunking-settings-overlap[] Only for `word` chunking strategy. Specifies the number of overlapping words for chunks. Defaults to `100`. -This value cannot be higher than the half of `max_chunking_size`. +This value cannot be higher than half of `max_chunk_size`.
end::chunking-settings-overlap[] tag::chunking-settings-sentence-overlap[] diff --git a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc index eea0e094dce5a..129a51bed415e 100644 --- a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc +++ b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc @@ -44,7 +44,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-amazon-bedrock.asciidoc b/docs/reference/inference/service-amazon-bedrock.asciidoc index d4ae3895b7c76..e8c3f0dc4c811 100644 --- a/docs/reference/inference/service-amazon-bedrock.asciidoc +++ b/docs/reference/inference/service-amazon-bedrock.asciidoc @@ -42,7 +42,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-anthropic.asciidoc b/docs/reference/inference/service-anthropic.asciidoc index 08d8ca43daea8..9eaf407044d7a 100644 --- a/docs/reference/inference/service-anthropic.asciidoc +++ b/docs/reference/inference/service-anthropic.asciidoc @@ -42,7 +42,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-azure-ai-studio.asciidoc b/docs/reference/inference/service-azure-ai-studio.asciidoc index b179a87de1594..fbc70ae01f69f 100644 --- a/docs/reference/inference/service-azure-ai-studio.asciidoc +++ b/docs/reference/inference/service-azure-ai-studio.asciidoc @@ -43,7 +43,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-azure-openai.asciidoc b/docs/reference/inference/service-azure-openai.asciidoc index eded44b7ab0b0..8aedac80fbdca 100644 --- a/docs/reference/inference/service-azure-openai.asciidoc +++ b/docs/reference/inference/service-azure-openai.asciidoc @@ -43,7 +43,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-cohere.asciidoc b/docs/reference/inference/service-cohere.asciidoc index e95f0810fd29d..289f03787580f 100644 --- a/docs/reference/inference/service-cohere.asciidoc +++ b/docs/reference/inference/service-cohere.asciidoc @@ -44,7 +44,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index 745b14904dd6d..2db26e4f6c405 100644 --- 
a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -49,7 +49,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc index 6a509ec850903..417a9181d658b 100644 --- a/docs/reference/inference/service-elser.asciidoc +++ b/docs/reference/inference/service-elser.asciidoc @@ -55,7 +55,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-google-ai-studio.asciidoc b/docs/reference/inference/service-google-ai-studio.asciidoc index a6f7d914decfa..8ed49ad0ba7bd 100644 --- a/docs/reference/inference/service-google-ai-studio.asciidoc +++ b/docs/reference/inference/service-google-ai-studio.asciidoc @@ -43,7 +43,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-google-vertex-ai.asciidoc b/docs/reference/inference/service-google-vertex-ai.asciidoc index f9499de7e5602..7e5611d16fb07 100644 --- a/docs/reference/inference/service-google-vertex-ai.asciidoc +++ b/docs/reference/inference/service-google-vertex-ai.asciidoc @@ -43,7 +43,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-hugging-face.asciidoc b/docs/reference/inference/service-hugging-face.asciidoc index 40fb2002975dd..7f9db4e21f02a 100644 --- a/docs/reference/inference/service-hugging-face.asciidoc +++ b/docs/reference/inference/service-hugging-face.asciidoc @@ -42,7 +42,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-jinaai.asciidoc b/docs/reference/inference/service-jinaai.asciidoc index 1470c58315430..e000b3caf52a4 100644 --- a/docs/reference/inference/service-jinaai.asciidoc +++ b/docs/reference/inference/service-jinaai.asciidoc @@ -37,7 +37,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-mistral.asciidoc b/docs/reference/inference/service-mistral.asciidoc index 20e1133e8a83c..afabb2199984f 100644 --- a/docs/reference/inference/service-mistral.asciidoc +++ b/docs/reference/inference/service-mistral.asciidoc @@ -42,7 +42,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) 
include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-openai.asciidoc b/docs/reference/inference/service-openai.asciidoc index 8d7c6c937333d..511632736a35b 100644 --- a/docs/reference/inference/service-openai.asciidoc +++ b/docs/reference/inference/service-openai.asciidoc @@ -51,7 +51,7 @@ include::inference-shared.asciidoc[tag=chat-completion-docs] (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc index 8e72a51514a59..a76f857ebf85e 100644 --- a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc @@ -96,8 +96,8 @@ or index documents to an enrich index. Instead, update your source indices and <> the enrich policy again. This creates a new enrich index from your updated source indices. -The previous enrich index will deleted with a delayed maintenance job. -By default this is done every 15 minutes. +The previous enrich index will be deleted with a delayed maintenance +job that executes by default every 15 minutes. // end::update-enrich-index[] By default, this API is synchronous: It returns when a policy has been executed. diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index 75fbaea59c6bd..7cb0bc671264e 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -118,7 +118,7 @@ The three following quantization strategies are supported: * `int8` - Quantizes each dimension of the vector to 1-byte integers. This reduces the memory footprint by 75% (or 4x) at the cost of some accuracy. * `int4` - Quantizes each dimension of the vector to half-byte integers. This reduces the memory footprint by 87% (or 8x) at the cost of accuracy. -* `bbq` - experimental:[] Better binary quantization which reduces each dimension to a single bit precision. This reduces the memory footprint by 96% (or 32x) at a larger cost of accuracy. Generally, oversampling during query time and reranking can help mitigate the accuracy loss. +* `bbq` - Better binary quantization which reduces each dimension to a single bit precision. This reduces the memory footprint by 96% (or 32x) at a larger cost of accuracy. Generally, oversampling during query time and reranking can help mitigate the accuracy loss. When using a quantized format, you may want to oversample and rescore the results to improve accuracy. See <> for more information. @@ -133,7 +133,7 @@ This means disk usage will increase by ~25% for `int8`, ~12.5% for `int4`, and ~ NOTE: `int4` quantization requires an even number of vector dimensions. -NOTE: experimental:[] `bbq` quantization only supports vector dimensions that are greater than 64. +NOTE: `bbq` quantization only supports vector dimensions that are greater than 64. 
Here is an example of how to create a byte-quantized index: @@ -177,7 +177,7 @@ PUT my-byte-quantized-index } -------------------------------------------------- -experimental:[] Here is an example of how to create a binary quantized index: +Here is an example of how to create a binary quantized index: [source,console] -------------------------------------------------- @@ -325,7 +325,7 @@ by 4x at the cost of some accuracy. See <>. -* experimental:[] `bbq_hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatically binary +* `bbq_hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatic binary quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 32x at the cost of accuracy. See <>. * `flat` - This utilizes a brute-force search algorithm for exact kNN search. This supports all `element_type` values. @@ -333,7 +333,7 @@ by 32x at the cost of accuracy. See < "order": "score" <2> } @@ -152,6 +151,33 @@ PUT test-index <1> Specifies the maximum number of fragments to return. <2> Sorts highlighted fragments by score when set to `score`. By default, fragments will be output in the order they appear in the field (order: none). +Highlighting is supported on fields other than `semantic_text`. +However, if you want to restrict highlighting to the semantic highlighter and return no fragments when the field is not of type `semantic_text`, +you can explicitly enforce the `semantic` highlighter in the query: + +[source,console] +------------------------------------------------------------ +POST test-index/_search +{ + "query": { + "match": { + "my_field": "Which country is Paris in?" + } + }, + "highlight": { + "fields": { + "my_field": { + "type": "semantic", <1> + "number_of_fragments": 2, + "order": "score" + } + } + } +} +------------------------------------------------------------ +// TEST[skip:Requires inference endpoint] +<1> Ensures that highlighting is applied exclusively to `semantic_text` fields. + [discrete] [[custom-indexing]] ==== Customizing `semantic_text` indexing @@ -240,4 +266,4 @@ PUT test-index `semantic_text` field types have the following limitations: * `semantic_text` fields are not currently supported as elements of <>. -* `semantic_text` fields can't currently be set as part of <>. \ No newline at end of file +* `semantic_text` fields can't currently be set as part of <>. diff --git a/docs/reference/migration/apis/create-index-from-source.asciidoc b/docs/reference/migration/apis/create-index-from-source.asciidoc new file mode 100644 index 0000000000000..601650c0dea31 --- /dev/null +++ b/docs/reference/migration/apis/create-index-from-source.asciidoc @@ -0,0 +1,142 @@ +[[indices-create-index-from-source]] +=== Create index from source API +++++ +Create index from source +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + +[[indices-create-index-from-source-api-request]] +==== {api-request-title} + +`PUT /_create_from//` + +`POST /_create_from//` + +[[indices-create-index-from-source-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> for the index. + +[[indices-create-index-from-source-api-desc]] +==== {api-description-title} +This API allows you to add a new index to an {es} cluster, using an existing source index as a basis for the new index.
+The settings and mappings from the source index will be copied over to the destination index. You can also provide +override settings and mappings which will be combined with the source settings and mappings when creating the +destination index. + +[[indices-create-index-from-source-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) Name of the existing source index which will be used as a basis. + +``:: +(Required, string) Name of the destination index which will be created. + + +[role="child_attributes"] +[[indices-create-index-from-source-api-request-body]] +==== {api-request-body-title} + +`settings_override`:: +(Optional, <>) Settings which override the source settings. + +`mappings_override`:: +(Optional, <>) Mappings which override the source mappings. + +`remove_index_blocks`:: +(Optional, boolean) Filter out any index blocks from the source index when creating the destination index. +Defaults to `true`. + +[[indices-create-index-from-source-api-example]] +==== {api-examples-title} + +Start by creating a source index that we'll copy using this API. + +[source,console] +-------------------------------------------------- +PUT /my-index +{ + "settings": { + "index": { + "number_of_shards": 3, + "blocks.write": true + } + }, + "mappings": { + "properties": { + "field1": { "type": "text" } + } + } +} +-------------------------------------------------- +// TESTSETUP + +Now we create a destination index from the source index. This new index will have the same mappings and settings +as the source index. + +[source,console] +-------------------------------------------------- +POST _create_from/my-index/my-new-index +-------------------------------------------------- + + +Alternatively, we could override some of the source's settings and mappings. This will use the source settings +and mappings as a basis and combine these with the overrides to create the destination settings and mappings. + +[source,console] +-------------------------------------------------- +POST _create_from/my-index/my-new-index +{ + "settings_override": { + "index": { + "number_of_shards": 5 + } + }, + "mappings_override": { + "properties": { + "field2": { "type": "boolean" } + } + } +} +-------------------------------------------------- + +Since the destination index is empty, we very likely will want to write into the index after creation. +This would not be possible if the source index contains an <> which is copied over to the destination index. +One way to handle this is to remove the index write block using a settings override. For example, the following +settings override removes all index blocks. + + +[source,console] +-------------------------------------------------- +POST _create_from/my-index/my-new-index +{ + "settings_override": { + "index": { + "blocks.write": null, + "blocks.read": null, + "blocks.read_only": null, + "blocks.read_only_allow_delete": null, + "blocks.metadata": null + } + } +} +-------------------------------------------------- + +Since this is a common scenario, index blocks are actually removed by default. This is controlled with the parameter +`remove_index_blocks`, which defaults to `true`.
If we want the destination index to contain the index blocks from +the source index, we can do the following: + +[source,console] +-------------------------------------------------- +POST _create_from/my-index/my-new-index +{ + "remove_index_blocks": false +} +-------------------------------------------------- diff --git a/docs/reference/migration/apis/data-stream-reindex-cancel.asciidoc b/docs/reference/migration/apis/data-stream-reindex-cancel.asciidoc new file mode 100644 index 0000000000000..8866fc5332a1b --- /dev/null +++ b/docs/reference/migration/apis/data-stream-reindex-cancel.asciidoc @@ -0,0 +1,64 @@ +[role="xpack"] +[[data-stream-reindex-cancel-api]] +=== Reindex data stream cancel API +++++ +Reindex data stream cancel +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-migration[Migration APIs]. +-- + +include::{es-ref-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] + +Cancels a running data stream reindex task which was started by the <>. +Any backing indices that have already been reindexed and swapped into the data stream will remain in the data stream. +Only backing indices which are currently being reindexed, or pending backing indices which are still waiting to be reindexed, will be cancelled. +Once a data stream reindex task is cancelled, it will no longer be accessible through the +<>. If a reindex task is not currently running, +this API will return `resource_not_found_exception`. + + +/////////////////////////////////////////////////////////// +[source,console] +------------------------------------------------------ +POST _migration/reindex +{ + "source": { + "index": "my-data-stream" + }, + "mode": "upgrade" +} +------------------------------------------------------ +// TESTSETUP +// TEST[setup:my_data_stream] +/////////////////////////////////////////////////////////// + + +[source,console] +---- +POST _migration/reindex/my-data-stream/_cancel +---- +// TEST[teardown:data_stream_cleanup] + +[[data-stream-reindex-cancel-request]] +==== {api-request-title} + +`POST /_migration/reindex//_cancel` + + +[[data-stream-reindex-cancel-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> for the data stream. + +[[data-stream-reindex-cancel-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Name of data stream to cancel reindexing. + diff --git a/docs/reference/migration/apis/data-stream-reindex-status.asciidoc b/docs/reference/migration/apis/data-stream-reindex-status.asciidoc new file mode 100644 index 0000000000000..6c391d7571a8a --- /dev/null +++ b/docs/reference/migration/apis/data-stream-reindex-status.asciidoc @@ -0,0 +1,157 @@ +[role="xpack"] +[[data-stream-reindex-status-api]] +=== Reindex data stream status API +++++ +Reindex data stream status +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-migration[Migration APIs]. +-- + +include::{es-ref-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] + +Obtains the current status of a reindex task for the requested data stream. This status is +available while the reindex task is running and for 24 hours after completion of the task, +whether it succeeds or fails. If the task is cancelled, the status is no longer available. +If the task fails, the exception will be listed within the status.
+ +/////////////////////////////////////////////////////////// +[source,console] +------------------------------------------------------ +POST _migration/reindex +{ + "source": { + "index": "my-data-stream" + }, + "mode": "upgrade" +} +------------------------------------------------------ +// TESTSETUP +// TEST[setup:my_data_stream] + +[source,console] +------------------------------------------------------ +POST /_migration/reindex/my-data-stream/_cancel +DELETE _data_stream/my-data-stream +DELETE _index_template/my-data-stream-template +------------------------------------------------------ +// TEARDOWN +/////////////////////////////////////////////////////////// + + +[[data-stream-reindex-status-api-request]] +==== {api-request-title} + +`GET /_migration/reindex//_status` + + +[[data-stream-reindex-status-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> for the data stream. + +[[data-stream-reindex-status-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Name of data stream to get status for. The reindex task for the +data stream should be currently running or have been completed in the last 24 hours. + + +[role="child_attributes"] +[[data-stream-reindex-status-response-body]] +==== {api-response-body-title} + +`start_time`:: +(Optional, <>) The time when the reindex task started. + +`start_time_millis`:: +(integer) The time when the reindex task started, in milliseconds since the epoch. + +`complete`:: +(boolean) `false` if the reindex task is still running, and `true` if the task has completed with success or failure. + +`total_indices_in_data_stream`:: +(integer) The total number of backing indices in the data stream, including the write index. + +`total_indices_requiring_upgrade`:: +(integer) The number of backing indices that need to be upgraded. These will consist of the indices which have an +older version and are not read-only. + +`successes`:: +(integer) The number of backing indices which have already been successfully upgraded. + +`in_progress`:: +(array of objects) Information on the backing indices which are currently being reindexed. ++ +.Properties of objects in `in_progress` +[%collapsible%open] +===== +`index`:: +(string) The name of the source backing index. + +`total_doc_count`:: +(integer) The number of documents in the source backing index. + +`reindexed_doc_count`:: +(integer) The number of documents which have already been added to the destination backing index. +===== + +`pending`:: +(integer) The number of backing indices which still need to be upgraded and have not yet been started. + +`errors`:: +(array of objects) Information on any errors which have occurred. ++ +.Properties of objects in `errors` +[%collapsible%open] +===== +`index`:: +(string) The name of a backing index which has had an error during reindex. + +`message`:: +(string) Description of the error. +===== + +`exceptions`:: +(Optional, string) +Exception message for a reindex failure if the failure could not be tied to a particular index. 
+ + +[[data-stream-reindex-status-example]] +==== {api-examples-title} + +[source,console] +---- +GET _migration/reindex/my-data-stream/_status +---- + +The following is a typical response: +[source,console-result] +---- +{ + "start_time_millis": 1737676174349, + "complete": false, + "total_indices_in_data_stream": 4, + "total_indices_requiring_upgrade": 3, + "successes": 1, + "in_progress": [ + { + "index": ".ds-my-data-stream-2025.01.23-000002", + "total_doc_count": 10000000, + "reindexed_doc_count": 1000 + } + ], + "pending": 1, + "errors": [] +} +---- +// TEST[skip:cannot easily clean up reindex task between tests] + +For a more in-depth example showing the usage of this API along with the <> and <> APIs, +see this <>. diff --git a/docs/reference/migration/apis/data-stream-reindex.asciidoc b/docs/reference/migration/apis/data-stream-reindex.asciidoc new file mode 100644 index 0000000000000..4641e0fe0911a --- /dev/null +++ b/docs/reference/migration/apis/data-stream-reindex.asciidoc @@ -0,0 +1,358 @@ +[role="xpack"] +[[data-stream-reindex-api]] +=== Reindex data stream API +++++ +Reindex data stream +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-migration[Migration APIs]. +-- + +include::{es-ref-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] + +The reindex data stream API is used to upgrade the backing indices of a data stream to the most +recent major version. It works by reindexing each backing index into a new index, then replacing the original +backing index with its replacement and deleting the original backing index. The settings and mappings +from the original backing indices are copied to the resulting backing indices. + +This API runs in the background because reindexing all indices in a large data stream +is expected to take a significant amount of time and resources. The endpoint will return immediately and a persistent +task will be created to run in the background. The current status of the task can be checked with +the <>. This status will be available for 24 hours after the task completes, whether +it finished successfully or failed. If the status is still available for a task, the task must be cancelled before it can be re-run. +A running or recently completed data stream reindex task can be cancelled using the <>. + +/////////////////////////////////////////////////////////// +[source,console] +------------------------------------------------------ +POST /_migration/reindex/my-data-stream/_cancel +DELETE _data_stream/my-data-stream +DELETE _index_template/my-data-stream-template +------------------------------------------------------ +// TEARDOWN +/////////////////////////////////////////////////////////// + + +[[data-stream-reindex-api-request]] +==== {api-request-title} + +`POST /_migration/reindex` + + +[[data-stream-reindex-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> for the data stream. + +[[data-stream-reindex-body]] +==== {api-request-body-title} + +`source`:: +`index`::: +(Required, string) The name of the data stream to upgrade. + +`mode`:: +(Required, enum) Set to `upgrade` to upgrade the data stream in-place, using the same source and destination +data stream. Each out-of-date backing index will be reindexed. Then the new backing index is swapped into the data stream and the old index is deleted. +Currently, the only allowed value for this parameter is `upgrade`.
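+Putting these fields together, a complete request has the following shape. This is the same call used in the walkthrough below:
+
+[source,console]
+----
+POST _migration/reindex
+{
+  "source": {
+    "index": "my-data-stream"
+  },
+  "mode": "upgrade"
+}
+----
+// TEST[skip:duplicated in the examples below]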
+ +[[reindex-data-stream-api-settings]] +==== Settings + +You can use the following settings to control the behavior of the reindex data stream API: + +[[migrate_max_concurrent_indices_reindexed_per_data_stream-setting]] +// tag::migrate_max_concurrent_indices_reindexed_per_data_stream-setting-tag[] +`migrate.max_concurrent_indices_reindexed_per_data_stream` +(<>) +The number of backing indices within a given data stream which will be reindexed concurrently. Defaults to `1`. +// end::migrate_max_concurrent_indices_reindexed_per_data_stream-setting-tag[] + +[[migrate_data_stream_reindex_max_request_per_second-setting]] +// tag::migrate_data_stream_reindex_max_request_per_second-setting-tag[] +`migrate.data_stream_reindex_max_request_per_second` +(<>) +The average maximum number of documents within a given backing index to reindex per second. +Defaults to `1000`, though it can be any decimal number greater than `0`. +To remove throttling, set to `-1`. +This setting can be used to throttle the reindex process and manage resource usage. +Consult the <> for more information. +// end::migrate_data_stream_reindex_max_request_per_second-setting-tag[] + + +[[reindex-data-stream-api-example]] +==== {api-examples-title} + +Assume we have a data stream `my-data-stream` with the following backing indices, all of which have index major version 7.x. + +* .ds-my-data-stream-2025.01.23-000001 +* .ds-my-data-stream-2025.01.23-000002 +* .ds-my-data-stream-2025.01.23-000003 + +Let's also assume that `.ds-my-data-stream-2025.01.23-000003` is the write index. +If {es} is version 8.x and we wish to upgrade to major version 9.x, the version 7.x indices must be upgraded in preparation. +We can use this API to reindex a data stream with version 7.x backing indices and make them version 8 backing indices. + +Start by calling the API: + +[[reindex-data-stream-start]] +[source,console] +---- +POST _migration/reindex +{ + "source": { + "index": "my-data-stream" + }, + "mode": "upgrade" +} +---- +// TEST[setup:my_data_stream] + + +As this task runs in the background, this API will return immediately. +The task will do the following. + +First, the data stream is rolled over. So that no documents are lost during the reindex, we add <> + to the existing backing indices before reindexing them. Since a data stream's write index cannot have a write block, + the data stream must be rolled over. This will produce a new write index, `.ds-my-data-stream-2025.01.23-000004`, which + has an 8.x version and thus does not need to be upgraded. + +Once the data stream has a write index with an 8.x version, we can proceed with reindexing the old indices. +For each of the version 7.x indices, we now do the following: + +* Add a write block to the source index to guarantee that no writes are lost. +* Open the source index if it is closed. +* Delete the destination index if one exists. This is done in case we are retrying after a failure, so that we start with a fresh index. +* Create the destination index using the <>. +This copies the settings and mappings from the old backing index to the new backing index. +* Use the <> to copy the contents of the old backing index to the new backing index. +* Close the destination index if the source index was originally closed. +* Replace the old index in the data stream with the new index, using the <>. +* Finally, the old backing index is deleted. + +By default, only one backing index will be processed at a time. +This can be modified using the <>.
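+For example, a minimal sketch of raising the concurrency with the <> (the value `2` here is illustrative; pick a value suited to your cluster's resources):
+
+[source,console]
+----
+PUT /_cluster/settings
+{
+  "persistent" : {
+    "migrate.max_concurrent_indices_reindexed_per_data_stream" : 2
+  }
+}
+----
+// TEST[skip:setting change shown for illustration]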
+ +While the reindex data stream task is running, we can inspect the current status using the <>: +[source,console] +---- +GET /_migration/reindex/my-data-stream/_status +---- +// TEST[continued] + +For the above example, the following would be a possible status: + +[source,console-result] +---- +{ + "start_time_millis": 1737676174349, + "complete": false, + "total_indices_in_data_stream": 4, + "total_indices_requiring_upgrade": 3, + "successes": 0, + "in_progress": [ + { + "index": ".ds-my-data-stream-2025.01.23-000001", + "total_doc_count": 10000000, + "reindexed_doc_count": 999999 + } + ], + "pending": 2, + "errors": [] +} +---- +// TEST[skip:specific value is part of explanation] + +This output means that the first backing index, `.ds-my-data-stream-2025.01.23-000001`, is currently being processed, +and none of the backing indices have yet completed. Notice that `total_indices_in_data_stream` has a value of `4`, +because after the rollover, there are 4 indices in the data stream. But the new write index has an 8.x version, and +thus doesn't need to be reindexed, so `total_indices_requiring_upgrade` is only 3. + + + +[[reindex-data-stream-cancel-restart]] +===== Cancelling and Restarting +The <> provide a few ways to control the performance and +resource usage of a reindex task. This example shows how we can stop a running reindex task, modify the settings, +and restart the task. + +Continuing with the above example, assume the reindexing task has not yet completed, and the <> +returns the following: + +[source,console-result] +---- +{ + "start_time_millis": 1737676174349, + "complete": false, + "total_indices_in_data_stream": 4, + "total_indices_requiring_upgrade": 3, + "successes": 1, + "in_progress": [ + { + "index": ".ds-my-data-stream-2025.01.23-000002", + "total_doc_count": 10000000, + "reindexed_doc_count": 1000 + } + ], + "pending": 1, + "errors": [] + } +---- +// TEST[skip:specific value is part of explanation] + +Let's assume the task has been running for a long time. By default, we throttle how many requests the reindex operation +can execute per second. This keeps the reindex process from consuming too many resources. +But the default value of `1000` requests per second will not be correct for all use cases. +The <> +can be used to increase or decrease the number of requests per second, or to remove the throttle entirely. + +Changing this setting won't have an effect on the backing index that is currently being reindexed. +For example, changing the setting won't have an effect on `.ds-my-data-stream-2025.01.23-000002`, but would have an +effect on the next backing index. + +But in the above status, `.ds-my-data-stream-2025.01.23-000002` has values of 1000 and 10M for the +`reindexed_doc_count` and `total_doc_count`, respectively. This means it has only reindexed 0.01% of the documents in the index. +It might be a good time to cancel the run and optimize some settings without losing much work. +So we call the <>: + +[source,console] +---- +POST /_migration/reindex/my-data-stream/_cancel +---- +// TEST[skip:task will not be present] + +Now we can use the <> to increase the throttle: + +[source,console] +-------------------------------------------------- +PUT /_cluster/settings +{ + "persistent" : { + "migrate.data_stream_reindex_max_request_per_second" : 10000 + } +} +-------------------------------------------------- +// TEST[continued] + +The <> can now be used to restart reindexing.
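+For example, issuing the same request as before restarts the task:
+
+[source,console]
+----
+POST _migration/reindex
+{
+  "source": {
+    "index": "my-data-stream"
+  },
+  "mode": "upgrade"
+}
+----
+// TEST[skip:task state depends on previous steps]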
+Because the first backing index, `.ds-my-data-stream-2025.01.23-000001`, has already been reindexed and thus is already version 8.x,
+it will be skipped. The task will start by reindexing `.ds-my-data-stream-2025.01.23-000002` again from the beginning.
+
+Later, once all the backing indices have finished, the <> will return something like the following:
+
+[source,console-result]
+----
+{
+  "start_time_millis": 1737676174349,
+  "complete": true,
+  "total_indices_in_data_stream": 4,
+  "total_indices_requiring_upgrade": 2,
+  "successes": 2,
+  "in_progress": [],
+  "pending": 0,
+  "errors": []
+}
+----
+// TEST[skip:specific value is part of explanation]
+
+Notice that the value of `total_indices_requiring_upgrade` is `2`, unlike the previous status, which had a value of `3`.
+This is because `.ds-my-data-stream-2025.01.23-000001` was upgraded before the task cancellation.
+After the restart, the API sees that it does not need to be upgraded, and thus does not include it in `total_indices_requiring_upgrade` or `successes`,
+even though it was upgraded successfully.
+
+The completed status will be accessible from the status API for 24 hours after the task completes.
+
+We can now check the data stream to verify that the indices were upgraded:
+
+[source,console]
+----
+GET _data_stream/my-data-stream?filter_path=data_streams.indices.index_name
+----
+// TEST[continued]
+
+which returns:
+
+[source,console-result]
+----
+{
+  "data_streams": [
+    {
+      "indices": [
+        {
+          "index_name": ".migrated-ds-my-data-stream-2025.01.23-000003"
+        },
+        {
+          "index_name": ".migrated-ds-my-data-stream-2025.01.23-000002"
+        },
+        {
+          "index_name": ".migrated-ds-my-data-stream-2025.01.23-000001"
+        },
+        {
+          "index_name": ".ds-my-data-stream-2025.01.23-000004"
+        }
+      ]
+    }
+  ]
+}
+----
+// TEST[skip:did not actually run reindex]
+
+Index `.ds-my-data-stream-2025.01.23-000004` is the write index and didn't need to be upgraded because it was created with version 8.x.
+The other three backing indices are now prefixed with `.migrated` because they have been upgraded.
+
+We can now check the indices and verify that they have version 8.x:
+
+[source,console]
+----
+GET .migrated-ds-my-data-stream-2025.01.23-000001?human&filter_path=*.settings.index.version.created_string
+----
+// TEST[skip:migrated index does not exist]
+
+which returns:
+
+[source,console-result]
+----
+{
+  ".migrated-ds-my-data-stream-2025.01.23-000001": {
+    "settings": {
+      "index": {
+        "version": {
+          "created_string": "8.18.0"
+        }
+      }
+    }
+  }
+}
+----
+// TEST[skip:migrated index does not exist]
+
+[[reindex-data-stream-handling-failure]]
+===== Handling Failures
+Since the reindex data stream API runs in the background, failure information can be obtained through the <>.
+For example, if the backing index `.ds-my-data-stream-2025.01.23-000002` was accidentally deleted by a user, we would see a status like the following:
+
+[source,console-result]
+----
+{
+  "start_time_millis": 1737676174349,
+  "complete": false,
+  "total_indices_in_data_stream": 4,
+  "total_indices_requiring_upgrade": 3,
+  "successes": 1,
+  "in_progress": [],
+  "pending": 1,
+  "errors": [
+    {
+      "index": ".ds-my-data-stream-2025.01.23-000002",
+      "message": "index [.ds-my-data-stream-2025.01.23-000002] does not exist"
+    }
+  ]
+}
+----
+// TEST[skip:result just part of explanation]
+
+Once the issue has been fixed, the failed reindex task can be re-run. First, the failed run's status must be cleared
+using the <>. Then the
+<> can be called to pick up where it left off.
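+
+As a sketch of the recovery flow, assuming the elided reference above is the cancel API shown earlier
+(which also clears the stored status of a failed run), you would first clear the failed status:
+
+[source,console]
+----
+POST /_migration/reindex/my-data-stream/_cancel
+----
+// TEST[skip:no failed task to clear]
+
+and then issue the original `POST _migration/reindex` request again to restart the upgrade.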
diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc
index 11aca45b003fa..1624910aa3837 100644
--- a/docs/reference/migration/index.asciidoc
+++ b/docs/reference/migration/index.asciidoc
@@ -1,6 +1,8 @@
 include::migration_intro.asciidoc[]
 
+* <>
 * <>
 
+include::migrate_9_1.asciidoc[]
 include::migrate_9_0.asciidoc[]
diff --git a/docs/reference/migration/migrate_9_0.asciidoc b/docs/reference/migration/migrate_9_0.asciidoc
index 8f0b16e31b56e..71516fdd540d6 100644
--- a/docs/reference/migration/migrate_9_0.asciidoc
+++ b/docs/reference/migration/migrate_9_0.asciidoc
@@ -73,6 +73,7 @@ Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For det
 The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed though.
 ====
 
+
 [discrete]
 [[breaking_90_cluster_and_node_setting_changes]]
 ==== Cluster and node setting changes
@@ -318,3 +319,320 @@ The `elser` service of the inference API will be removed in an upcoming release
 In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API.
 ====
+[discrete]
+[[breaking_90_anomaly_detection_results]]
+=== Anomaly detection results migration
+
+The {anomaly-detect} result indices `.ml-anomalies-*` created in {es} 7.x must be either reindexed, marked read-only, or deleted before upgrading to 9.x.
+
+**Reindexing**: While anomaly detection results are being reindexed, jobs continue to run and process new data.
+However, you cannot completely delete an {anomaly-job} that stores results in this index until the reindexing is complete.
+
+**Marking indices as read-only**: This is useful for large indices that contain the results of only one or a few {anomaly-jobs}.
+If you delete these jobs later, you will not be able to create a new job with the same name.
+
+**Deleting**: Delete jobs that are no longer needed in the {ml-app} in {kib}.
+The result index is deleted when all jobs that store results in it have been deleted.
+
+[[which_indices_require_attention]]
+.Which indices require attention?
+[%collapsible]
+====
+
+To identify indices that require action, use the <>:
+
+[source,console]
+------------------------------------------------------------
+GET /.ml-anomalies-*/_migration/deprecations
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+The response contains the list of critical deprecation warnings in the `index_settings` section:
+
+[source,console-result]
+------------------------------------------------------------
+"index_settings": {
+  ".ml-anomalies-shared": [
+    {
+      "level": "critical",
+      "message": "Index created before 8.0",
+      "url": "https://ela.st/es-deprecation-8-reindex",
+      "details": "This index was created with version 7.8.23 and is not compatible with 9.0. Reindex or remove the index before upgrading.",
+      "resolve_during_rolling_upgrade": false
+    }
+  ]
+}
+------------------------------------------------------------
+// NOTCONSOLE
+
+
+====
+
+[[reindex_anomaly_result_index]]
+.Reindexing anomaly result indices
+[%collapsible]
+====
+For an index smaller than 10 GB that contains results from multiple jobs that are still required, we recommend reindexing it into the new format using the UI.
+You can use the <> to obtain the size of an index:
+
+[source,console]
+------------------------------------------------------------
+GET _cat/indices/.ml-anomalies-custom-example?v&h=index,store.size
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+The reindexing can be initiated in the Kibana Upgrade Assistant.
+
+If an index is larger than 10 GB, we recommend using the Reindex API instead.
+Reindexing consists of the following steps:
+
+. Set the original index to read-only.
++
+--
+[source,console]
+------------------------------------------------------------
+PUT .ml-anomalies-custom-example/_block/read_only
+------------------------------------------------------------
+// TEST[skip:TBD]
+--
+
+. Create a new index from the legacy index.
++
+--
+[source,console]
+------------------------------------------------------------
+POST _create_from/.ml-anomalies-custom-example/.reindexed-v9-ml-anomalies-custom-example
+------------------------------------------------------------
+// TEST[skip:TBD]
+--
+
+. Reindex the documents.
+To accelerate reindexing, we recommend setting the number of replicas to `0` before reindexing and setting it back to the original number once reindexing is complete.
+.. Get the number of replicas.
++
+--
+[source,console]
+------------------------------------------------------------
+GET /.reindexed-v9-ml-anomalies-custom-example/_settings
+------------------------------------------------------------
+// TEST[skip:TBD]
+Note the number of replicas in the response. For example:
+[source,console-result]
+------------------------------------------------------------
+{
+  ".reindexed-v9-ml-anomalies-custom-example": {
+    "settings": {
+      "index": {
+        "number_of_replicas": "1",
+        "number_of_shards": "1"
+      }
+    }
+  }
+}
+------------------------------------------------------------
+// NOTCONSOLE
+--
+.. Set the number of replicas to `0`.
++
+--
+[source,console]
+------------------------------------------------------------
+PUT /.reindexed-v9-ml-anomalies-custom-example/_settings
+{
+  "index": {
+    "number_of_replicas": 0
+  }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+--
+.. Start the reindexing process in asynchronous mode.
++
+--
+[source,console]
+------------------------------------------------------------
+POST _reindex?wait_for_completion=false
+{
+  "source": {
+    "index": ".ml-anomalies-custom-example"
+  },
+  "dest": {
+    "index": ".reindexed-v9-ml-anomalies-custom-example"
+  }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+The response will contain a `task_id`. You can check when the task is complete using the following command:
+[source,console]
+------------------------------------------------------------
+GET _tasks/
+------------------------------------------------------------
+// TEST[skip:TBD]
+--
+.. Set the number of replicas back to the original number when reindexing is finished.
++
+--
+[source,console]
+------------------------------------------------------------
+PUT /.reindexed-v9-ml-anomalies-custom-example/_settings
+{
+  "index": {
+    "number_of_replicas": ""
+  }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+--
+
+. Get the aliases that the original index points to.
++
+--
+[source,console]
+------------------------------------------------------------
+GET .ml-anomalies-custom-example/_alias
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+The response may contain multiple aliases if the results of multiple jobs are stored in the same index.
+
+[source,console-result]
+------------------------------------------------------------
+{
+  ".ml-anomalies-custom-example": {
+    "aliases": {
+      ".ml-anomalies-example1": {
+        "filter": {
+          "term": {
+            "job_id": {
+              "value": "example1"
+            }
+          }
+        },
+        "is_hidden": true
+      },
+      ".ml-anomalies-example2": {
+        "filter": {
+          "term": {
+            "job_id": {
+              "value": "example2"
+            }
+          }
+        },
+        "is_hidden": true
+      }
+    }
+  }
+}
+------------------------------------------------------------
+// NOTCONSOLE
+--
+
+. Now you can reassign the aliases to the new index and delete the original index in one step.
+Note that when adding the new index to the aliases, you must use the same `filter` and `is_hidden` parameters as for the original index.
++
+--
+[source,console]
+------------------------------------------------------------
+POST _aliases
+{
+  "actions": [
+    {
+      "add": {
+        "index": ".reindexed-v9-ml-anomalies-custom-example",
+        "alias": ".ml-anomalies-example1",
+        "filter": {
+          "term": {
+            "job_id": {
+              "value": "example1"
+            }
+          }
+        },
+        "is_hidden": true
+      }
+    },
+    {
+      "add": {
+        "index": ".reindexed-v9-ml-anomalies-custom-example",
+        "alias": ".ml-anomalies-example2",
+        "filter": {
+          "term": {
+            "job_id": {
+              "value": "example2"
+            }
+          }
+        },
+        "is_hidden": true
+      }
+    },
+    {
+      "remove": {
+        "index": ".ml-anomalies-custom-example",
+        "aliases": ".ml-anomalies-*"
+      }
+    },
+    {
+      "remove_index": {
+        "index": ".ml-anomalies-custom-example"
+      }
+    },
+    {
+      "add": {
+        "index": ".reindexed-v9-ml-anomalies-custom-example",
+        "alias": ".ml-anomalies-custom-example",
+        "is_hidden": true
+      }
+    }
+  ]
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+--
+====
+
+[[mark_anomaly_result_index_read_only]]
+.Marking anomaly result indices as read-only
+[%collapsible]
+====
+Legacy indices created in {es} 7.x can be made read-only and remain supported in {es} 9.x.
+Making an index with a large amount of historical results read-only allows for a quick migration to the next major release, since you don't have to wait for the data to be reindexed into the new format.
+However, this approach has a limitation: even after you delete an {anomaly-job}, the historical results associated with it are not completely deleted.
+Therefore, the system will prevent you from creating a new job with the same name.
+
+To set the index as read-only, add the `write` block to the index:
+
+[source,console]
+------------------------------------------------------------
+PUT .ml-anomalies-custom-example/_block/write
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+Indices created in {es} 7.x that have a `write` block will not raise a critical deprecation warning.
+====
+
+[[delete_anomaly_result_index]]
+.Deleting anomaly result indices
+[%collapsible]
+====
+You can delete an index if it only contains results of jobs that are no longer required.
+To list all jobs that stored results in an index, use the terms aggregation: + +[source,console] +------------------------------------------------------------ +GET .ml-anomalies-custom-example/_search +{ + "size": 0, + "aggs": { + "job_ids": { + "terms": { + "field": "job_id", + "size": 100 + } + } + } +} +------------------------------------------------------------ +// TEST[skip:TBD] + +The jobs can be deleted in the UI. +After the last job is deleted, the index will be deleted as well. +==== diff --git a/docs/reference/migration/migrate_9_1.asciidoc b/docs/reference/migration/migrate_9_1.asciidoc new file mode 100644 index 0000000000000..07966c04ae109 --- /dev/null +++ b/docs/reference/migration/migrate_9_1.asciidoc @@ -0,0 +1,20 @@ +[[migrating-9.1]] +== Migrating to 9.1 +++++ +9.1 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 9.1. + +See also <> and <>. + +coming::[9.1.0] + + +[discrete] +[[breaking-changes-9.1]] +=== Breaking changes + +There are no breaking changes in {es} 9.1. + diff --git a/docs/reference/migration/migration.asciidoc b/docs/reference/migration/migration.asciidoc index 57b6c88aefea4..850d1b1edd05d 100644 --- a/docs/reference/migration/migration.asciidoc +++ b/docs/reference/migration/migration.asciidoc @@ -14,6 +14,14 @@ include::apis/shared-migration-apis-tip.asciidoc[] * <> * <> +* <> +* <> +* <> +* <> include::apis/deprecation.asciidoc[] include::apis/feature-migration.asciidoc[] +include::apis/data-stream-reindex.asciidoc[] +include::apis/data-stream-reindex-status.asciidoc[] +include::apis/data-stream-reindex-cancel.asciidoc[] +include::apis/create-index-from-source.asciidoc[] diff --git a/docs/reference/ml/anomaly-detection/functions/ml-count-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-count-functions.asciidoc index aef8e13bd429e..54298e80b92e2 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-count-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-count-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-count-functions"] +[[ml-count-functions]] = Count functions Count functions detect anomalies when the number of events in a bucket is diff --git a/docs/reference/ml/anomaly-detection/functions/ml-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-functions.asciidoc index ec5e429bfc584..a44e3ceaa3158 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-functions.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[ml-functions]] = Function reference diff --git a/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc index 63a0f047db647..6c5f075ab24b2 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-geo-functions"] +[[ml-geo-functions]] = Geographic functions The geographic functions detect anomalies in the geographic location of the diff --git a/docs/reference/ml/anomaly-detection/functions/ml-info-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-info-functions.asciidoc index 7197e535e55e3..d1cbf39cdbe3b 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-info-functions.asciidoc +++ 
b/docs/reference/ml/anomaly-detection/functions/ml-info-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-info-functions"] +[[ml-info-functions]] = Information content functions The information content functions detect anomalies in the amount of information diff --git a/docs/reference/ml/anomaly-detection/functions/ml-metric-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-metric-functions.asciidoc index 31ce07b01570f..bbd9dfc8f09d1 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-metric-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-metric-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-metric-functions"] +[[ml-metric-functions]] = Metric functions The metric functions include functions such as mean, min and max. These values diff --git a/docs/reference/ml/anomaly-detection/functions/ml-rare-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-rare-functions.asciidoc index c993800a9f65b..69378d64f525c 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-rare-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-rare-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-rare-functions"] +[[ml-rare-functions]] = Rare functions The rare functions detect values that occur rarely in time or rarely for a diff --git a/docs/reference/ml/anomaly-detection/functions/ml-sum-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-sum-functions.asciidoc index 423a00154fe88..f0b8b838933b5 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-sum-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-sum-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-sum-functions"] +[[ml-sum-functions]] = Sum functions The sum functions detect anomalies when the sum of a field in a bucket is diff --git a/docs/reference/ml/anomaly-detection/functions/ml-time-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-time-functions.asciidoc index 096fd817ccc4c..7e2301dca0a25 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-time-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-time-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-time-functions"] +[[ml-time-functions]] = Time functions The time functions detect events that happen at unusual times, either of the day diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index af384c2c90011..590a1a3089f90 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -918,7 +918,7 @@ An array of index names. Wildcards are supported. For example: `["it_ops_metrics", "server*"]`. + -- -NOTE: If any indices are in remote clusters then the {ml} nodes need to have the +NOTE: If any indices are in remote clusters then the master nodes and the {ml} nodes need to have the `remote_cluster_client` role. -- diff --git a/docs/reference/modules/cluster/remote-clusters-cert.asciidoc b/docs/reference/modules/cluster/remote-clusters-cert.asciidoc index 6602c807f5b64..1540b05a9bcfa 100644 --- a/docs/reference/modules/cluster/remote-clusters-cert.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-cert.asciidoc @@ -1,6 +1,8 @@ [[remote-clusters-cert]] === Add remote clusters using TLS certificate authentication +deprecated::[9.0.0,"Certificate based authentication is deprecated. 
Configure <> instead or follow a guide on how to <>."] + To add a remote cluster using TLS certificate authentication: . <> @@ -80,4 +82,4 @@ generate certificates for all nodes simplifies this task. include::remote-clusters-connect.asciidoc[] :!trust-mechanism: -include::{es-ref-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[leveloffset=+1] \ No newline at end of file +include::{es-ref-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[leveloffset=+1] diff --git a/docs/reference/modules/remote-clusters.asciidoc b/docs/reference/modules/remote-clusters.asciidoc index 87078c0f1956f..11d4400254407 100644 --- a/docs/reference/modules/remote-clusters.asciidoc +++ b/docs/reference/modules/remote-clusters.asciidoc @@ -56,6 +56,8 @@ is performed on the local cluster and a user's role names are passed to the remote cluster. In this model, a superuser on the local cluster gains total read access to the remote cluster, so it is only suitable for clusters that are in the same security domain. <>. ++ +deprecated::[9.0.0, "Use <> instead."] [[sniff-proxy-modes]] [discrete] diff --git a/docs/reference/query-dsl/match-phrase-query.asciidoc b/docs/reference/query-dsl/match-phrase-query.asciidoc index f6b0fa19001f6..88046bc009e7d 100644 --- a/docs/reference/query-dsl/match-phrase-query.asciidoc +++ b/docs/reference/query-dsl/match-phrase-query.asciidoc @@ -19,9 +19,45 @@ GET /_search } -------------------------------------------------- +[[match-phrase-field-params]] +==== Parameters for `` +`query`:: ++ +-- +(Required) Text, number, boolean value or date you wish to find in the provided +``. +-- + +`analyzer`:: +(Optional, string) <> used to convert the text in the `query` +value into tokens. Defaults to the <> mapped for the ``. If no analyzer is mapped, the index's +default analyzer is used. + +`slop`:: +(Optional, integer) Maximum number of positions allowed between matching tokens. +Defaults to `0`. Transposed terms have a slop of `2`. + +`zero_terms_query`:: ++ +-- +(Optional, string) Indicates whether no documents are returned if the `analyzer` +removes all tokens, such as when using a `stop` filter. Valid values are: + + `none` (Default):: +No documents are returned if the `analyzer` removes all tokens. + + `all`:: +Returns all documents, similar to a <> +query. +-- + A phrase query matches terms up to a configurable `slop` (which defaults to 0) in any order. Transposed terms have a slop of 2. +[[query-dsl-match-query-phrase-analyzer]] +===== Analyzer in the match phrase query + The `analyzer` can be set to control which analyzer will perform the analysis process on the text. It defaults to the field explicit mapping definition, or the default search analyzer, for example: @@ -40,5 +76,3 @@ GET /_search } } -------------------------------------------------- - -This query also accepts `zero_terms_query`, as explained in <>. diff --git a/docs/reference/query-dsl/sparse-vector-query.asciidoc b/docs/reference/query-dsl/sparse-vector-query.asciidoc index d46d649079d70..2df9bfd04e77f 100644 --- a/docs/reference/query-dsl/sparse-vector-query.asciidoc +++ b/docs/reference/query-dsl/sparse-vector-query.asciidoc @@ -62,11 +62,14 @@ GET _search (Required, string) The name of the field that contains the token-weight pairs to be searched against. `inference_id`:: -(Optional, string) The <> to use to convert the query text into token-weight pairs. +(Optional, string) +The <> to use to convert the query text into token-weight pairs. 
It must be the same inference ID that was used to create the tokens from the input text. Only one of `inference_id` and `query_vector` is allowed. If `inference_id` is specified, `query` must also be specified. If all queried fields are of type <>, the inference ID associated with the `semantic_text` field will be inferred. +You can reference a `deployment_id` of a {ml} trained model deployment as an `inference_id`. +For example, if you download and deploy the ELSER model in the {ml-cap} trained models UI in {kib}, you can use the `deployment_id` of that deployment as the `inference_id`. `query`:: (Optional, string) The query text you want to use for search. diff --git a/docs/reference/quickstart/getting-started.asciidoc b/docs/reference/quickstart/getting-started.asciidoc index 03bfb62548b25..192b70c11c635 100644 --- a/docs/reference/quickstart/getting-started.asciidoc +++ b/docs/reference/quickstart/getting-started.asciidoc @@ -293,7 +293,7 @@ POST /books/_doc // TEST[continued] <1> The new field. -View the mapping for the `books` index with the <>. The new field `new_field` has been added to the mapping with a `text` data type. +View the mapping for the `books` index with the <>. The new field `language` has been added to the mapping with a `text` data type. [source,console] ---- @@ -328,7 +328,7 @@ GET /books/_mapping } } }, - "new_field": { + "language": { "type": "text", "fields": { "keyword": { diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 615e7135365cd..85a5af66aceb7 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,9 +6,11 @@ This section summarizes the changes in each release. +* <> * <> -- +include::release-notes/9.1.0.asciidoc[] include::release-notes/9.0.0.asciidoc[] diff --git a/docs/reference/release-notes/9.1.0.asciidoc b/docs/reference/release-notes/9.1.0.asciidoc new file mode 100644 index 0000000000000..2f614615005f1 --- /dev/null +++ b/docs/reference/release-notes/9.1.0.asciidoc @@ -0,0 +1,8 @@ +[[release-notes-9.1.0]] +== {es} version 9.1.0 + +coming[9.1.0] + +Also see <>. + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index b87081639c684..eeac565778289 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -8,6 +8,11 @@ ifeval::["{release-state}"!="unreleased"] For detailed information about this release, see the <> and <>. +// Add previous release to the list +Other versions: + +{ref-bare}/9.0/release-highlights.html[9.0] + endif::[] // The notable-highlights tag marks entries that diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 5db1ae10ae902..74956fb1f205a 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -808,6 +808,7 @@ currently supported: * <> * <> * <> +* <> * <> * <> * <> @@ -1338,7 +1339,7 @@ that lower ranked documents have more influence. This value must be greater than equal to `1`. Defaults to `60`. end::rrf-rank-constant[] -tag::rrf-rank-window-size[] +tag::compound-retriever-rank-window-size[] `rank_window_size`:: (Optional, integer) + @@ -1347,15 +1348,54 @@ query. A higher value will improve result relevance at the cost of performance. ranked result set is pruned down to the search request's <>. `rank_window_size` must be greater than or equal to `size` and greater than or equal to `1`. 
 Defaults to the `size` parameter.
-end::rrf-rank-window-size[]
+end::compound-retriever-rank-window-size[]
 
-tag::rrf-filter[]
+tag::compound-retriever-filter[]
 `filter`::
 (Optional, <>)
 +
 Applies the specified <> to all of the specified sub-retrievers,
 according to each retriever's specifications.
-end::rrf-filter[]
+end::compound-retriever-filter[]
+
+tag::linear-retriever-components[]
+`retrievers`::
+(Required, array of objects)
++
+A list of sub-retriever configurations whose result sets will be merged through a weighted sum.
+Each configuration can have a different weight and normalization, depending on the specified retriever.
+
+Each entry specifies the following parameters:
+
+* `retriever`::
+(Required, a <> object)
++
+Specifies the retriever to compute the top documents for. The retriever will produce `rank_window_size`
+results, which will later be merged based on the specified `weight` and `normalizer`.
+
+* `weight`::
+(Optional, float)
++
+The weight that each score of this retriever's top documents will be multiplied by. Must be greater than or equal to `0`. Defaults to `1.0`.
+
+* `normalizer`::
+(Optional, string)
++
+Specifies how to normalize the retriever's scores before applying the specified `weight`.
+Available values are `minmax` and `none`. Defaults to `none`.
+
+** `none`
+** `minmax`:
+A `MinMaxScoreNormalizer` that normalizes scores based on the following formula:
++
+```
+score = (score - min) / (max - min)
+```
+
+See also <> for an example of using a linear retriever and
+independently configuring and applying normalizers to its sub-retrievers.
+end::linear-retriever-components[]
 
 tag::knn-rescore-vector[]
diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc
index 318170ab089bb..e1a7246342a36 100644
--- a/docs/reference/rest-api/info.asciidoc
+++ b/docs/reference/rest-api/info.asciidoc
@@ -177,7 +177,7 @@ Example response:
       },
       "logsdb": {
         "available": true,
-        "enabled": false
+        "enabled": true
       }
     },
     "tagline" : "You know, for X"
diff --git a/docs/reference/rest-api/security/query-role.asciidoc b/docs/reference/rest-api/security/query-role.asciidoc
index acdfbb45b84f6..907e77866cd02 100644
--- a/docs/reference/rest-api/security/query-role.asciidoc
+++ b/docs/reference/rest-api/security/query-role.asciidoc
@@ -31,9 +31,13 @@ Retrieves roles with <> in a <>.
-The query roles API does not retrieve roles that are defined in roles files, nor <> ones.
+The query roles API does not retrieve roles that are defined in `roles.yml` files.
 You can optionally filter the results with a query.
 Also, the results can be paginated and sorted.
 
+NOTE: This API automatically returns <> roles as well.
+The built-in roles can be filtered out by using the `metadata._reserved` field in the query.
+See <> below.
+
 [[security-api-query-role-request-body]]
 ==== {api-request-body-title}
 
@@ -127,12 +131,21 @@ It contains the array of values that have been used for sorting.
[[security-api-query-role-example]] ==== {api-examples-title} -The following request lists all roles, sorted by the role name: +The following request lists all roles (except built-in ones), sorted by the role name: [source,console] ---- POST /_security/_query/role { + "query": { + "bool": { + "must_not": { + "term": { + "metadata._reserved": true + } + } + } + }, "sort": ["name"] } ---- @@ -222,6 +235,7 @@ retrieved for one or more roles: ] } ---- +// TESTRESPONSE[s/"total": 2/"total" : $body.total/] // TEST[continued] <1> The list of roles that were retrieved for this request @@ -287,3 +301,4 @@ POST /_security/_query/role ] } ---- +// TESTRESPONSE[s/"total": 2/"total" : $body.total/] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index bb46c41b4bcc9..1c907ec63d8fe 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -514,7 +514,7 @@ GET /_xpack/usage }, "logsdb": { "available": true, - "enabled": false, + "enabled": true, "indices_count": 0, "indices_with_synthetic_source": 0, "num_docs": 0, diff --git a/docs/reference/rollup/apis/delete-job.asciidoc b/docs/reference/rollup/apis/delete-job.asciidoc index 03f5349e15d4f..59d4aa9b395db 100644 --- a/docs/reference/rollup/apis/delete-job.asciidoc +++ b/docs/reference/rollup/apis/delete-job.asciidoc @@ -86,6 +86,7 @@ If we have a rollup job named `sensor`, it can be deleted with: DELETE _rollup/job/sensor -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Which will return the response: diff --git a/docs/reference/rollup/apis/get-job.asciidoc b/docs/reference/rollup/apis/get-job.asciidoc index 9fff4d665f5fd..6138be8a015a5 100644 --- a/docs/reference/rollup/apis/get-job.asciidoc +++ b/docs/reference/rollup/apis/get-job.asciidoc @@ -95,6 +95,7 @@ job can be retrieved with: GET _rollup/job/sensor -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] The API yields the following response: @@ -198,6 +199,7 @@ PUT _rollup/job/sensor2 <1> GET _rollup/job/_all <2> -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] <1> We create a second job with name `sensor2` <2> Then request all jobs by using `_all` in the GetJobs API diff --git a/docs/reference/rollup/apis/put-job.asciidoc b/docs/reference/rollup/apis/put-job.asciidoc index a60f20a3de5bf..0aed61f629156 100644 --- a/docs/reference/rollup/apis/put-job.asciidoc +++ b/docs/reference/rollup/apis/put-job.asciidoc @@ -287,6 +287,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] <1> This configuration enables date histograms to be used on the `timestamp` field and `terms` aggregations to be used on the `node` field. 
<2> This configuration defines metrics over two fields: `temperature` and diff --git a/docs/reference/rollup/apis/rollup-caps.asciidoc b/docs/reference/rollup/apis/rollup-caps.asciidoc index be1c3ed171a23..68fc5b0b20b5f 100644 --- a/docs/reference/rollup/apis/rollup-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-caps.asciidoc @@ -89,6 +89,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] We can then retrieve the rollup capabilities of that index pattern (`sensor-*`) via the following command: @@ -98,6 +99,7 @@ via the following command: GET _rollup/data/sensor-* -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Which will yield the following response: @@ -170,6 +172,7 @@ We could also retrieve the same information with a request to `_all`: GET _rollup/data/_all -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] But note that if we use the concrete index name (`sensor-1`), we'll retrieve no rollup capabilities: @@ -179,6 +182,7 @@ rollup capabilities: GET _rollup/data/sensor-1 -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] [source,console-result] ---- diff --git a/docs/reference/rollup/apis/rollup-index-caps.asciidoc b/docs/reference/rollup/apis/rollup-index-caps.asciidoc index 830cc332e8f40..e9d8e5c886e5a 100644 --- a/docs/reference/rollup/apis/rollup-index-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-index-caps.asciidoc @@ -86,6 +86,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] If at a later date, we'd like to determine what jobs and capabilities were stored in the `sensor_rollup` index, we can use the get rollup index API: @@ -95,6 +96,7 @@ stored in the `sensor_rollup` index, we can use the get rollup index API: GET /sensor_rollup/_rollup/data -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Note how we are requesting the concrete rollup index name (`sensor_rollup`) as the first part of the URL. This will yield the following response: @@ -170,3 +172,4 @@ instead of explicit indices: GET /*_rollup/_rollup/data -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] diff --git a/docs/reference/rollup/apis/rollup-search.asciidoc b/docs/reference/rollup/apis/rollup-search.asciidoc index 088a74973806b..135fa79e8a375 100644 --- a/docs/reference/rollup/apis/rollup-search.asciidoc +++ b/docs/reference/rollup/apis/rollup-search.asciidoc @@ -111,6 +111,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] 
This rolls up the `sensor-*` pattern and stores the results in `sensor_rollup`. To search this rolled up data, we need to use the `_rollup_search` endpoint. @@ -133,6 +134,7 @@ GET /sensor_rollup/_rollup_search -------------------------------------------------- // TEST[setup:sensor_prefab_data] // TEST[s/_rollup_search/_rollup_search?filter_path=took,timed_out,terminated_early,_shards,hits,aggregations/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] The query is targeting the `sensor_rollup` data, since this contains the rollup data as configured in the job. A `max` aggregation has been used on the @@ -188,6 +190,7 @@ GET sensor_rollup/_rollup_search -------------------------------------------------- // TEST[continued] // TEST[catch:/illegal_argument_exception/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] [source,console-result] ---- @@ -231,6 +234,7 @@ GET sensor-1,sensor_rollup/_rollup_search <1> -------------------------------------------------- // TEST[continued] // TEST[s/_rollup_search/_rollup_search?filter_path=took,timed_out,terminated_early,_shards,hits,aggregations/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] <1> Note the URI now searches `sensor-1` and `sensor_rollup` at the same time When the search is executed, the rollup search endpoint does two things: diff --git a/docs/reference/rollup/apis/start-job.asciidoc b/docs/reference/rollup/apis/start-job.asciidoc index dbeed8b09d1c8..69cdc62f9640a 100644 --- a/docs/reference/rollup/apis/start-job.asciidoc +++ b/docs/reference/rollup/apis/start-job.asciidoc @@ -57,6 +57,7 @@ If we have already created a {rollup-job} named `sensor`, it can be started with POST _rollup/job/sensor/_start -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Which will return the response: @@ -65,4 +66,4 @@ Which will return the response: { "started": true } ----- \ No newline at end of file +---- diff --git a/docs/reference/rollup/apis/stop-job.asciidoc b/docs/reference/rollup/apis/stop-job.asciidoc index 8c0fd6ab2f3af..4d80b56667b83 100644 --- a/docs/reference/rollup/apis/stop-job.asciidoc +++ b/docs/reference/rollup/apis/stop-job.asciidoc @@ -82,6 +82,7 @@ the indexer has fully stopped. This is accomplished with the POST _rollup/job/sensor/_stop?wait_for_completion=true&timeout=10s -------------------------------------------------- // TEST[setup:sensor_started_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] The parameter blocks the API call from returning until either the job has moved to `STOPPED` or the specified time has elapsed. If the specified time elapses diff --git a/docs/reference/rollup/migrating-to-downsampling.asciidoc b/docs/reference/rollup/migrating-to-downsampling.asciidoc index de0089230cae2..995d2418b52a6 100644 --- a/docs/reference/rollup/migrating-to-downsampling.asciidoc +++ b/docs/reference/rollup/migrating-to-downsampling.asciidoc @@ -51,6 +51,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] 
The equivalent <> setup that uses downsampling via DSL: diff --git a/docs/reference/rollup/rollup-getting-started.asciidoc b/docs/reference/rollup/rollup-getting-started.asciidoc index a2b3956c47f79..23288618e11f9 100644 --- a/docs/reference/rollup/rollup-getting-started.asciidoc +++ b/docs/reference/rollup/rollup-getting-started.asciidoc @@ -64,6 +64,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] We give the job the ID of "sensor" (in the url: `PUT _rollup/job/sensor`), and tell it to rollup the index pattern `"sensor-*"`. This job will find and rollup any index that matches that pattern. Rollup summaries are then stored in the `"sensor_rollup"` index. @@ -143,6 +144,7 @@ To start the job, execute this command: POST _rollup/job/sensor/_start -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] [discrete] ==== Searching the rolled results @@ -167,6 +169,7 @@ GET /sensor_rollup/_rollup_search } -------------------------------------------------- // TEST[setup:sensor_prefab_data] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] It's a simple aggregation that calculates the maximum of the `temperature` field. But you'll notice that it is being sent to the `sensor_rollup` index instead of the raw `sensor-*` indices. And you'll also notice that it is using the `_rollup_search` endpoint. Otherwise the syntax @@ -198,6 +201,7 @@ If you were to execute that query, you'd receive a result that looks like a norm ---- // TESTRESPONSE[s/"took" : 102/"took" : $body.$_path/] // TESTRESPONSE[s/"_shards" : \.\.\. /"_shards" : $body.$_path/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] The only notable difference is that Rollup search results have zero `hits`, because we aren't really searching the original, live data any more. Otherwise it's identical syntax. @@ -244,6 +248,7 @@ GET /sensor_rollup/_rollup_search } -------------------------------------------------- // TEST[setup:sensor_prefab_data] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Which returns a corresponding response: diff --git a/docs/reference/rollup/rollup-search-limitations.asciidoc b/docs/reference/rollup/rollup-search-limitations.asciidoc index bce90454a19ce..9135716d5eeb6 100644 --- a/docs/reference/rollup/rollup-search-limitations.asciidoc +++ b/docs/reference/rollup/rollup-search-limitations.asciidoc @@ -56,6 +56,7 @@ GET sensor_rollup/_rollup_search -------------------------------------------------- // TEST[setup:sensor_prefab_data] // TEST[catch:/illegal_argument_exception/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] 
The response will tell you that the field and aggregation were not possible, because no rollup jobs were found which contained them: diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 4fbe5ea1bb9f8..1e03279878fdf 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -176,7 +176,7 @@ The API returns the following result: "time_in_nanos": 775274, "children" : [ { - "name": "SimpleTopScoreDocCollector", + "name": "TopScoreDocCollector", "reason": "search_top_hits", "time_in_nanos": 775274 } @@ -537,7 +537,7 @@ Looking at the previous example: "time_in_nanos": 775274, "children" : [ { - "name": "SimpleTopScoreDocCollector", + "name": "TopScoreDocCollector", "reason": "search_top_hits", "time_in_nanos": 775274 } @@ -551,7 +551,7 @@ Looking at the previous example: We see a top-level collector named `QueryPhaseCollector` which holds a child -`SimpleTopScoreDocCollector`. `SimpleTopScoreDocCollector` is the default +`TopScoreDocCollector`. `TopScoreDocCollector` is the default "scoring and sorting" `Collector` used by {es}. The `reason` field attempts to give a plain English description of the class name. The `time_in_nanos` is similar to the time in the Query tree: a wall-clock time inclusive of all @@ -751,7 +751,7 @@ The API returns the following result: "time_in_nanos": 1945072, "children": [ { - "name": "SimpleTopScoreDocCollector", + "name": "TopScoreDocCollector", "reason": "search_top_hits", "time_in_nanos": 22577 }, @@ -788,7 +788,7 @@ major portions of the query are represented: 2. The second `TermQuery` (message:search) represents the `post_filter` query. The Collector tree is fairly straightforward, showing how a single -QueryPhaseCollector that holds the normal scoring SimpleTopScoreDocCollector +QueryPhaseCollector that holds the normal scoring TopScoreDocCollector used to collect top hits, as well as BucketCollectorWrapper to run all scoped aggregations. @@ -1332,7 +1332,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "rewrite_time" : 1275732, "collector" : [ { - "name" : "SimpleTopScoreDocCollector", + "name" : "TopScoreDocCollector", "reason" : "search_top_hits", "time_in_nanos" : 17163 } diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 4cccf4d204d99..5be2dd241d9dc 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -28,6 +28,9 @@ A <> that replaces the functionality of a traditi `knn`:: A <> that replaces the functionality of a <>. +`linear`:: +A <> that linearly combines the scores of other retrievers for the top documents. + `rescorer`:: A <> that replaces the functionality of the <>. @@ -45,6 +48,8 @@ A <> that applies contextual <> to pin o A standard retriever returns top documents from a traditional <>. +[discrete] +[[standard-retriever-parameters]] ===== Parameters: `query`:: @@ -195,6 +200,8 @@ Documents matching these conditions will have increased relevancy scores. A kNN retriever returns top documents from a <>. +[discrete] +[[knn-retriever-parameters]] ===== Parameters `field`:: @@ -265,21 +272,37 @@ GET /restaurants/_search This value must be fewer than or equal to `num_candidates`. <5> The size of the initial candidate set from which the final `k` nearest neighbors are selected. +[[linear-retriever]] +==== Linear Retriever +A retriever that normalizes and linearly combines the scores of other retrievers. 
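+
+As a minimal sketch (the index name, field names, and query vector here are illustrative, mirroring the
+`retrievers_example` index used later in this document), a `linear` retriever request might look like:
+
+[source,console]
+----
+GET /retrievers_example/_search
+{
+  "retriever": {
+    "linear": {
+      "retrievers": [
+        {
+          "retriever": {
+            "standard": {
+              "query": {
+                "match": { "text": "artificial intelligence" }
+              }
+            }
+          },
+          "weight": 2,
+          "normalizer": "minmax"
+        },
+        {
+          "retriever": {
+            "knn": {
+              "field": "vector",
+              "query_vector": [ 0.23, 0.67, 0.89 ],
+              "k": 3,
+              "num_candidates": 5
+            }
+          },
+          "weight": 1.5,
+          "normalizer": "minmax"
+        }
+      ],
+      "rank_window_size": 10
+    }
+  }
+}
+----
+// TEST[skip:illustrative sketch only]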
+ +[discrete] +[[linear-retriever-parameters]] +===== Parameters + +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=linear-retriever-components] + +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-rank-window-size] + +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-filter] + [[rrf-retriever]] ==== RRF Retriever An <> retriever returns top documents based on the RRF formula, equally weighting two or more child retrievers. Reciprocal rank fusion (RRF) is a method for combining multiple result sets with different relevance indicators into a single result set. +[discrete] +[[rrf-retriever-parameters]] ===== Parameters include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-constant] -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-window-size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-rank-window-size] -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-filter] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-filter] [discrete] [[rrf-retriever-example-hybrid]] @@ -433,10 +456,13 @@ GET movies/_search "retriever": { "rescorer": { <2> "rescore": { - "query": { <3> - "window_size": 50, <4> + "window_size": 50, <3> + "query": { <4> "rescore_query": { "script_score": { + "query": { + "match_all": {} + }, "script": { "source": "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0", "params": { @@ -493,8 +519,8 @@ GET movies/_search // TEST[skip:uses ELSER] <1> Specifies the number of top documents to return in the final response. <2> A `rescorer` retriever applied as the final step. -<3> The definition of the `query` rescorer. -<4> Defines the number of documents to rescore from the child retriever. +<3> Defines the number of documents to rescore from the child retriever. +<4> The definition of the `query` rescorer. <5> Specifies the child retriever definition. <6> Defines the number of documents returned by the `rrf` retriever, which limits the available documents to @@ -540,6 +566,8 @@ score = ln(score), if score < 0 ---- ==== +[discrete] +[[text-similarity-reranker-retriever-parameters]] ===== Parameters `retriever`:: diff --git a/docs/reference/search/rrf.asciidoc b/docs/reference/search/rrf.asciidoc index 842bd7049e3bf..59976cec9c0aa 100644 --- a/docs/reference/search/rrf.asciidoc +++ b/docs/reference/search/rrf.asciidoc @@ -45,7 +45,7 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-constant] -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-window-size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-rank-window-size] An example request using RRF: @@ -791,11 +791,11 @@ A more specific example of highlighting in RRF can also be found in the <> functionality, allowing you to retrieve -related nested or parent/child documents alongside your main search results. Inner hits can be -specified as part of any nested sub-retriever and will be propagated to the top-level parent -retriever. 
Note that the inner hit computation will take place only at end of `rrf` retriever's
-evaluation on the top matching documents, and not as part of the query execution of the nested
+The `rrf` retriever supports <> functionality, allowing you to retrieve
+related nested or parent/child documents alongside your main search results. Inner hits can be
+specified as part of any nested sub-retriever and will be propagated to the top-level parent
+retriever. Note that the inner hit computation will take place only at the end of the `rrf` retriever's
+evaluation on the top matching documents, and not as part of the query execution of the nested
 sub-retrievers.
 
 [IMPORTANT]
diff --git a/docs/reference/search/search-your-data/highlighting.asciidoc b/docs/reference/search/search-your-data/highlighting.asciidoc
index 63d9c632bffcf..bc81be389cf9c 100644
--- a/docs/reference/search/search-your-data/highlighting.asciidoc
+++ b/docs/reference/search/search-your-data/highlighting.asciidoc
@@ -37,8 +37,8 @@ GET /_search
 // TEST[setup:my_index]
 
 {es} supports three highlighters: `unified`, `plain`, and `fvh` (fast vector
-highlighter). You can specify the highlighter `type` you want to use
-for each field.
+highlighter) for `text` and `keyword` fields, as well as the `semantic` highlighter for `semantic_text` fields.
+You can specify the highlighter `type` you want to use for each field or rely on the field type's default highlighter.
 
 [discrete]
 [[unified-highlighter]]
@@ -48,7 +48,19 @@ highlighter breaks the text into sentences and uses the BM25 algorithm to score
 individual sentences as if they were documents in the corpus. It also supports
 accurate phrase and multi-term (fuzzy, prefix, regex) highlighting. The `unified`
 highlighter can combine matches from multiple fields into one result (see
-`matched_fields`). This is the default highlighter.
+`matched_fields`).
+
+This is the default highlighter for all `text` and `keyword` fields.
+
+[discrete]
+[[semantic-highlighter]]
+==== Semantic Highlighter
+
+The `semantic` highlighter is specifically designed for use with the <> field.
+It identifies and extracts the most relevant fragments from the field based on semantic
+similarity between the query and each fragment.
+
+By default, <> fields use the semantic highlighter.
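+
+As a minimal sketch (the index name, field name, and query text are illustrative; the highlight parameters
+mirror the hybrid search example later in this document, and the `semantic` query here is an assumption about
+how the field is searched), a request might look like:
+
+[source,console]
+----
+GET my-index/_search
+{
+  "query": {
+    "semantic": {
+      "field": "my_semantic_field",
+      "query": "What is semantic highlighting?"
+    }
+  },
+  "highlight": {
+    "fields": {
+      "my_semantic_field": {
+        "number_of_fragments": 2
+      }
+    }
+  }
+}
+----
+// TEST[skip:illustrative sketch only]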
 
 [discrete]
 [[plain-highlighter]]
diff --git a/docs/reference/search/search-your-data/retrievers-examples.asciidoc b/docs/reference/search/search-your-data/retrievers-examples.asciidoc
index c0be7432aa179..bc5f891a759b6 100644
--- a/docs/reference/search/search-your-data/retrievers-examples.asciidoc
+++ b/docs/reference/search/search-your-data/retrievers-examples.asciidoc
@@ -36,6 +36,9 @@ PUT retrievers_example
       },
       "topic": {
         "type": "keyword"
+      },
+      "timestamp": {
+        "type": "date"
       }
     }
   }
@@ -46,7 +49,8 @@ POST /retrievers_example/_doc/1
   "vector": [0.23, 0.67, 0.89],
   "text": "Large language models are revolutionizing information retrieval by boosting search precision, deepening contextual understanding, and reshaping user experiences in data-rich environments.",
   "year": 2024,
-  "topic": ["llm", "ai", "information_retrieval"]
+  "topic": ["llm", "ai", "information_retrieval"],
+  "timestamp": "2021-01-01T12:10:30"
 }
 
 POST /retrievers_example/_doc/2
@@ -54,7 +58,8 @@ POST /retrievers_example/_doc/2
   "vector": [0.12, 0.56, 0.78],
   "text": "Artificial intelligence is transforming medicine, from advancing diagnostics and tailoring treatment plans to empowering predictive patient care for improved health outcomes.",
   "year": 2023,
-  "topic": ["ai", "medicine"]
+  "topic": ["ai", "medicine"],
+  "timestamp": "2022-01-01T12:10:30"
 }
 
 POST /retrievers_example/_doc/3
@@ -62,7 +67,8 @@ POST /retrievers_example/_doc/3
   "vector": [0.45, 0.32, 0.91],
   "text": "AI is redefining security by enabling advanced threat detection, proactive risk analysis, and dynamic defenses against increasingly sophisticated cyber threats.",
   "year": 2024,
-  "topic": ["ai", "security"]
+  "topic": ["ai", "security"],
+  "timestamp": "2023-01-01T12:10:30"
 }
 
 POST /retrievers_example/_doc/4
@@ -70,7 +76,8 @@ POST /retrievers_example/_doc/4
   "vector": [0.34, 0.21, 0.98],
   "text": "Elastic introduces Elastic AI Assistant, the open, generative AI sidekick powered by ESRE to democratize cybersecurity and enable users of every skill level.",
   "year": 2023,
-  "topic": ["ai", "elastic", "assistant"]
+  "topic": ["ai", "elastic", "assistant"],
+  "timestamp": "2024-01-01T12:10:30"
 }
 
 POST /retrievers_example/_doc/5
@@ -78,7 +85,8 @@ POST /retrievers_example/_doc/5
   "vector": [0.11, 0.65, 0.47],
   "text": "Learn how to spin up a deployment of our hosted Elasticsearch Service and use Elastic Observability to gain deeper insight into the behavior of your applications and systems.",
   "year": 2024,
-  "topic": ["documentation", "observability", "elastic"]
+  "topic": ["documentation", "observability", "elastic"],
+  "timestamp": "2025-01-01T12:10:30"
 }
 
 POST /retrievers_example/_refresh
@@ -185,6 +193,248 @@ This returns the following response based on the final rrf score for each result
 // TESTRESPONSE[s/"took": 42/"took": $body.took/]
 ==============
 
+[discrete]
+[[retrievers-examples-linear-retriever]]
+==== Example: Hybrid search with linear retriever
+
+A different, and more intuitive, way to provide hybrid search is to linearly combine the top documents of different
+retrievers using a weighted sum of the original scores. Since, as above, the scores could lie in different ranges,
+we can also specify a `normalizer` to ensure that all scores for the top-ranked documents of a retriever
+lie in a specific range.
+
+To implement this, we define a `linear` retriever along with a set of sub-retrievers that will generate the
+heterogeneous result sets to combine.
+We will solve a problem similar to the above by merging the results of a `standard` and a `knn`
+retriever. As the `standard` retriever's scores are based on BM25 and are not strictly bounded, we will also define a
+`minmax` normalizer to ensure that the scores lie in the [0, 1] range. We will apply the same normalizer to `knn` as well
+to ensure that we capture the importance of each document within the result set.
+
+So, let's now specify the `linear` retriever whose final score is computed as follows:
+
+[source,text]
+----
+score = weight(standard) * score(standard) + weight(knn) * score(knn)
+score = 2 * score(standard) + 1.5 * score(knn)
+----
+// NOTCONSOLE
+
+[source,console]
+----
+GET /retrievers_example/_search
+{
+  "retriever": {
+    "linear": {
+      "retrievers": [
+        {
+          "retriever": {
+            "standard": {
+              "query": {
+                "query_string": {
+                  "query": "(information retrieval) OR (artificial intelligence)",
+                  "default_field": "text"
+                }
+              }
+            }
+          },
+          "weight": 2,
+          "normalizer": "minmax"
+        },
+        {
+          "retriever": {
+            "knn": {
+              "field": "vector",
+              "query_vector": [
+                0.23,
+                0.67,
+                0.89
+              ],
+              "k": 3,
+              "num_candidates": 5
+            }
+          },
+          "weight": 1.5,
+          "normalizer": "minmax"
+        }
+      ],
+      "rank_window_size": 10
+    }
+  },
+  "_source": false
+}
+----
+// TEST[continued]
+
+This returns the following response based on the normalized weighted score for each result.
+
+.Example response
+[%collapsible]
+==============
+[source,console-result]
+----
+{
+  "took": 42,
+  "timed_out": false,
+  "_shards": {
+    "total": 1,
+    "successful": 1,
+    "skipped": 0,
+    "failed": 0
+  },
+  "hits": {
+    "total": {
+      "value": 3,
+      "relation": "eq"
+    },
+    "max_score": -1,
+    "hits": [
+      {
+        "_index": "retrievers_example",
+        "_id": "2",
+        "_score": -1
+      },
+      {
+        "_index": "retrievers_example",
+        "_id": "1",
+        "_score": -2
+      },
+      {
+        "_index": "retrievers_example",
+        "_id": "3",
+        "_score": -3
+      }
+    ]
+  }
+}
+----
+// TESTRESPONSE[s/"took": 42/"took": $body.took/]
+// TESTRESPONSE[s/"max_score": -1/"max_score": $body.hits.max_score/]
+// TESTRESPONSE[s/"_score": -1/"_score": $body.hits.hits.0._score/]
+// TESTRESPONSE[s/"_score": -2/"_score": $body.hits.hits.1._score/]
+// TESTRESPONSE[s/"_score": -3/"_score": $body.hits.hits.2._score/]
+==============
+
+By normalizing scores and leveraging `function_score` queries, we can also implement more complex ranking strategies,
+such as sorting results based on their timestamps, assigning the timestamp as the score, and then normalizing this score to
+[0, 1].
+
+Then, we can easily combine the above with a `knn` retriever as follows: + +[source,console] +---- +GET /retrievers_example/_search +{ + "retriever": { + "linear": { + "retrievers": [ + { + "retriever": { + "standard": { + "query": { + "function_score": { + "query": { + "term": { + "topic": "ai" + } + }, + "functions": [ + { + "script_score": { + "script": { + "source": "doc['timestamp'].value.millis" + } + } + } + ], + "boost_mode": "replace" + } + }, + "sort": { + "timestamp": { + "order": "asc" + } + } + } + }, + "weight": 2, + "normalizer": "minmax" + }, + { + "retriever": { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + "weight": 1.5 + } + ], + "rank_window_size": 10 + } + }, + "_source": false +} +---- +// TEST[continued] + +This would return the following results: + +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 4, + "relation": "eq" + }, + "max_score": -1, + "hits": [ + { + "_index": "retrievers_example", + "_id": "3", + "_score": -1 + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": -2 + }, + { + "_index": "retrievers_example", + "_id": "4", + "_score": -3 + }, + { + "_index": "retrievers_example", + "_id": "1", + "_score": -4 + } + ] + } +} +---- +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +// TESTRESPONSE[s/"max_score": -1/"max_score": $body.hits.max_score/] +// TESTRESPONSE[s/"_score": -1/"_score": $body.hits.hits.0._score/] +// TESTRESPONSE[s/"_score": -2/"_score": $body.hits.hits.1._score/] +// TESTRESPONSE[s/"_score": -3/"_score": $body.hits.hits.2._score/] +// TESTRESPONSE[s/"_score": -4/"_score": $body.hits.hits.3._score/] +============== + [discrete] [[retrievers-examples-collapsing-retriever-results]] ==== Example: Grouping results by year with `collapse` diff --git a/docs/reference/search/search-your-data/retrievers-overview.asciidoc b/docs/reference/search/search-your-data/retrievers-overview.asciidoc index 1771b5bb0d849..1a94ae18a5c20 100644 --- a/docs/reference/search/search-your-data/retrievers-overview.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-overview.asciidoc @@ -23,6 +23,9 @@ This ensures backward compatibility as existing `_search` requests remain suppor That way you can transition to the new abstraction at your own pace without mixing syntaxes. * <>. Returns top documents from a <>, in the context of a retriever framework. +* <>. +Combines the top results from multiple sub-retrievers using a weighted sum of their scores. Allows you to specify different +weights for each retriever, as well as to independently normalize the scores from each result set. * <>. Combines and ranks multiple first-stage retrievers using the reciprocal rank fusion (RRF) algorithm. Allows you to combine multiple result sets with different relevance indicators into a single result set.
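For reference, the RRF combination mentioned in the last bullet above can be sketched informally as follows (our notation, not taken from this page: `rank_i(d)` is the rank of document `d` in the i-th result set, and `rank_constant` is the API parameter of the same name):

[source, text]
----
score(d) = sum over all result sets i: 1 / (rank_constant + rank_i(d))
----
// NOTCONSOLE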
diff --git a/docs/reference/search/search-your-data/search-multiple-indices.asciidoc b/docs/reference/search/search-your-data/search-multiple-indices.asciidoc index 4052097e73c91..f02e10640a6d9 100644 --- a/docs/reference/search/search-your-data/search-multiple-indices.asciidoc +++ b/docs/reference/search/search-your-data/search-multiple-indices.asciidoc @@ -1,5 +1,11 @@ [[search-multiple-indices]] -=== Search multiple data streams and indices +=== Search multiple data streams and indices using a query + +There are two main methods for searching across multiple data streams and indices in {es}: + +* *Query level*: Directly specify indices in the search request path or use index patterns to target multiple indices. + +* *Index level*: Use <>, which act as pointers to one or more backing indices, enabling logical grouping and management of indices. To search multiple data streams and indices, add them as comma-separated values in the <>'s request path. @@ -39,6 +45,34 @@ GET /my-index-*/_search ---- // TEST[setup:my_index] +You can exclude specific indices from a search. The following request retrieves data from all indices starting with `my-index-`, except for `my-index-01`. + +[source,console] +---- +GET /my-index-*/_search +{ + "query": { + "bool": { + "must": [ + { + "match": { + "user.id": "kimchy" + } + } + ], + "must_not": [ + { + "terms": { + "_index": ["my-index-01"] + } + } + ] + } + } +} +---- +// TEST[setup:my_index] + To search all data streams and indices in a cluster, omit the target from the request path. Alternatively, you can use `_all` or `*`. diff --git a/docs/reference/search/search-your-data/semantic-text-hybrid-search b/docs/reference/search/search-your-data/semantic-text-hybrid-search index 4b49a7c3155db..0dfe97628faf5 100644 --- a/docs/reference/search/search-your-data/semantic-text-hybrid-search +++ b/docs/reference/search/search-your-data/semantic-text-hybrid-search @@ -113,6 +113,7 @@ POST _tasks//_cancel ==== Perform hybrid search After reindexing the data into the `semantic-embeddings` index, you can perform hybrid search by using <>. RRF is a technique that merges the rankings from both semantic and lexical queries, giving more weight to results that rank high in either search. This ensures that the final results are balanced and relevant. +To extract the fragments of the original text that are most relevant to the query, you can use the <>: [source,console] ------------------------------------------------------------ @@ -142,6 +143,13 @@ GET semantic-embeddings/_search } ] } + }, + "highlight": { + "fields": { + "semantic_text": { + "number_of_fragments": 2 <5> + } + } } } ------------------------------------------------------------ @@ -150,7 +158,7 @@ GET semantic-embeddings/_search <2> Lexical search is performed on the `content` field using the specified phrase. <3> The second `standard` retriever refers to the semantic search. <4> The `semantic_text` field is used to perform the semantic search. - +<5> Specifies the maximum number of fragments to return. See <> for a more complete example. After performing the hybrid search, the query will return the top 10 documents that match both semantic and lexical search criteria.
The results include detailed information about each document: @@ -178,36 +186,14 @@ After performing the hybrid search, the query will return the top 10 documents t "_score": 0.032786883, "_rank": 1, "_source": { - "semantic_text": { - "inference": { - "inference_id": "my-elser-endpoint", - "model_settings": { - "task_type": "sparse_embedding" - }, - "chunks": [ - { - "text": "What so many out there do not realize is the importance of what you do after you work out. You may have done the majority of the work, but how you treat your body in the minutes and hours after you exercise has a direct effect on muscle soreness, muscle strength and growth, and staying hydrated. Cool Down. After your last exercise, your workout is not over. The first thing you need to do is cool down. Even if running was all that you did, you still should do light cardio for a few minutes. This brings your heart rate down at a slow and steady pace, which helps you avoid feeling sick after a workout.", - "embeddings": { - "exercise": 1.571044, - "after": 1.3603843, - "sick": 1.3281639, - "cool": 1.3227621, - "muscle": 1.2645415, - "sore": 1.2561599, - "cooling": 1.2335974, - "running": 1.1750668, - "hours": 1.1104802, - "out": 1.0991782, - "##io": 1.0794281, - "last": 1.0474665, - (...) - } - } - ] - } - }, "id": 8408852, "content": "What so many out there do not realize is the importance of (...)" + }, + "highlight" : { + "semantic_text" : [ + "... fragment_1 ...", + "... fragment_2 ..." + ] } } ] diff --git a/docs/reference/search/search-your-data/sort-search-results.asciidoc b/docs/reference/search/search-your-data/sort-search-results.asciidoc index 3e32573d7d8ae..e0c1cf910ac1f 100644 --- a/docs/reference/search/search-your-data/sort-search-results.asciidoc +++ b/docs/reference/search/search-your-data/sort-search-results.asciidoc @@ -5,6 +5,8 @@ Allows you to add one or more sorts on specific fields. Each sort can be reversed as well. The sort is defined on a per field level, with special field name for `_score` to sort by score, and `_doc` to sort by index order. +To optimize sorting performance, avoid sorting by <> fields; instead, use <> or <> fields. Additionally, you can improve performance by enabling pre-sorting at index time using <>. While this can speed up query-time sorting, it may reduce indexing performance and increase memory usage. + Assuming the following index mapping: [source,console] diff --git a/docs/reference/settings/security-hash-settings.asciidoc b/docs/reference/settings/security-hash-settings.asciidoc index 93350a7749405..79819e4a389aa 100644 --- a/docs/reference/settings/security-hash-settings.asciidoc +++ b/docs/reference/settings/security-hash-settings.asciidoc @@ -124,4 +124,68 @@ following: initial input with SHA512 first. |======================= +Furthermore, {es} supports authentication via securely generated, high-entropy tokens, +for instance <>. +Analogous to passwords, only the tokens' hashes are stored. Since the tokens are guaranteed +to have sufficiently high entropy to resist offline attacks, secure salted hash functions are supported +in addition to the password-hashing algorithms mentioned above. +You can configure the algorithm used for hashing stored API key credentials +by setting the <> +`xpack.security.authc.api_key.hashing.algorithm` setting to one of the +following: + +[[secure-token-hashing-algorithms]] +.Secure token hashing algorithms +|======================= +| Algorithm | | | Description + +| `ssha256` | | | Uses a salted `sha-256` algorithm.
(default) +| `bcrypt` | | | Uses `bcrypt` algorithm with salt generated in 1024 rounds. +| `bcrypt4` | | | Uses `bcrypt` algorithm with salt generated in 16 rounds. +| `bcrypt5` | | | Uses `bcrypt` algorithm with salt generated in 32 rounds. +| `bcrypt6` | | | Uses `bcrypt` algorithm with salt generated in 64 rounds. +| `bcrypt7` | | | Uses `bcrypt` algorithm with salt generated in 128 rounds. +| `bcrypt8` | | | Uses `bcrypt` algorithm with salt generated in 256 rounds. +| `bcrypt9` | | | Uses `bcrypt` algorithm with salt generated in 512 rounds. +| `bcrypt10` | | | Uses `bcrypt` algorithm with salt generated in 1024 rounds. +| `bcrypt11` | | | Uses `bcrypt` algorithm with salt generated in 2048 rounds. +| `bcrypt12` | | | Uses `bcrypt` algorithm with salt generated in 4096 rounds. +| `bcrypt13` | | | Uses `bcrypt` algorithm with salt generated in 8192 rounds. +| `bcrypt14` | | | Uses `bcrypt` algorithm with salt generated in 16384 rounds. +| `pbkdf2` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 10000 iterations. +| `pbkdf2_1000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 1000 iterations. +| `pbkdf2_10000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 10000 iterations. +| `pbkdf2_50000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 50000 iterations. +| `pbkdf2_100000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 100000 iterations. +| `pbkdf2_500000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 500000 iterations. +| `pbkdf2_1000000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 1000000 iterations. +| `pbkdf2_stretch` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 10000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_1000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 1000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_10000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 10000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_50000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 50000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_100000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 100000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_500000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 500000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_1000000`| | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 1000000 iterations, after hashing the + initial input with SHA512 first. 
+|======================= diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index 0fc4d59e72350..db95ac48f5be8 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -23,8 +23,8 @@ For more information about creating and updating the {es} keystore, see ==== General security settings `xpack.security.enabled`:: (<>) -Defaults to `true`, which enables {es} {security-features} on the node. -This setting must be enabled to use Elasticsearch's authentication, +Defaults to `true`, which enables {es} {security-features} on the node. +This setting must be enabled to use Elasticsearch's authentication, authorization and audit features. + + -- @@ -229,7 +229,7 @@ Defaults to `7d`. -- NOTE: Large real-time clock inconsistency across cluster nodes can cause problems -with evaluating the API key retention period. That is, if the clock on the node +with evaluating the API key retention period. That is, if the clock on the node invalidating the API key is significantly different than the one performing the deletion, the key may be retained for longer or shorter than the configured retention period. @@ -252,7 +252,7 @@ Sets the timeout of the internal search and delete call. `xpack.security.authc.api_key.hashing.algorithm`:: (<>) Specifies the hashing algorithm that is used for securing API key credentials. -See <>. Defaults to `pbkdf2`. +See <>. Defaults to `ssha256`. [discrete] [[security-domain-settings]] diff --git a/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc b/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc index a2342c449c88c..67a872c883afe 100644 --- a/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc +++ b/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc @@ -57,7 +57,7 @@ GET _cluster/allocation/explain [[fix-watermark-errors-temporary]] ==== Temporary Relief -To immediately restore write operations, you can temporarily increase the +To immediately restore write operations, you can temporarily increase <> and remove the <>. @@ -106,19 +106,35 @@ PUT _cluster/settings [[fix-watermark-errors-resolve]] ==== Resolve -As a long-term solution, we recommend you do one of the following best suited -to your use case: +To resolve watermark errors permanently, perform one of the following actions: -* add nodes to the affected <> -+ -TIP: You should enable <> for clusters deployed using our {ess}, {ece}, and {eck} platforms. +* Horizontally scale nodes of the affected <>. -* upgrade existing nodes to increase disk space -+ -TIP: On {ess}, https://support.elastic.co[Elastic Support] intervention may -become necessary if <> reaches `status:red`. +* Vertically scale existing nodes to increase disk space. -* delete unneeded indices using the <> +* Delete indices using the <>, either +permanently if the index isn't needed, or temporarily to later +<>. * update related <> to push indices through to later <> + +TIP: On {ess} and {ece}, indices may need to be temporarily deleted via +the {cloud}/ec-api-console.html[Elasticsearch API Console] to later +<> in order to resolve +<> `status:red`, which will block +{cloud}/ec-activity-page.html[attempted changes]. If you experience issues +with this resolution flow on {ess}, contact +https://support.elastic.co[Elastic Support] for assistance.
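To make the temporary delete-and-restore option concrete, the flow might look like the following sketch. It assumes a snapshot repository `my_repository` containing a snapshot `my_snapshot` that includes the index; the repository, snapshot, and index names are all placeholders:

[source,console]
----
DELETE /my-large-index

POST _snapshot/my_repository/my_snapshot/_restore
{
  "indices": "my-large-index"
}
----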
+ +[discrete] +[[fix-watermark-errors-prevent]] +=== Prevent watermark errors + +To avoid watermark errors in the future, perform one of the following actions: + +* If you're using {ess}, {ece}, or {eck}: Enable <>. + +* Set up {kibana-ref}/kibana-alerts.html[stack monitoring alerts] on top of +<> to be notified before +the flood-stage watermark is reached. diff --git a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 7ddb2ca354cfc..b4095e133114b 100644 --- a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -286,7 +286,7 @@ protected boolean isWatcherTest() { /** * Compares the results of running two analyzers against many random - * strings. The goal is to figure out if two anlayzers are "the same" by + * strings. The goal is to figure out if two analyzers are "the same" by * comparing their results. This is far from perfect but should be fairly * accurate, especially for gross things like missing {@code decimal_digit} * token filters, and should be fairly fast because it compares a fairly diff --git a/docs/src/yamlRestTest/resources/GeoLite2-City.mmdb b/docs/src/yamlRestTest/resources/GeoLite2-City.mmdb index 0809201619b59..67dcd4a8e5a17 100644 Binary files a/docs/src/yamlRestTest/resources/GeoLite2-City.mmdb and b/docs/src/yamlRestTest/resources/GeoLite2-City.mmdb differ diff --git a/docs/src/yamlRestTest/resources/GeoLite2-Country.mmdb b/docs/src/yamlRestTest/resources/GeoLite2-Country.mmdb index aa81cbe8a2f0e..8b1436b0c387f 100644 Binary files a/docs/src/yamlRestTest/resources/GeoLite2-Country.mmdb and b/docs/src/yamlRestTest/resources/GeoLite2-Country.mmdb differ diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index 05fda8e0244de..1aed327c45796 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -11,7 +11,7 @@ apache-compress = "org.apache.commons:commons-compress:1.26.1" apache-rat = "org.apache.rat:apache-rat:0.11" asm = { group = "org.ow2.asm", name="asm", version.ref="asm" } asm-tree = { group = "org.ow2.asm", name="asm-tree", version.ref="asm" } -bytebuddy = "net.bytebuddy:byte-buddy:1.14.12" +bytebuddy = "net.bytebuddy:byte-buddy:1.15.11" checkstyle = "com.puppycrawl.tools:checkstyle:10.3" commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2c46c4642e56e..64efd8e439df1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml diff --git a/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java b/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java new file mode 100644 index 0000000000000..5d3831881f285 --- /dev/null +++
b/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.core; + +/** + * A {@link java.util.function.Supplier}-like interface which allows throwing checked exceptions. + */ +@FunctionalInterface +public interface CheckedSupplier<T, E extends Exception> { + T get() throws E; +} diff --git a/libs/entitlement/asm-provider/build.gradle b/libs/entitlement/asm-provider/build.gradle index dcec0579a5bae..c6b51b7da3dff 100644 --- a/libs/entitlement/asm-provider/build.gradle +++ b/libs/entitlement/asm-provider/build.gradle @@ -11,6 +11,7 @@ apply plugin: 'elasticsearch.build' dependencies { compileOnly project(':libs:entitlement') + compileOnly project(':libs:core') implementation 'org.ow2.asm:asm:9.7.1' testImplementation project(":test:framework") testImplementation project(":libs:entitlement:bridge") diff --git a/libs/entitlement/asm-provider/src/main/java/module-info.java b/libs/entitlement/asm-provider/src/main/java/module-info.java index 8cbeafc9013aa..f953454f93b91 100644 --- a/libs/entitlement/asm-provider/src/main/java/module-info.java +++ b/libs/entitlement/asm-provider/src/main/java/module-info.java @@ -14,5 +14,7 @@ requires org.objectweb.asm; requires org.elasticsearch.entitlement; + requires static org.elasticsearch.base; // for SuppressForbidden + provides InstrumentationService with InstrumentationServiceImpl; } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java index eaf4d0ad98ef5..05a5af374e5d9 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.instrumentation.CheckMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.Instrumenter; @@ -20,14 +21,24 @@ import org.objectweb.asm.Type; import java.io.IOException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.ArrayDeque; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; public class InstrumentationServiceImpl implements InstrumentationService { + private static final String OBJECT_INTERNAL_NAME = Type.getInternalName(Object.class); + @Override public Instrumenter newInstrumenter(Class clazz, Map methods) { return InstrumenterImpl.create(clazz, methods); @@ -35,35 +46,193 @@ public Instrumenter
newInstrumenter(Class clazz, Map @Override public Map lookupMethods(Class checkerClass) throws IOException { - var methodsToInstrument = new HashMap(); - var classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); - ClassReader reader = new ClassReader(classFileInfo.bytecodes()); - ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { - @Override - public MethodVisitor visitMethod( - int access, - String checkerMethodName, - String checkerMethodDescriptor, - String signature, - String[] exceptions - ) { - var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions); - - var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); - var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); - - var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); - var checkMethod = new CheckMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); - - methodsToInstrument.put(methodToInstrument, checkMethod); - - return mv; + Map methodsToInstrument = new HashMap<>(); + + Set> visitedClasses = new HashSet<>(); + ArrayDeque> classesToVisit = new ArrayDeque<>(Collections.singleton(checkerClass)); + while (classesToVisit.isEmpty() == false) { + var currentClass = classesToVisit.remove(); + if (visitedClasses.contains(currentClass)) { + continue; } - }; - reader.accept(visitor, 0); + visitedClasses.add(currentClass); + + var classFileInfo = InstrumenterImpl.getClassFileInfo(currentClass); + ClassReader reader = new ClassReader(classFileInfo.bytecodes()); + ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { + + @Override + public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { + super.visit(version, access, name, signature, superName, interfaces); + try { + if (OBJECT_INTERNAL_NAME.equals(superName) == false) { + classesToVisit.add(Class.forName(Type.getObjectType(superName).getClassName())); + } + for (var interfaceName : interfaces) { + classesToVisit.add(Class.forName(Type.getObjectType(interfaceName).getClassName())); + } + } catch (ClassNotFoundException e) { + throw new IllegalArgumentException("Cannot inspect checker class " + checkerClass.getName(), e); + } + } + + @Override + public MethodVisitor visitMethod( + int access, + String checkerMethodName, + String checkerMethodDescriptor, + String signature, + String[] exceptions + ) { + var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions); + if (checkerMethodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX)) { + var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); + var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); + + var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); + var checkMethod = new CheckMethod( + Type.getInternalName(currentClass), + checkerMethodName, + checkerParameterDescriptors + ); + + methodsToInstrument.putIfAbsent(methodToInstrument, checkMethod); + } + return mv; + } + }; + reader.accept(visitor, 0); + } return methodsToInstrument; } + @SuppressForbidden(reason = "Need access to abstract methods (protected/package internal) in base class") + @Override + public InstrumentationInfo lookupImplementationMethod( + Class targetSuperclass, + String methodName, + Class 
implementationClass, + Class checkerClass, + String checkMethodName, + Class... parameterTypes + ) throws NoSuchMethodException, ClassNotFoundException { + + var targetMethod = targetSuperclass.getDeclaredMethod(methodName, parameterTypes); + var implementationMethod = implementationClass.getMethod(targetMethod.getName(), targetMethod.getParameterTypes()); + validateTargetMethod(implementationClass, targetMethod, implementationMethod); + + var checkerAdditionalArguments = Stream.of(Class.class, targetSuperclass); + var checkMethodArgumentTypes = Stream.concat(checkerAdditionalArguments, Arrays.stream(parameterTypes)) + .map(Type::getType) + .toArray(Type[]::new); + + CheckMethod[] checkMethod = new CheckMethod[1]; + + try { + InstrumenterImpl.ClassFileInfo classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); + ClassReader reader = new ClassReader(classFileInfo.bytecodes()); + ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { + @Override + public MethodVisitor visitMethod( + int access, + String methodName, + String methodDescriptor, + String signature, + String[] exceptions + ) { + var mv = super.visitMethod(access, methodName, methodDescriptor, signature, exceptions); + if (methodName.equals(checkMethodName)) { + var methodArgumentTypes = Type.getArgumentTypes(methodDescriptor); + if (Arrays.equals(methodArgumentTypes, checkMethodArgumentTypes)) { + var checkerParameterDescriptors = Arrays.stream(methodArgumentTypes).map(Type::getDescriptor).toList(); + checkMethod[0] = new CheckMethod(Type.getInternalName(checkerClass), methodName, checkerParameterDescriptors); + } + } + return mv; + } + }; + reader.accept(visitor, 0); + } catch (IOException e) { + throw new ClassNotFoundException("Cannot find a definition for class [" + checkerClass.getName() + "]", e); + } + + if (checkMethod[0] == null) { + throw new NoSuchMethodException( + String.format( + Locale.ROOT, + "Cannot find a method with name [%s] and arguments [%s] in class [%s]", + checkMethodName, + Arrays.stream(checkMethodArgumentTypes).map(Type::toString).collect(Collectors.joining()), + checkerClass.getName() + ) + ); + } + + return new InstrumentationInfo( + new MethodKey( + Type.getInternalName(implementationMethod.getDeclaringClass()), + implementationMethod.getName(), + Arrays.stream(parameterTypes).map(c -> Type.getType(c).getInternalName()).toList() + ), + checkMethod[0] + ); + } + + private static void validateTargetMethod(Class implementationClass, Method targetMethod, Method implementationMethod) { + if (targetMethod.getDeclaringClass().isAssignableFrom(implementationClass) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not an implementation class for %s: %s does not implement %s", + targetMethod.getName(), + implementationClass.getName(), + targetMethod.getDeclaringClass().getName() + ) + ); + } + if (Modifier.isPrivate(targetMethod.getModifiers())) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is private in %s", + targetMethod.getName(), + targetMethod.getDeclaringClass().getName() + ) + ); + } + if (Modifier.isStatic(targetMethod.getModifiers())) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is static in %s", + targetMethod.getName(), + targetMethod.getDeclaringClass().getName() + ) + ); + } + var methodModifiers = implementationMethod.getModifiers(); + if (Modifier.isAbstract(methodModifiers)) { + throw new 
IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is abstract in %s", + targetMethod.getName(), + implementationClass.getName() + ) + ); + } + if (Modifier.isPublic(methodModifiers) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is not public in %s", + targetMethod.getName(), + implementationClass.getName() + ) + ); + } + } + private static final Type CLASS_TYPE = Type.getType(Class.class); static ParsedCheckerMethod parseCheckerMethodName(String checkerMethodName) { @@ -85,8 +254,8 @@ static ParsedCheckerMethod parseCheckerMethodName(String checkerMethodName) { String.format( Locale.ROOT, "Checker method %s has incorrect name format. " - + "It should be either check$$methodName (instance), check$package_ClassName$methodName (static) or " - + "check$package_ClassName$ (ctor)", + + "It should be either check$package_ClassName$methodName (instance), check$package_ClassName$$methodName (static) " + + "or check$package_ClassName$ (ctor)", checkerMethodName ) ); diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 06408941ac96e..b10c58afacb1e 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -152,14 +152,13 @@ public MethodVisitor visitMethod(int access, String name, String descriptor, Str if (isAnnotationPresent == false) { boolean isStatic = (access & ACC_STATIC) != 0; boolean isCtor = "".equals(name); - boolean hasReceiver = (isStatic || isCtor) == false; var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var instrumentationMethod = checkMethods.get(key); if (instrumentationMethod != null) { - // LOGGER.debug("Will instrument method {}", key); + // System.out.println("Will instrument method " + key); return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, isCtor, descriptor, instrumentationMethod); } else { - // LOGGER.trace("Will not instrument method {}", key); + // System.out.println("Will not instrument method " + key); } } return mv; diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java index ab0d96a8df96d..2b9b70d46c0ea 100644 --- a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java @@ -29,7 +29,30 @@ public class InstrumentationServiceImplTests extends ESTestCase { final InstrumentationService instrumentationService = new InstrumentationServiceImpl(); - static class TestTargetClass {} + interface TestTargetInterface { + void instanceMethod(int x, String y); + } + + static class TestTargetClass implements TestTargetInterface { + @Override + public void instanceMethod(int x, String y) {} + } + + abstract static class TestTargetBaseClass { + 
abstract void instanceMethod(int x, String y); + + abstract void instanceMethod2(int x, String y); + } + + abstract static class TestTargetIntermediateClass extends TestTargetBaseClass { + @Override + public void instanceMethod2(int x, String y) {} + } + + static class TestTargetImplementationClass extends TestTargetIntermediateClass { + @Override + public void instanceMethod(int x, String y) {} + } interface TestChecker { void check$org_example_TestTargetClass$$staticMethod(Class clazz, int arg0, String arg1, Object arg2); @@ -39,6 +62,14 @@ interface TestChecker { void check$org_example_TestTargetClass$instanceMethodWithArgs(Class clazz, TestTargetClass that, int x, int y); } + interface TestCheckerDerived extends TestChecker { + void check$org_example_TestTargetClass$instanceMethodNoArgs(Class clazz, TestTargetClass that); + + void check$org_example_TestTargetClass$differentInstanceMethod(Class clazz, TestTargetClass that); + } + + interface TestCheckerDerived2 extends TestCheckerDerived, TestChecker {} + interface TestCheckerOverloads { void check$org_example_TestTargetClass$$staticMethodWithOverload(Class clazz, int x, int y); @@ -51,6 +82,14 @@ interface TestCheckerCtors { void check$org_example_TestTargetClass$(Class clazz, int x, String y); } + interface TestCheckerMixed { + void check$org_example_TestTargetClass$$staticMethod(Class clazz, int arg0, String arg1, Object arg2); + + void checkInstanceMethodManual(Class clazz, TestTargetInterface that, int x, String y); + + void checkInstanceMethodManual(Class clazz, TestTargetBaseClass that, int x, String y); + } + public void testInstrumentationTargetLookup() throws IOException { Map checkMethods = instrumentationService.lookupMethods(TestChecker.class); @@ -136,6 +175,75 @@ public void testInstrumentationTargetLookupWithOverloads() throws IOException { ); } + public void testInstrumentationTargetLookupWithDerivedClass() throws IOException { + Map checkMethods = instrumentationService.lookupMethods(TestCheckerDerived2.class); + + assertThat(checkMethods, aMapWithSize(4)); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$org_example_TestTargetClass$$staticMethod", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;") + ) + ) + ) + ); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "instanceMethodNoArgs", List.of())), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerDerived", + "check$org_example_TestTargetClass$instanceMethodNoArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;" + ) + ) + ) + ) + ); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "instanceMethodWithArgs", List.of("I", "I"))), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$org_example_TestTargetClass$instanceMethodWithArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;", + "I", + "I" + ) + ) + ) + ) + ); + assertThat( + checkMethods, + 
hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "differentInstanceMethod", List.of())), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerDerived", + "check$org_example_TestTargetClass$differentInstanceMethod", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;" + ) + ) + ) + ) + ); + } + public void testInstrumentationTargetLookupWithCtors() throws IOException { Map checkMethods = instrumentationService.lookupMethods(TestCheckerCtors.class); @@ -168,6 +276,139 @@ public void testInstrumentationTargetLookupWithCtors() throws IOException { ); } + public void testInstrumentationTargetLookupWithExtraMethods() throws IOException { + Map checkMethods = instrumentationService.lookupMethods(TestCheckerMixed.class); + + assertThat(checkMethods, aMapWithSize(1)); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "check$org_example_TestTargetClass$$staticMethod", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;") + ) + ) + ) + ); + } + + public void testLookupImplementationMethodWithInterface() throws ClassNotFoundException, NoSuchMethodException { + var info = instrumentationService.lookupImplementationMethod( + TestTargetInterface.class, + "instanceMethod", + TestTargetClass.class, + TestCheckerMixed.class, + "checkInstanceMethodManual", + int.class, + String.class + ); + + assertThat( + info.targetMethod(), + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of("I", "java/lang/String") + ) + ) + ); + assertThat( + info.checkMethod(), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "checkInstanceMethodManual", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetInterface;", + "I", + "Ljava/lang/String;" + ) + ) + ) + ); + } + + public void testLookupImplementationMethodWithBaseClass() throws ClassNotFoundException, NoSuchMethodException { + var info = instrumentationService.lookupImplementationMethod( + TestTargetBaseClass.class, + "instanceMethod", + TestTargetImplementationClass.class, + TestCheckerMixed.class, + "checkInstanceMethodManual", + int.class, + String.class + ); + + assertThat( + info.targetMethod(), + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetImplementationClass", + "instanceMethod", + List.of("I", "java/lang/String") + ) + ) + ); + assertThat( + info.checkMethod(), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "checkInstanceMethodManual", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetBaseClass;", + "I", + "Ljava/lang/String;" + ) + ) + ) + ); + } + + public void testLookupImplementationMethodWithInheritance() throws ClassNotFoundException, NoSuchMethodException { + var info = 
instrumentationService.lookupImplementationMethod( + TestTargetBaseClass.class, + "instanceMethod2", + TestTargetImplementationClass.class, + TestCheckerMixed.class, + "checkInstanceMethodManual", + int.class, + String.class + ); + + assertThat( + info.targetMethod(), + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetIntermediateClass", + "instanceMethod2", + List.of("I", "java/lang/String") + ) + ) + ); + assertThat( + info.checkMethod(), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "checkInstanceMethodManual", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetBaseClass;", + "I", + "Ljava/lang/String;" + ) + ) + ) + ); + } + public void testParseCheckerMethodSignatureStaticMethod() { var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( "check$org_example_TestClass$$staticMethod", diff --git a/libs/entitlement/bridge/build.gradle b/libs/entitlement/bridge/build.gradle index a9f8f6e3a3b0a..5dec95b4b9bb4 100644 --- a/libs/entitlement/bridge/build.gradle +++ b/libs/entitlement/bridge/build.gradle @@ -19,6 +19,9 @@ tasks.named('jar').configure { } } +// The bridge only uses things within the jdk, but the checker +// needs to have many forbidden apis in its signatures. Suppressing +// each use of forbidden apis would be tedious and not useful. tasks.withType(CheckForbiddenApisTask).configureEach { - replaceSignatureFiles 'jdk-signatures' + enabled = false } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index d509763b3541d..978e4e93e375d 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.bridge; +import java.io.File; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; @@ -34,6 +35,7 @@ import java.net.Socket; import java.net.SocketAddress; import java.net.SocketImplFactory; +import java.net.URI; import java.net.URL; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; @@ -47,12 +49,27 @@ import java.nio.channels.DatagramChannel; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.nio.channels.spi.SelectorProvider; +import java.nio.charset.Charset; +import java.nio.file.AccessMode; +import java.nio.file.CopyOption; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.LinkOption; +import java.nio.file.OpenOption; import java.nio.file.Path; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.UserPrincipal; +import java.nio.file.spi.FileSystemProvider; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Properties; +import java.util.Set; import java.util.TimeZone; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ForkJoinPool; import java.util.function.Consumer; import javax.net.ssl.HostnameVerifier; @@ -63,7 +80,7 @@ @SuppressWarnings("unused") // Called from instrumentation code 
inserted by the Entitlements agent public interface EntitlementChecker { - //////////////////// + /// ///////////////// // // Exit the JVM process // @@ -74,9 +91,9 @@ public interface EntitlementChecker { void check$java_lang_System$$exit(Class callerClass, int status); - //////////////////// + /// ///////////////// // - // ClassLoader ctor + // create class loaders // void check$java_lang_ClassLoader$(Class callerClass); @@ -85,22 +102,6 @@ public interface EntitlementChecker { void check$java_lang_ClassLoader$(Class callerClass, String name, ClassLoader parent); - //////////////////// - // - // SecureClassLoader ctor - // - - void check$java_security_SecureClassLoader$(Class callerClass); - - void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent); - - void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent); - - //////////////////// - // - // URLClassLoader constructors - // - void check$java_net_URLClassLoader$(Class callerClass, URL[] urls); void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent); @@ -111,7 +112,13 @@ public interface EntitlementChecker { void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory); - //////////////////// + void check$java_security_SecureClassLoader$(Class callerClass); + + void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent); + + void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent); + + /// ///////////////// // // "setFactory" methods // @@ -124,7 +131,7 @@ public interface EntitlementChecker { void check$javax_net_ssl_SSLContext$$setDefault(Class callerClass, SSLContext context); - //////////////////// + /// ///////////////// // // Process creation // @@ -133,47 +140,49 @@ public interface EntitlementChecker { void check$java_lang_ProcessBuilder$$startPipeline(Class callerClass, List builders); - //////////////////// + /// ///////////////// // // System Properties and similar // + void check$java_lang_System$$setProperties(Class callerClass, Properties props); + void check$java_lang_System$$setProperty(Class callerClass, String key, String value); void check$java_lang_System$$clearProperty(Class callerClass, String key); - //////////////////// + /// ///////////////// // // JVM-wide state changes // - void check$java_lang_System$$setIn(Class callerClass, InputStream in); - - void check$java_lang_System$$setOut(Class callerClass, PrintStream out); + void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class callerClass); void check$java_lang_System$$setErr(Class callerClass, PrintStream err); - void check$java_lang_System$$setProperties(Class callerClass, Properties props); + void check$java_lang_System$$setIn(Class callerClass, InputStream in); + + void check$java_lang_System$$setOut(Class callerClass, PrintStream out); void check$java_lang_Runtime$addShutdownHook(Class callerClass, Runtime runtime, Thread hook); void check$java_lang_Runtime$removeShutdownHook(Class callerClass, Runtime runtime, Thread hook); - void check$jdk_tools_jlink_internal_Jlink$(Class callerClass); + void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class callerClass, Thread.UncaughtExceptionHandler ueh); - void check$jdk_tools_jlink_internal_Main$$run(Class callerClass, PrintWriter out, PrintWriter err, String... 
args); + void check$java_net_DatagramSocket$$setDatagramSocketImplFactory(Class callerClass, DatagramSocketImplFactory fac); - void check$jdk_vm_ci_services_JVMCIServiceLocator$$getProviders(Class callerClass, Class service); + void check$java_net_HttpURLConnection$$setFollowRedirects(Class callerClass, boolean set); - void check$jdk_vm_ci_services_Services$$load(Class callerClass, Class service); + void check$java_net_ServerSocket$$setSocketFactory(Class callerClass, SocketImplFactory fac); - void check$jdk_vm_ci_services_Services$$loadSingle(Class callerClass, Class service, boolean required); + void check$java_net_Socket$$setSocketImplFactory(Class callerClass, SocketImplFactory fac); - void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class callerClass); + void check$java_net_URL$$setURLStreamHandlerFactory(Class callerClass, URLStreamHandlerFactory fac); - void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class callerClass, Thread.UncaughtExceptionHandler ueh); + void check$java_net_URLConnection$$setFileNameMap(Class callerClass, FileNameMap map); - void check$java_util_spi_LocaleServiceProvider$(Class callerClass); + void check$java_net_URLConnection$$setContentHandlerFactory(Class callerClass, ContentHandlerFactory fac); void check$java_text_spi_BreakIteratorProvider$(Class callerClass); @@ -195,6 +204,8 @@ public interface EntitlementChecker { void check$java_util_spi_LocaleNameProvider$(Class callerClass); + void check$java_util_spi_LocaleServiceProvider$(Class callerClass); + void check$java_util_spi_TimeZoneNameProvider$(Class callerClass); void check$java_util_logging_LogManager$(Class callerClass); @@ -205,21 +216,19 @@ public interface EntitlementChecker { void check$java_util_TimeZone$$setDefault(Class callerClass, TimeZone zone); - void check$java_net_DatagramSocket$$setDatagramSocketImplFactory(Class callerClass, DatagramSocketImplFactory fac); - - void check$java_net_HttpURLConnection$$setFollowRedirects(Class callerClass, boolean set); + void check$jdk_tools_jlink_internal_Jlink$(Class callerClass); - void check$java_net_ServerSocket$$setSocketFactory(Class callerClass, SocketImplFactory fac); + void check$jdk_tools_jlink_internal_Main$$run(Class callerClass, PrintWriter out, PrintWriter err, String... 
args); - void check$java_net_Socket$$setSocketImplFactory(Class callerClass, SocketImplFactory fac); + void check$jdk_vm_ci_services_JVMCIServiceLocator$$getProviders(Class callerClass, Class service); - void check$java_net_URL$$setURLStreamHandlerFactory(Class callerClass, URLStreamHandlerFactory fac); + void check$jdk_vm_ci_services_Services$$load(Class callerClass, Class service); - void check$java_net_URLConnection$$setFileNameMap(Class callerClass, FileNameMap map); + void check$jdk_vm_ci_services_Services$$loadSingle(Class callerClass, Class service, boolean required); - void check$java_net_URLConnection$$setContentHandlerFactory(Class callerClass, ContentHandlerFactory fac); + void check$java_nio_charset_spi_CharsetProvider$(Class callerClass); - //////////////////// + /// ///////////////// // // Network access // @@ -227,10 +236,6 @@ public interface EntitlementChecker { void check$java_net_ResponseCache$$setDefault(Class callerClass, ResponseCache rc); - void check$java_net_spi_InetAddressResolverProvider$(Class callerClass); - - void check$java_net_spi_URLStreamHandlerProvider$(Class callerClass); - void check$java_net_URL$(Class callerClass, String protocol, String host, int port, String file, URLStreamHandler handler); void check$java_net_URL$(Class callerClass, URL context, String spec, URLStreamHandler handler); @@ -241,14 +246,14 @@ public interface EntitlementChecker { void check$java_net_DatagramSocket$connect(Class callerClass, DatagramSocket that, SocketAddress addr); - void check$java_net_DatagramSocket$send(Class callerClass, DatagramSocket that, DatagramPacket p); - - void check$java_net_DatagramSocket$receive(Class callerClass, DatagramSocket that, DatagramPacket p); - void check$java_net_DatagramSocket$joinGroup(Class callerClass, DatagramSocket that, SocketAddress addr, NetworkInterface ni); void check$java_net_DatagramSocket$leaveGroup(Class callerClass, DatagramSocket that, SocketAddress addr, NetworkInterface ni); + void check$java_net_DatagramSocket$receive(Class callerClass, DatagramSocket that, DatagramPacket p); + + void check$java_net_DatagramSocket$send(Class callerClass, DatagramSocket that, DatagramPacket p); + void check$java_net_MulticastSocket$joinGroup(Class callerClass, MulticastSocket that, InetAddress addr); void check$java_net_MulticastSocket$joinGroup(Class callerClass, MulticastSocket that, SocketAddress addr, NetworkInterface ni); @@ -259,6 +264,10 @@ public interface EntitlementChecker { void check$java_net_MulticastSocket$send(Class callerClass, MulticastSocket that, DatagramPacket p, byte ttl); + void check$java_net_spi_InetAddressResolverProvider$(Class callerClass); + + void check$java_net_spi_URLStreamHandlerProvider$(Class callerClass); + // Binding/connecting ctor void check$java_net_ServerSocket$(Class callerClass, int port); @@ -416,7 +425,17 @@ public interface EntitlementChecker { void check$sun_nio_ch_DatagramChannelImpl$receive(Class callerClass, DatagramChannel that, ByteBuffer dst); - //////////////////// + // providers (SPI) + + // protected constructors + void check$java_nio_channels_spi_SelectorProvider$(Class callerClass); + + void check$java_nio_channels_spi_AsynchronousChannelProvider$(Class callerClass); + + // provider methods (dynamic) + void checkSelectorProviderInheritedChannel(Class callerClass, SelectorProvider that); + + /// ///////////////// // // Load native libraries // @@ -484,4 +503,176 @@ public interface EntitlementChecker { void check$java_lang_foreign_SymbolLookup$$libraryLookup(Class callerClass, Path 
path, Arena arena);

    void check$java_lang_ModuleLayer$Controller$enableNativeAccess(Class<?> callerClass, ModuleLayer.Controller that, Module target);
+
+    /// /////////////////
+    //
+    // File access
+    //
+
+    // old io (ie File)
+    void check$java_io_File$createNewFile(Class<?> callerClass, File file);
+
+    void check$java_io_File$$createTempFile(Class<?> callerClass, String prefix, String suffix, File directory);
+
+    void check$java_io_File$delete(Class<?> callerClass, File file);
+
+    void check$java_io_File$deleteOnExit(Class<?> callerClass, File file);
+
+    void check$java_io_File$mkdir(Class<?> callerClass, File file);
+
+    void check$java_io_File$mkdirs(Class<?> callerClass, File file);
+
+    void check$java_io_File$renameTo(Class<?> callerClass, File file, File dest);
+
+    void check$java_io_File$setExecutable(Class<?> callerClass, File file, boolean executable);
+
+    void check$java_io_File$setExecutable(Class<?> callerClass, File file, boolean executable, boolean ownerOnly);
+
+    void check$java_io_File$setLastModified(Class<?> callerClass, File file, long time);
+
+    void check$java_io_File$setReadable(Class<?> callerClass, File file, boolean readable);
+
+    void check$java_io_File$setReadable(Class<?> callerClass, File file, boolean readable, boolean ownerOnly);
+
+    void check$java_io_File$setReadOnly(Class<?> callerClass, File file);
+
+    void check$java_io_File$setWritable(Class<?> callerClass, File file, boolean writable);
+
+    void check$java_io_File$setWritable(Class<?> callerClass, File file, boolean writable, boolean ownerOnly);
+
+    void check$java_io_FileOutputStream$(Class<?> callerClass, File file);
+
+    void check$java_io_FileOutputStream$(Class<?> callerClass, File file, boolean append);
+
+    void check$java_io_FileOutputStream$(Class<?> callerClass, String name);
+
+    void check$java_io_FileOutputStream$(Class<?> callerClass, String name, boolean append);
+
+    void check$java_util_Scanner$(Class<?> callerClass, File source);
+
+    void check$java_util_Scanner$(Class<?> callerClass, File source, String charsetName);
+
+    void check$java_util_Scanner$(Class<?> callerClass, File source, Charset charset);
+
+    // nio
+    void check$java_nio_file_Files$$getOwner(Class<?> callerClass, Path path, LinkOption... options);
+
+    void check$java_nio_file_Files$$probeContentType(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$setOwner(Class<?> callerClass, Path path, UserPrincipal principal);
+
+    // file system providers
+    void check$java_nio_file_spi_FileSystemProvider$(Class<?> callerClass);
+
+    void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, URI uri, Map<String, ?> env);
+
+    void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, Path path, Map<String, ?> env);
+
+    void checkNewInputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options);
+
+    void checkNewOutputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options);
+
+    void checkNewFileChannel(
+        Class<?> callerClass,
+        FileSystemProvider that,
+        Path path,
+        Set<? extends OpenOption> options,
+        FileAttribute<?>... attrs
+    );
+
+    void checkNewAsynchronousFileChannel(
+        Class<?> callerClass,
+        FileSystemProvider that,
+        Path path,
+        Set<? extends OpenOption> options,
+        ExecutorService executor,
+        FileAttribute<?>... attrs
+    );
+
+    void checkNewByteChannel(
+        Class<?> callerClass,
+        FileSystemProvider that,
+        Path path,
+        Set<? extends OpenOption> options,
+        FileAttribute<?>... attrs
+    );
+
+    void checkNewDirectoryStream(Class<?> callerClass, FileSystemProvider that, Path dir, DirectoryStream.Filter<? super Path> filter);
+
+    void checkCreateDirectory(Class<?> callerClass, FileSystemProvider that, Path dir, FileAttribute<?>... attrs);
+
+    void checkCreateSymbolicLink(Class<?> callerClass, FileSystemProvider that, Path link, Path target, FileAttribute<?>... attrs);
+
+    void checkCreateLink(Class<?> callerClass, FileSystemProvider that, Path link, Path existing);
+
+    void checkDelete(Class<?> callerClass, FileSystemProvider that, Path path);
+
+    void checkDeleteIfExists(Class<?> callerClass, FileSystemProvider that, Path path);
+
+    void checkReadSymbolicLink(Class<?> callerClass, FileSystemProvider that, Path link);
+
+    void checkCopy(Class<?> callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options);
+
+    void checkMove(Class<?> callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options);
+
+    void checkIsSameFile(Class<?> callerClass, FileSystemProvider that, Path path, Path path2);
+
+    void checkIsHidden(Class<?> callerClass, FileSystemProvider that, Path path);
+
+    void checkGetFileStore(Class<?> callerClass, FileSystemProvider that, Path path);
+
+    void checkCheckAccess(Class<?> callerClass, FileSystemProvider that, Path path, AccessMode... modes);
+
+    void checkGetFileAttributeView(Class<?> callerClass, FileSystemProvider that, Path path, Class<? extends FileAttributeView> type, LinkOption... options);
+
+    void checkReadAttributes(Class<?> callerClass, FileSystemProvider that, Path path, Class<? extends BasicFileAttributes> type, LinkOption... options);
+
+    void checkReadAttributes(Class<?> callerClass, FileSystemProvider that, Path path, String attributes, LinkOption... options);
+
+    void checkReadAttributesIfExists(Class<?> callerClass, FileSystemProvider that, Path path, Class<? extends BasicFileAttributes> type, LinkOption... options);
+
+    void checkSetAttribute(Class<?> callerClass, FileSystemProvider that, Path path, String attribute, Object value, LinkOption... options);
+
+    void checkExists(Class<?> callerClass, FileSystemProvider that, Path path, LinkOption... options);
+
+    // file store
+    void checkGetFileStoreAttributeView(Class<?> callerClass, FileStore that, Class<? extends FileStoreAttributeView> type);
+
+    void checkGetAttribute(Class<?> callerClass, FileStore that, String attribute);
+
+    void checkGetBlockSize(Class<?> callerClass, FileStore that);
+
+    void checkGetTotalSpace(Class<?> callerClass, FileStore that);
+
+    void checkGetUnallocatedSpace(Class<?> callerClass, FileStore that);
+
+    void checkGetUsableSpace(Class<?> callerClass, FileStore that);
+
+    void checkIsReadOnly(Class<?> callerClass, FileStore that);
+
+    void checkName(Class<?> callerClass, FileStore that);
+
+    void checkType(Class<?> callerClass, FileStore that);
+
+    ////////////////////
+    //
+    // Thread management
+    //
+
+    void check$java_lang_Thread$start(Class<?> callerClass, Thread thread);
+
+    void check$java_lang_Thread$setDaemon(Class<?> callerClass, Thread thread, boolean on);
+
+    void check$java_lang_ThreadGroup$setDaemon(Class<?> callerClass, ThreadGroup threadGroup, boolean daemon);
+
+    void check$java_util_concurrent_ForkJoinPool$setParallelism(Class<?> callerClass, ForkJoinPool forkJoinPool, int size);
+
+    void check$java_lang_Thread$setName(Class<?> callerClass, Thread thread, String name);
+
+    void check$java_lang_Thread$setPriority(Class<?> callerClass, Thread thread, int newPriority);
+
+    void check$java_lang_Thread$setUncaughtExceptionHandler(Class<?> callerClass, Thread thread, Thread.UncaughtExceptionHandler ueh);
+
+    void check$java_lang_ThreadGroup$setMaxPriority(Class<?> callerClass, ThreadGroup threadGroup, int pri);
+
 }
diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java
index 282860e1cdf60..58bafdc47a0bd 100644
--- a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java
+++ b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java
@@ -11,12 +11,50 @@

 import org.elasticsearch.core.SuppressForbidden;

+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.UserPrincipal;
+import java.security.SecureRandom;
+
+@SuppressForbidden(reason = "Exposes forbidden APIs for testing purposes")
 public final class EntitledActions {
     private EntitledActions() {}

-    @SuppressForbidden(reason = "Exposes forbidden APIs for testing purposes")
-    static void System_clearProperty(String key) {
-        System.clearProperty(key);
+    private static final SecureRandom random = new SecureRandom();
+
+    private static final Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir"));
+
+    private static Path readDir() {
+        return testRootDir.resolve("read_dir");
+    }
+
+    private static Path readWriteDir() {
+        return testRootDir.resolve("read_write_dir");
+    }
+
+    public static UserPrincipal getFileOwner(Path path) throws IOException {
+        return Files.getOwner(path);
+    }
+
+    public static void createFile(Path path) throws IOException {
+        Files.createFile(path);
+    }
+
+    public static Path createTempFileForRead() throws IOException {
+        return Files.createFile(readDir().resolve("entitlements-" + random.nextLong() + ".tmp"));
+    }
+
+    public static Path createTempFileForWrite() throws IOException {
+        return Files.createFile(readWriteDir().resolve("entitlements-" + random.nextLong() + ".tmp"));
+    }
+
+    public static Path createTempDirectoryForWrite() throws IOException {
+
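The EntitledActions helpers above run inside a plugin that is granted the necessary entitlements, so they may use forbidden APIs to stage fixtures; the un-entitled test plugin then exercises exactly one instrumented call per test action. A minimal sketch of that division of labor, assuming the names from this diff (combining the two steps into a single method is illustrative, not code from the change):

    static void deleteStagedFile() throws java.io.IOException {
        // privileged setup: runs entitled, may touch the filesystem freely
        java.nio.file.Path victim = EntitledActions.createTempFileForWrite();
        // the single forbidden call under test; check$java_io_File$delete fires here
        victim.toFile().delete();
    }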
return Files.createDirectory(readWriteDir().resolve("entitlements-dir-" + random.nextLong())); } + public static Path createTempSymbolicLink() throws IOException { + return Files.createSymbolicLink(readDir().resolve("entitlements-link-" + random.nextLong()), readWriteDir()); + } } diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java index 7a60d92ecc552..cec48ac168678 100644 --- a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java +++ b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java @@ -15,7 +15,7 @@ import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.Plugin; -import static org.elasticsearch.entitlement.qa.entitled.EntitledActions.System_clearProperty; +import java.util.concurrent.atomic.AtomicBoolean; public class EntitledPlugin extends Plugin implements ExtensiblePlugin { @@ -28,11 +28,19 @@ public static void selfTest() { selfTestNotEntitled(); } - private static final String SELF_TEST_PROPERTY = "org.elasticsearch.entitlement.qa.selfTest"; - private static void selfTestEntitled() { logger.debug("selfTestEntitled"); - System_clearProperty(SELF_TEST_PROPERTY); + AtomicBoolean threadRan = new AtomicBoolean(false); + try { + Thread testThread = new Thread(() -> threadRan.set(true), "testThread"); + testThread.start(); + testThread.join(); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + if (threadRan.get() == false) { + throw new AssertionError("Self-test thread did not run"); + } } private static void selfTestNotEntitled() { diff --git a/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml b/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml deleted file mode 100644 index 81acd4c467f94..0000000000000 --- a/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml +++ /dev/null @@ -1,4 +0,0 @@ -org.elasticsearch.entitlement.qa.entitled: - - write_system_properties: - properties: - - org.elasticsearch.entitlement.qa.selfTest diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java index 6564e0eed41e1..ca03014634076 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java @@ -17,11 +17,38 @@ import java.net.DatagramSocketImpl; import java.net.InetAddress; import java.net.NetworkInterface; +import java.net.ProtocolFamily; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketAddress; import java.net.SocketException; import java.net.SocketImpl; +import java.net.URI; +import java.nio.channels.AsynchronousChannelGroup; +import java.nio.channels.AsynchronousServerSocketChannel; +import java.nio.channels.AsynchronousSocketChannel; +import java.nio.channels.DatagramChannel; +import java.nio.channels.Pipe; +import java.nio.channels.SeekableByteChannel; +import java.nio.channels.ServerSocketChannel; +import 
java.nio.channels.SocketChannel; +import java.nio.channels.spi.AbstractSelector; +import java.nio.channels.spi.AsynchronousChannelProvider; +import java.nio.channels.spi.SelectorProvider; +import java.nio.charset.Charset; +import java.nio.charset.spi.CharsetProvider; +import java.nio.file.AccessMode; +import java.nio.file.CopyOption; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.FileSystem; +import java.nio.file.LinkOption; +import java.nio.file.OpenOption; +import java.nio.file.Path; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.FileAttributeView; +import java.nio.file.spi.FileSystemProvider; import java.security.cert.Certificate; import java.text.BreakIterator; import java.text.Collator; @@ -35,8 +62,12 @@ import java.text.spi.DateFormatSymbolsProvider; import java.text.spi.DecimalFormatSymbolsProvider; import java.text.spi.NumberFormatProvider; +import java.util.Iterator; import java.util.Locale; import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ThreadFactory; import java.util.spi.CalendarDataProvider; import java.util.spi.CalendarNameProvider; import java.util.spi.CurrencyNameProvider; @@ -486,4 +517,163 @@ protected void connect(InetAddress address, int port) throws SocketException {} private static RuntimeException unexpected() { return new IllegalStateException("This method isn't supposed to be called"); } + + static class DummySelectorProvider extends SelectorProvider { + @Override + public DatagramChannel openDatagramChannel() throws IOException { + return null; + } + + @Override + public DatagramChannel openDatagramChannel(ProtocolFamily family) throws IOException { + return null; + } + + @Override + public Pipe openPipe() throws IOException { + return null; + } + + @Override + public AbstractSelector openSelector() throws IOException { + return null; + } + + @Override + public ServerSocketChannel openServerSocketChannel() throws IOException { + return null; + } + + @Override + public SocketChannel openSocketChannel() throws IOException { + return null; + } + } + + static class DummyAsynchronousChannelProvider extends AsynchronousChannelProvider { + @Override + public AsynchronousChannelGroup openAsynchronousChannelGroup(int nThreads, ThreadFactory threadFactory) throws IOException { + return null; + } + + @Override + public AsynchronousChannelGroup openAsynchronousChannelGroup(ExecutorService executor, int initialSize) throws IOException { + return null; + } + + @Override + public AsynchronousServerSocketChannel openAsynchronousServerSocketChannel(AsynchronousChannelGroup group) throws IOException { + return null; + } + + @Override + public AsynchronousSocketChannel openAsynchronousSocketChannel(AsynchronousChannelGroup group) throws IOException { + return null; + } + } + + static class DummyCharsetProvider extends CharsetProvider { + @Override + public Iterator charsets() { + return null; + } + + @Override + public Charset charsetForName(String charsetName) { + return null; + } + } + + static class DummyFileSystemProvider extends FileSystemProvider { + @Override + public String getScheme() { + return ""; + } + + @Override + public FileSystem newFileSystem(URI uri, Map env) throws IOException { + return null; + } + + @Override + public FileSystem getFileSystem(URI uri) { + return null; + } + + @Override + public Path getPath(URI uri) { + return null; + } + + @Override + public 
SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs)
+            throws IOException {
+            return null;
+        }
+
+        @Override
+        public DirectoryStream<Path> newDirectoryStream(Path dir, DirectoryStream.Filter<? super Path> filter) throws IOException {
+            return null;
+        }
+
+        @Override
+        public void createDirectory(Path dir, FileAttribute<?>... attrs) throws IOException {
+
+        }
+
+        @Override
+        public void delete(Path path) throws IOException {
+
+        }
+
+        @Override
+        public void copy(Path source, Path target, CopyOption... options) throws IOException {
+
+        }
+
+        @Override
+        public void move(Path source, Path target, CopyOption... options) throws IOException {
+
+        }
+
+        @Override
+        public boolean isSameFile(Path path, Path path2) throws IOException {
+            return false;
+        }
+
+        @Override
+        public boolean isHidden(Path path) throws IOException {
+            return false;
+        }
+
+        @Override
+        public FileStore getFileStore(Path path) throws IOException {
+            return null;
+        }
+
+        @Override
+        public void checkAccess(Path path, AccessMode... modes) throws IOException {
+
+        }
+
+        @Override
+        public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> type, LinkOption... options) {
+            return null;
+        }
+
+        @Override
+        public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> type, LinkOption... options) throws IOException {
+            return null;
+        }
+
+        @Override
+        public Map<String, Object> readAttributes(Path path, String attributes, LinkOption... options) throws IOException {
+            return Map.of();
+        }
+
+        @Override
+        public void setAttribute(Path path, String attribute, Object value, LinkOption... options) throws IOException {
+
+        }
+    }
 }
diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java
new file mode 100644
index 0000000000000..a4b9767c4c64f
--- /dev/null
+++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.qa.test;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface EntitlementTest {
+    enum ExpectedAccess {
+        PLUGINS,
+        ES_MODULES_ONLY,
+        SERVER_ONLY,
+        ALWAYS_DENIED
+    }
+
+    ExpectedAccess expectedAccess();
+
+    int fromJavaVersion() default -1;
+}
diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java
new file mode 100644
index 0000000000000..29736a46040e3
--- /dev/null
+++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright Elasticsearch B.V.
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.qa.entitled.EntitledActions; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.UserPrincipal; +import java.util.Scanner; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; + +@SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") +class FileCheckActions { + + static Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir")); + + static Path readDir() { + return testRootDir.resolve("read_dir"); + } + + static Path readWriteDir() { + return testRootDir.resolve("read_write_dir"); + } + + static Path readFile() { + return testRootDir.resolve("read_file"); + } + + static Path readWriteFile() { + return testRootDir.resolve("read_write_file"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileCreateNewFile() throws IOException { + readWriteDir().resolve("new_file").toFile().createNewFile(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileCreateTempFile() throws IOException { + File.createTempFile("prefix", "suffix", readWriteDir().toFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileDelete() throws IOException { + Path toDelete = readWriteDir().resolve("to_delete"); + EntitledActions.createFile(toDelete); + toDelete.toFile().delete(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileDeleteOnExit() throws IOException { + Path toDelete = readWriteDir().resolve("to_delete_on_exit"); + EntitledActions.createFile(toDelete); + toDelete.toFile().deleteOnExit(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileMkdir() throws IOException { + Path mkdir = readWriteDir().resolve("mkdir"); + mkdir.toFile().mkdir(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileMkdirs() throws IOException { + Path mkdir = readWriteDir().resolve("mkdirs"); + mkdir.toFile().mkdirs(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileRenameTo() throws IOException { + Path toRename = readWriteDir().resolve("to_rename"); + EntitledActions.createFile(toRename); + toRename.toFile().renameTo(readWriteDir().resolve("renamed").toFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetExecutable() throws IOException { + readWriteFile().toFile().setExecutable(false); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetExecutableOwner() throws IOException { + readWriteFile().toFile().setExecutable(false, false); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetLastModified() throws IOException { + readWriteFile().toFile().setLastModified(System.currentTimeMillis()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetReadable() throws IOException { + 
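Each @EntitlementTest action is a static, parameterless method that performs one checked operation. expectedAccess declares who may perform it: PLUGINS means any correctly entitled plugin, ES_MODULES_ONLY narrows it to Elasticsearch modules, SERVER_ONLY restricts it to the server itself, and ALWAYS_DENIED means no policy can grant it; fromJavaVersion (default -1, meaning no minimum) suppresses an action on older JDKs, since getTestEntries later in this diff filters on Runtime.version().feature(). A hypothetical action following those conventions (fileTouchExample is made up; the annotation, PLUGINS, and readWriteDir() are from this diff):

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileTouchExample() throws IOException {
        // exactly one instrumented call, against the writable fixture directory
        readWriteDir().resolve("touched").toFile().createNewFile();
    }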
readWriteFile().toFile().setReadable(true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetReadableOwner() throws IOException { + readWriteFile().toFile().setReadable(true, false); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetReadOnly() throws IOException { + Path readOnly = readWriteDir().resolve("read_only"); + EntitledActions.createFile(readOnly); + readOnly.toFile().setReadOnly(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetWritable() throws IOException { + readWriteFile().toFile().setWritable(true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void fileSetWritableOwner() throws IOException { + readWriteFile().toFile().setWritable(true, false); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createScannerFile() throws FileNotFoundException { + new Scanner(readFile().toFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createScannerFileWithCharset() throws IOException { + new Scanner(readFile().toFile(), StandardCharsets.UTF_8); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createScannerFileWithCharsetName() throws FileNotFoundException { + new Scanner(readFile().toFile(), "UTF-8"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileOutputStreamString() throws IOException { + new FileOutputStream(readWriteFile().toString()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileOutputStreamStringWithAppend() throws IOException { + new FileOutputStream(readWriteFile().toString(), false).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileOutputStreamFile() throws IOException { + new FileOutputStream(readWriteFile().toFile()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileOutputStreamFileWithAppend() throws IOException { + new FileOutputStream(readWriteFile().toFile(), false).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void filesGetOwner() throws IOException { + Files.getOwner(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void filesProbeContentType() throws IOException { + Files.probeContentType(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void filesSetOwner() throws IOException { + UserPrincipal owner = EntitledActions.getFileOwner(readWriteFile()); + Files.setOwner(readWriteFile(), owner); // set to existing owner, just trying to execute the method + } + + private FileCheckActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileStoreActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileStoreActions.java new file mode 100644 index 0000000000000..0c8026ea9fee4 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileStoreActions.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
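A pattern worth noting in FileCheckActions above: the write-oriented actions avoid observable side effects by feeding each mutator its current state, for example setWritable(true) on an already writable file, or Files.setOwner with the owner just read back, so the corresponding checks (check$java_io_File$setWritable, check$java_nio_file_Files$$setOwner) fire without disturbing shared fixtures. The idiom in isolation, as a sketch over any writable test path:

    static void exerciseSetOwner(Path path) throws IOException {
        // entitled read of the current owner, so only setOwner trips a check
        UserPrincipal owner = EntitledActions.getFileOwner(path);
        Files.setOwner(path, owner); // instrumented call; state is unchanged
    }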
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.attribute.FileStoreAttributeView; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; + +class FileStoreActions { + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkGetFileStoreAttributeView() throws IOException { + Files.getFileStore(FileCheckActions.readWriteFile()).getFileStoreAttributeView(FileStoreAttributeView.class); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetAttribute() throws IOException { + try { + Files.getFileStore(FileCheckActions.readFile()).getAttribute("zfs:compression"); + } catch (UnsupportedOperationException e) { + // It's OK if the attribute view is not available or it does not support reading the attribute + } + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetBlockSize() throws IOException { + Files.getFileStore(FileCheckActions.readWriteFile()).getBlockSize(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetTotalSpace() throws IOException { + Files.getFileStore(FileCheckActions.readWriteFile()).getTotalSpace(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetUnallocatedSpace() throws IOException { + Files.getFileStore(FileCheckActions.readWriteFile()).getUnallocatedSpace(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkGetUsableSpace() throws IOException { + Files.getFileStore(FileCheckActions.readFile()).getUsableSpace(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkIsReadOnly() throws IOException { + Files.getFileStore(FileCheckActions.readFile()).isReadOnly(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkName() throws IOException { + Files.getFileStore(FileCheckActions.readFile()).name(); + } + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void checkType() throws IOException { + Files.getFileStore(FileCheckActions.readFile()).type(); + } + + private FileStoreActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/ManageThreadsActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/ManageThreadsActions.java new file mode 100644 index 0000000000000..53f17faf06998 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/ManageThreadsActions.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
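FileStoreActions below tolerates UnsupportedOperationException deliberately: FileStore attributes such as "zfs:compression" are filesystem specific, and the test only needs the entitlement check to run before the store rejects the attribute. The probe generalized, as a sketch (probeAttribute is a made-up name; getAttribute is the real java.nio.file.FileStore method):

    static Object probeAttribute(FileStore store, String attribute) throws IOException {
        try {
            return store.getAttribute(attribute);
        } catch (UnsupportedOperationException e) {
            return null; // attribute not supported here; the entitlement check still fired
        }
    }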
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.SuppressForbidden; + +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.atomic.AtomicBoolean; + +import static java.lang.Thread.currentThread; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; + +@SuppressForbidden(reason = "testing entitlements") +@SuppressWarnings("unused") // used via reflection +class ManageThreadsActions { + private ManageThreadsActions() {} + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$start() throws InterruptedException { + AtomicBoolean threadRan = new AtomicBoolean(false); + Thread thread = new Thread(() -> threadRan.set(true), "test"); + thread.start(); + thread.join(); + assert threadRan.get(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$setDaemon() { + new Thread().setDaemon(true); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_ThreadGroup$setDaemon() { + currentThread().getThreadGroup().setDaemon(currentThread().getThreadGroup().isDaemon()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_util_concurrent_ForkJoinPool$setParallelism() { + ForkJoinPool.commonPool().setParallelism(ForkJoinPool.commonPool().getParallelism()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$setName() { + currentThread().setName(currentThread().getName()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$setPriority() { + currentThread().setPriority(currentThread().getPriority()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_Thread$setUncaughtExceptionHandler() { + currentThread().setUncaughtExceptionHandler(currentThread().getUncaughtExceptionHandler()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void java_lang_ThreadGroup$setMaxPriority() { + currentThread().getThreadGroup().setMaxPriority(currentThread().getThreadGroup().getMaxPriority()); + } + +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NativeActions.java similarity index 85% rename from libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java rename to libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NativeActions.java index 0a69f7255a200..5079e0d38a001 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NativeActions.java @@ -30,9 +30,12 @@ import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_LONG; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; -class VersionSpecificNativeChecks { +class NativeActions { + @EntitlementTest(expectedAccess = SERVER_ONLY) static void enableNativeAccess() throws Exception { ModuleLayer parent = ModuleLayer.boot(); @@ -49,16 +52,19 @@ static void enableNativeAccess() throws Exception { 
controller.enableNativeAccess(targetModule.get()); } + @EntitlementTest(expectedAccess = PLUGINS) static void addressLayoutWithTargetLayout() { AddressLayout addressLayout = ADDRESS.withoutTargetLayout(); addressLayout.withTargetLayout(MemoryLayout.sequenceLayout(Long.MAX_VALUE, ValueLayout.JAVA_BYTE)); } + @EntitlementTest(expectedAccess = PLUGINS) static void linkerDowncallHandle() { Linker linker = Linker.nativeLinker(); linker.downcallHandle(FunctionDescriptor.of(JAVA_LONG, ADDRESS)); } + @EntitlementTest(expectedAccess = PLUGINS) static void linkerDowncallHandleWithAddress() { Linker linker = Linker.nativeLinker(); linker.downcallHandle(linker.defaultLookup().find("strlen").get(), FunctionDescriptor.of(JAVA_LONG, ADDRESS)); @@ -68,12 +74,13 @@ static int callback() { return 0; } + @EntitlementTest(expectedAccess = PLUGINS) static void linkerUpcallStub() throws NoSuchMethodException { Linker linker = Linker.nativeLinker(); MethodHandle mh = null; try { - mh = MethodHandles.lookup().findStatic(VersionSpecificNativeChecks.class, "callback", MethodType.methodType(int.class)); + mh = MethodHandles.lookup().findStatic(NativeActions.class, "callback", MethodType.methodType(int.class)); } catch (IllegalAccessException e) { assert false; } @@ -82,24 +89,28 @@ static void linkerUpcallStub() throws NoSuchMethodException { linker.upcallStub(mh, callbackDescriptor, Arena.ofAuto()); } + @EntitlementTest(expectedAccess = PLUGINS) static void memorySegmentReinterpret() { Arena arena = Arena.ofAuto(); MemorySegment segment = arena.allocate(100); segment.reinterpret(50); } + @EntitlementTest(expectedAccess = PLUGINS) static void memorySegmentReinterpretWithCleanup() { Arena arena = Arena.ofAuto(); MemorySegment segment = arena.allocate(100); segment.reinterpret(Arena.ofAuto(), s -> {}); } + @EntitlementTest(expectedAccess = PLUGINS) static void memorySegmentReinterpretWithSizeAndCleanup() { Arena arena = Arena.ofAuto(); MemorySegment segment = arena.allocate(100); segment.reinterpret(50, Arena.ofAuto(), s -> {}); } + @EntitlementTest(expectedAccess = PLUGINS) static void symbolLookupWithPath() { try { SymbolLookup.libraryLookup(Path.of("/foo/bar/libFoo.so"), Arena.ofAuto()); @@ -108,6 +119,7 @@ static void symbolLookupWithPath() { } } + @EntitlementTest(expectedAccess = PLUGINS) static void symbolLookupWithName() { try { SymbolLookup.libraryLookup("foo", Arena.ofAuto()); diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioFileSystemActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioFileSystemActions.java new file mode 100644 index 0000000000000..9dc36bda840e5 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioFileSystemActions.java @@ -0,0 +1,230 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
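ManageThreadsActions, earlier in this diff, relies on two idioms: the setter actions hand each thread property its current value (setName(getName()) and so on) so enforcement is exercised without mutating the test JVM, and java_lang_Thread$start proves the spawned thread really ran, guarding against a check that silently suppresses it rather than throwing. The start-and-verify idiom in isolation (the same pattern appears in EntitledPlugin.selfTestEntitled above):

    static void startAndVerify() throws InterruptedException {
        AtomicBoolean ran = new AtomicBoolean(false);
        Thread probe = new Thread(() -> ran.set(true), "probe");
        probe.start();    // check$java_lang_Thread$start is evaluated here
        probe.join();
        assert ran.get(); // the thread must actually have run
    }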
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.entitlement.qa.entitled.EntitledActions; + +import java.io.IOException; +import java.net.URI; +import java.nio.file.FileSystemException; +import java.nio.file.FileSystems; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileOwnerAttributeView; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; + +class NioFileSystemActions { + + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void createFileSystemProvider() { + new DummyImplementations.DummyFileSystemProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkNewFileSystemFromUri() throws IOException { + try (var fs = FileSystems.getDefault().provider().newFileSystem(URI.create("/dummy/path"), Map.of())) {} + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkNewFileSystemFromPath() { + var fs = FileSystems.getDefault().provider(); + try (var newFs = fs.newFileSystem(Path.of("/dummy/path"), Map.of())) {} catch (IOException e) { + // When entitled, we expect to throw IOException, as the path is not valid - we don't really want to create a FS + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewInputStream() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var is = fs.newInputStream(FileCheckActions.readFile())) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewOutputStream() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var os = fs.newOutputStream(FileCheckActions.readWriteFile())) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewFileChannelRead() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var fc = fs.newFileChannel(FileCheckActions.readFile(), Set.of(StandardOpenOption.READ))) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewFileChannelWrite() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var fc = fs.newFileChannel(FileCheckActions.readWriteFile(), Set.of(StandardOpenOption.WRITE))) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewAsynchronousFileChannel() throws IOException { + var fs = FileSystems.getDefault().provider(); + try ( + var fc = fs.newAsynchronousFileChannel( + FileCheckActions.readWriteFile(), + Set.of(StandardOpenOption.WRITE), + EsExecutors.DIRECT_EXECUTOR_SERVICE + ) + ) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewByteChannel() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var bc = fs.newByteChannel(FileCheckActions.readWriteFile(), Set.of(StandardOpenOption.WRITE))) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkNewDirectoryStream() throws IOException { + var fs = FileSystems.getDefault().provider(); + try (var bc = fs.newDirectoryStream(FileCheckActions.readDir(), entry -> false)) {} + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCreateDirectory() throws IOException { + var fs = 
FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + fs.createDirectory(directory.resolve("subdir")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCreateSymbolicLink() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + try { + fs.createSymbolicLink(directory.resolve("link"), FileCheckActions.readFile()); + } catch (UnsupportedOperationException | FileSystemException e) { + // OK not to implement symbolic link in the filesystem + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCreateLink() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + try { + fs.createLink(directory.resolve("link"), FileCheckActions.readFile()); + } catch (UnsupportedOperationException | FileSystemException e) { + // OK not to implement symbolic link in the filesystem + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkDelete() throws IOException { + var fs = FileSystems.getDefault().provider(); + var file = EntitledActions.createTempFileForWrite(); + fs.delete(file); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkDeleteIfExists() throws IOException { + var fs = FileSystems.getDefault().provider(); + var file = EntitledActions.createTempFileForWrite(); + fs.deleteIfExists(file); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkReadSymbolicLink() throws IOException { + var fs = FileSystems.getDefault().provider(); + var link = EntitledActions.createTempSymbolicLink(); + fs.readSymbolicLink(link); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCopy() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + fs.copy(FileCheckActions.readFile(), directory.resolve("copied")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkMove() throws IOException { + var fs = FileSystems.getDefault().provider(); + var directory = EntitledActions.createTempDirectoryForWrite(); + var file = EntitledActions.createTempFileForWrite(); + fs.move(file, directory.resolve("moved")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkIsSameFile() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.isSameFile(FileCheckActions.readWriteFile(), FileCheckActions.readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkIsHidden() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.isHidden(FileCheckActions.readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkGetFileStore() throws IOException { + var fs = FileSystems.getDefault().provider(); + var file = EntitledActions.createTempFileForRead(); + var store = fs.getFileStore(file); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkCheckAccess() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.checkAccess(FileCheckActions.readFile()); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkGetFileAttributeView() { + var fs = FileSystems.getDefault().provider(); + fs.getFileAttributeView(FileCheckActions.readFile(), FileOwnerAttributeView.class); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkReadAttributesWithClass() throws IOException { + var fs = 
FileSystems.getDefault().provider(); + fs.readAttributes(FileCheckActions.readFile(), BasicFileAttributes.class); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkReadAttributesWithString() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.readAttributes(FileCheckActions.readFile(), "*"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkReadAttributesIfExists() throws IOException { + var fs = FileSystems.getDefault().provider(); + fs.readAttributesIfExists(FileCheckActions.readFile(), BasicFileAttributes.class); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkSetAttribute() throws IOException { + var fs = FileSystems.getDefault().provider(); + var file = EntitledActions.createTempFileForWrite(); + try { + fs.setAttribute(file, "dos:hidden", true); + } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) { + // OK if the file does not have/does not support the attribute + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkExists() { + var fs = FileSystems.getDefault().provider(); + fs.exists(FileCheckActions.readFile()); + } + + private NioFileSystemActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index c2b6478e561a8..7e8adc4736684 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -13,18 +13,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyBreakIteratorProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCalendarDataProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCalendarNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCollatorProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyCurrencyNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDateFormatProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDateFormatSymbolsProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyDecimalFormatSymbolsProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyLocaleNameProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyLocaleServiceProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyNumberFormatProvider; -import org.elasticsearch.entitlement.qa.test.DummyImplementations.DummyTimeZoneNameProvider; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; @@ -33,6 +21,9 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.HttpURLConnection; @@ -51,9 
+42,12 @@ import java.net.URLStreamHandler; import java.net.spi.URLStreamHandlerProvider; import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -61,6 +55,7 @@ import javax.net.ssl.SSLContext; import static java.util.Map.entry; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.alwaysDenied; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.deniedToPlugins; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.forPlugins; @@ -69,7 +64,6 @@ @SuppressWarnings("unused") public class RestEntitlementsCheckAction extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestEntitlementsCheckAction.class); - public static final Thread NO_OP_SHUTDOWN_HOOK = new Thread(() -> {}, "Shutdown hook for testing"); record CheckAction(CheckedRunnable action, boolean isAlwaysDeniedToPlugins, Integer fromJavaVersion) { /** @@ -88,135 +82,159 @@ static CheckAction alwaysDenied(CheckedRunnable action) { } } - private static final Map checkActions = Stream.>of( - entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)), - entry("runtime_halt", deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)), - entry("system_exit", deniedToPlugins(RestEntitlementsCheckAction::systemExit)), - entry("create_classloader", forPlugins(RestEntitlementsCheckAction::createClassLoader)), - entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)), - entry("processBuilder_startPipeline", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline)), - entry("set_https_connection_properties", forPlugins(RestEntitlementsCheckAction::setHttpsConnectionProperties)), - entry("set_default_ssl_socket_factory", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLSocketFactory)), - entry("set_default_hostname_verifier", alwaysDenied(RestEntitlementsCheckAction::setDefaultHostnameVerifier)), - entry("set_default_ssl_context", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLContext)), - entry("system_setIn", alwaysDenied(RestEntitlementsCheckAction::system$$setIn)), - entry("system_setOut", alwaysDenied(RestEntitlementsCheckAction::system$$setOut)), - entry("system_setErr", alwaysDenied(RestEntitlementsCheckAction::system$$setErr)), - entry("runtime_addShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$addShutdownHook)), - entry("runtime_removeShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$$removeShutdownHook)), - entry( - "thread_setDefaultUncaughtExceptionHandler", - alwaysDenied(RestEntitlementsCheckAction::thread$$setDefaultUncaughtExceptionHandler) + private static final Map checkActions = Stream.of( + Stream.>of( + entry("create_classloader", forPlugins(RestEntitlementsCheckAction::createClassLoader)), + entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)), + entry("processBuilder_startPipeline", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline)), + entry("set_https_connection_properties", forPlugins(RestEntitlementsCheckAction::setHttpsConnectionProperties)), + 
entry("set_default_ssl_socket_factory", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLSocketFactory)), + entry("set_default_hostname_verifier", alwaysDenied(RestEntitlementsCheckAction::setDefaultHostnameVerifier)), + entry("set_default_ssl_context", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLContext)), + entry( + "thread_setDefaultUncaughtExceptionHandler", + alwaysDenied(RestEntitlementsCheckAction::thread$$setDefaultUncaughtExceptionHandler) + ), + entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), + + entry("locale_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultLocale)), + entry("locale_setDefaultForCategory", alwaysDenied(WritePropertiesCheckActions::setDefaultLocaleForCategory)), + entry("timeZone_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultTimeZone)), + + entry("system_setProperty", forPlugins(WritePropertiesCheckActions::setSystemProperty)), + entry("system_clearProperty", forPlugins(WritePropertiesCheckActions::clearSystemProperty)), + entry("system_setSystemProperties", alwaysDenied(WritePropertiesCheckActions::setSystemProperties)), + + // This group is a bit nasty: if entitlements don't prevent these, then networking is + // irreparably borked for the remainder of the test run. + entry( + "datagramSocket_setDatagramSocketImplFactory", + alwaysDenied(RestEntitlementsCheckAction::datagramSocket$$setDatagramSocketImplFactory) + ), + entry("httpURLConnection_setFollowRedirects", alwaysDenied(RestEntitlementsCheckAction::httpURLConnection$$setFollowRedirects)), + entry("serverSocket_setSocketFactory", alwaysDenied(RestEntitlementsCheckAction::serverSocket$$setSocketFactory)), + entry("socket_setSocketImplFactory", alwaysDenied(RestEntitlementsCheckAction::socket$$setSocketImplFactory)), + entry("url_setURLStreamHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::url$$setURLStreamHandlerFactory)), + entry("urlConnection_setFileNameMap", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setFileNameMap)), + entry( + "urlConnection_setContentHandlerFactory", + alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setContentHandlerFactory) + ), + + entry("proxySelector_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultProxySelector)), + entry("responseCache_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultResponseCache)), + entry( + "createInetAddressResolverProvider", + new CheckAction(VersionSpecificNetworkChecks::createInetAddressResolverProvider, true, 18) + ), + entry("createURLStreamHandlerProvider", alwaysDenied(RestEntitlementsCheckAction::createURLStreamHandlerProvider)), + entry("createURLWithURLStreamHandler", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler)), + entry("createURLWithURLStreamHandler2", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler2)), + entry("datagram_socket_bind", forPlugins(RestEntitlementsCheckAction::bindDatagramSocket)), + entry("datagram_socket_connect", forPlugins(RestEntitlementsCheckAction::connectDatagramSocket)), + entry("datagram_socket_send", forPlugins(RestEntitlementsCheckAction::sendDatagramSocket)), + entry("datagram_socket_receive", forPlugins(RestEntitlementsCheckAction::receiveDatagramSocket)), + entry("datagram_socket_join_group", forPlugins(RestEntitlementsCheckAction::joinGroupDatagramSocket)), + entry("datagram_socket_leave_group", forPlugins(RestEntitlementsCheckAction::leaveGroupDatagramSocket)), + + entry("create_socket_with_proxy", 
forPlugins(NetworkAccessCheckActions::createSocketWithProxy)), + entry("socket_bind", forPlugins(NetworkAccessCheckActions::socketBind)), + entry("socket_connect", forPlugins(NetworkAccessCheckActions::socketConnect)), + entry("server_socket_bind", forPlugins(NetworkAccessCheckActions::serverSocketBind)), + entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)), + + entry("url_open_connection_proxy", forPlugins(NetworkAccessCheckActions::urlOpenConnectionWithProxy)), + entry("http_client_send", forPlugins(VersionSpecificNetworkChecks::httpClientSend)), + entry("http_client_send_async", forPlugins(VersionSpecificNetworkChecks::httpClientSendAsync)), + entry("create_ldap_cert_store", forPlugins(NetworkAccessCheckActions::createLDAPCertStore)), + + entry("server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::serverSocketChannelBind)), + entry("server_socket_channel_bind_backlog", forPlugins(NetworkAccessCheckActions::serverSocketChannelBindWithBacklog)), + entry("server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::serverSocketChannelAccept)), + entry("asynchronous_server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBind)), + entry( + "asynchronous_server_socket_channel_bind_backlog", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBindWithBacklog) + ), + entry( + "asynchronous_server_socket_channel_accept", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAccept) + ), + entry( + "asynchronous_server_socket_channel_accept_with_handler", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAcceptWithHandler) + ), + entry("socket_channel_bind", forPlugins(NetworkAccessCheckActions::socketChannelBind)), + entry("socket_channel_connect", forPlugins(NetworkAccessCheckActions::socketChannelConnect)), + entry("asynchronous_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelBind)), + entry("asynchronous_socket_channel_connect", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnect)), + entry( + "asynchronous_socket_channel_connect_with_completion", + forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnectWithCompletion) + ), + entry("datagram_channel_bind", forPlugins(NetworkAccessCheckActions::datagramChannelBind)), + entry("datagram_channel_connect", forPlugins(NetworkAccessCheckActions::datagramChannelConnect)), + entry("datagram_channel_send", forPlugins(NetworkAccessCheckActions::datagramChannelSend)), + entry("datagram_channel_receive", forPlugins(NetworkAccessCheckActions::datagramChannelReceive)), + + entry("runtime_load", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoad)), + entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), + entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), + entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)) + + // MAINTENANCE NOTE: Please don't add any more entries to this map. + // Put new tests into their own "Actions" class using the @EntitlementTest annotation. 
), - entry("localeServiceProvider", alwaysDenied(RestEntitlementsCheckAction::localeServiceProvider$)), - entry("breakIteratorProvider", alwaysDenied(RestEntitlementsCheckAction::breakIteratorProvider$)), - entry("collatorProvider", alwaysDenied(RestEntitlementsCheckAction::collatorProvider$)), - entry("dateFormatProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatProvider$)), - entry("dateFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatSymbolsProvider$)), - entry("decimalFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::decimalFormatSymbolsProvider$)), - entry("numberFormatProvider", alwaysDenied(RestEntitlementsCheckAction::numberFormatProvider$)), - entry("calendarDataProvider", alwaysDenied(RestEntitlementsCheckAction::calendarDataProvider$)), - entry("calendarNameProvider", alwaysDenied(RestEntitlementsCheckAction::calendarNameProvider$)), - entry("currencyNameProvider", alwaysDenied(RestEntitlementsCheckAction::currencyNameProvider$)), - entry("localeNameProvider", alwaysDenied(RestEntitlementsCheckAction::localeNameProvider$)), - entry("timeZoneNameProvider", alwaysDenied(RestEntitlementsCheckAction::timeZoneNameProvider$)), - entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), - - entry("locale_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultLocale)), - entry("locale_setDefaultForCategory", alwaysDenied(WritePropertiesCheckActions::setDefaultLocaleForCategory)), - entry("timeZone_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultTimeZone)), - - entry("system_setProperty", forPlugins(WritePropertiesCheckActions::setSystemProperty)), - entry("system_clearProperty", forPlugins(WritePropertiesCheckActions::clearSystemProperty)), - entry("system_setSystemProperties", alwaysDenied(WritePropertiesCheckActions::setSystemProperties)), - - // This group is a bit nasty: if entitlements don't prevent these, then networking is - // irreparably borked for the remainder of the test run. 
- entry( - "datagramSocket_setDatagramSocketImplFactory", - alwaysDenied(RestEntitlementsCheckAction::datagramSocket$$setDatagramSocketImplFactory) - ), - entry("httpURLConnection_setFollowRedirects", alwaysDenied(RestEntitlementsCheckAction::httpURLConnection$$setFollowRedirects)), - entry("serverSocket_setSocketFactory", alwaysDenied(RestEntitlementsCheckAction::serverSocket$$setSocketFactory)), - entry("socket_setSocketImplFactory", alwaysDenied(RestEntitlementsCheckAction::socket$$setSocketImplFactory)), - entry("url_setURLStreamHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::url$$setURLStreamHandlerFactory)), - entry("urlConnection_setFileNameMap", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setFileNameMap)), - entry("urlConnection_setContentHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setContentHandlerFactory)), - - entry("proxySelector_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultProxySelector)), - entry("responseCache_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultResponseCache)), - entry( - "createInetAddressResolverProvider", - new CheckAction(VersionSpecificNetworkChecks::createInetAddressResolverProvider, true, 18) - ), - entry("createURLStreamHandlerProvider", alwaysDenied(RestEntitlementsCheckAction::createURLStreamHandlerProvider)), - entry("createURLWithURLStreamHandler", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler)), - entry("createURLWithURLStreamHandler2", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler2)), - entry("datagram_socket_bind", forPlugins(RestEntitlementsCheckAction::bindDatagramSocket)), - entry("datagram_socket_connect", forPlugins(RestEntitlementsCheckAction::connectDatagramSocket)), - entry("datagram_socket_send", forPlugins(RestEntitlementsCheckAction::sendDatagramSocket)), - entry("datagram_socket_receive", forPlugins(RestEntitlementsCheckAction::receiveDatagramSocket)), - entry("datagram_socket_join_group", forPlugins(RestEntitlementsCheckAction::joinGroupDatagramSocket)), - entry("datagram_socket_leave_group", forPlugins(RestEntitlementsCheckAction::leaveGroupDatagramSocket)), - - entry("create_socket_with_proxy", forPlugins(NetworkAccessCheckActions::createSocketWithProxy)), - entry("socket_bind", forPlugins(NetworkAccessCheckActions::socketBind)), - entry("socket_connect", forPlugins(NetworkAccessCheckActions::socketConnect)), - entry("server_socket_bind", forPlugins(NetworkAccessCheckActions::serverSocketBind)), - entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)), - - entry("url_open_connection_proxy", forPlugins(NetworkAccessCheckActions::urlOpenConnectionWithProxy)), - entry("http_client_send", forPlugins(VersionSpecificNetworkChecks::httpClientSend)), - entry("http_client_send_async", forPlugins(VersionSpecificNetworkChecks::httpClientSendAsync)), - entry("create_ldap_cert_store", forPlugins(NetworkAccessCheckActions::createLDAPCertStore)), - - entry("server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::serverSocketChannelBind)), - entry("server_socket_channel_bind_backlog", forPlugins(NetworkAccessCheckActions::serverSocketChannelBindWithBacklog)), - entry("server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::serverSocketChannelAccept)), - entry("asynchronous_server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBind)), - entry( - "asynchronous_server_socket_channel_bind_backlog", - 
forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBindWithBacklog) - ), - entry("asynchronous_server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAccept)), - entry( - "asynchronous_server_socket_channel_accept_with_handler", - forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAcceptWithHandler) - ), - entry("socket_channel_bind", forPlugins(NetworkAccessCheckActions::socketChannelBind)), - entry("socket_channel_connect", forPlugins(NetworkAccessCheckActions::socketChannelConnect)), - entry("asynchronous_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelBind)), - entry("asynchronous_socket_channel_connect", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnect)), - entry( - "asynchronous_socket_channel_connect_with_completion", - forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnectWithCompletion) - ), - entry("datagram_channel_bind", forPlugins(NetworkAccessCheckActions::datagramChannelBind)), - entry("datagram_channel_connect", forPlugins(NetworkAccessCheckActions::datagramChannelConnect)), - entry("datagram_channel_send", forPlugins(NetworkAccessCheckActions::datagramChannelSend)), - entry("datagram_channel_receive", forPlugins(NetworkAccessCheckActions::datagramChannelReceive)), - - entry("runtime_load", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoad)), - entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), - entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), - entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)), - - entry("enable_native_access", new CheckAction(VersionSpecificNativeChecks::enableNativeAccess, false, 22)), - entry("address_target_layout", new CheckAction(VersionSpecificNativeChecks::addressLayoutWithTargetLayout, false, 22)), - entry("donwncall_handle", new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandle, false, 22)), - entry("donwncall_handle_with_address", new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandleWithAddress, false, 22)), - entry("upcall_stub", new CheckAction(VersionSpecificNativeChecks::linkerUpcallStub, false, 22)), - entry("reinterpret", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpret, false, 22)), - entry("reinterpret_cleanup", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithCleanup, false, 22)), - entry( - "reinterpret_size_cleanup", - new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithSizeAndCleanup, false, 22) - ), - entry("symbol_lookup_name", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithName, false, 22)), - entry("symbol_lookup_path", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithPath, false, 22)) + getTestEntries(FileCheckActions.class), + getTestEntries(FileStoreActions.class), + getTestEntries(ManageThreadsActions.class), + getTestEntries(NativeActions.class), + getTestEntries(NioFileSystemActions.class), + getTestEntries(SpiActions.class), + getTestEntries(SystemActions.class) ) + .flatMap(Function.identity()) .filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion()) - .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.toUnmodifiableMap(Entry::getKey, Entry::getValue)); + + @SuppressForbidden(reason = "Need package private 
methods so we don't have to make them all public")
+    private static Method[] getDeclaredMethods(Class<?> clazz) {
+        return clazz.getDeclaredMethods();
+    }
+
+    private static Stream<Entry<String, CheckAction>> getTestEntries(Class<?> actionsClass) {
+        List<Entry<String, CheckAction>> entries = new ArrayList<>();
+        for (var method : getDeclaredMethods(actionsClass)) {
+            var testAnnotation = method.getAnnotation(EntitlementTest.class);
+            if (testAnnotation == null) {
+                continue;
+            }
+            if (Modifier.isStatic(method.getModifiers()) == false) {
+                throw new AssertionError("Entitlement test method [" + method + "] must be static");
+            }
+            if (method.getParameterTypes().length != 0) {
+                throw new AssertionError("Entitlement test method [" + method + "] must not have parameters");
+            }
+
+            CheckedRunnable<Exception> runnable = () -> {
+                try {
+                    method.invoke(null);
+                } catch (IllegalAccessException e) {
+                    throw new AssertionError(e);
+                } catch (InvocationTargetException e) {
+                    if (e.getCause() instanceof Exception exc) {
+                        throw exc;
+                    } else {
+                        throw new AssertionError(e);
+                    }
+                }
+            };
+            boolean deniedToPlugins = testAnnotation.expectedAccess() != PLUGINS;
+            Integer fromJavaVersion = testAnnotation.fromJavaVersion() == -1 ? null : testAnnotation.fromJavaVersion();
+            entries.add(entry(method.getName(), new CheckAction(runnable, deniedToPlugins, fromJavaVersion)));
+        }
+        return entries.stream();
+    }
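For illustration, the scan above picks up any static, parameterless method annotated with @EntitlementTest; a hypothetical action (the name fooAction is invented for this sketch) would look like:

    // in any of the scanned *Actions classes
    @EntitlementTest(expectedAccess = PLUGINS)
    static void fooAction() {
        // perform the access under test here
    }

getTestEntries registers it under the key "fooAction" with deniedToPlugins == false (because expectedAccess is PLUGINS) and fromJavaVersion == null (the annotation's -1 sentinel means no minimum Java version), matching the shape of the hand-written entry("fooAction", forPlugins(...)) entries this change removes.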
     private static void createURLStreamHandlerProvider() {
         var x = new URLStreamHandlerProvider() {
@@ -267,21 +285,6 @@ private static void setDefaultSSLSocketFactory() {
         HttpsURLConnection.setDefaultSSLSocketFactory(new DummyImplementations.DummySSLSocketFactory());
     }
-    @SuppressForbidden(reason = "Specifically testing Runtime.exit")
-    private static void runtimeExit() {
-        Runtime.getRuntime().exit(123);
-    }
-
-    @SuppressForbidden(reason = "Specifically testing Runtime.halt")
-    private static void runtimeHalt() {
-        Runtime.getRuntime().halt(123);
-    }
-
-    @SuppressForbidden(reason = "Specifically testing System.exit")
-    private static void systemExit() {
-        System.exit(123);
-    }
-
     private static void createClassLoader() throws IOException {
         try (var classLoader = new URLClassLoader("test", new URL[0], RestEntitlementsCheckAction.class.getClassLoader())) {
             logger.info("Created URLClassLoader [{}]", classLoader.getName());
@@ -300,80 +303,10 @@ private static void setHttpsConnectionProperties() {
         new DummyImplementations.DummyHttpsURLConnection().setSSLSocketFactory(new DummyImplementations.DummySSLSocketFactory());
     }
-    private static void system$$setIn() {
-        System.setIn(System.in);
-    }
-
-    @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams")
-    private static void system$$setOut() {
-        System.setOut(System.out);
-    }
-
-    @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams")
-    private static void system$$setErr() {
-        System.setErr(System.err);
-    }
-
-    private static void runtime$addShutdownHook() {
-        Runtime.getRuntime().addShutdownHook(NO_OP_SHUTDOWN_HOOK);
-    }
-
-    private static void runtime$$removeShutdownHook() {
-        Runtime.getRuntime().removeShutdownHook(NO_OP_SHUTDOWN_HOOK);
-    }
-
     private static void thread$$setDefaultUncaughtExceptionHandler() {
         Thread.setDefaultUncaughtExceptionHandler(Thread.getDefaultUncaughtExceptionHandler());
     }
-    private static void localeServiceProvider$() {
-        new DummyLocaleServiceProvider();
-    }
-
-    private static void breakIteratorProvider$() {
-        new DummyBreakIteratorProvider();
-    }
-
-    private static void collatorProvider$() {
-        new DummyCollatorProvider();
-    }
-
-    private static void dateFormatProvider$() {
-        new DummyDateFormatProvider();
-    }
-
-    private static void dateFormatSymbolsProvider$() {
-        new DummyDateFormatSymbolsProvider();
-    }
-
-    private static void decimalFormatSymbolsProvider$() {
-        new DummyDecimalFormatSymbolsProvider();
-    }
-
-    private static void numberFormatProvider$() {
-        new DummyNumberFormatProvider();
-    }
-
-    private static void calendarDataProvider$() {
-        new DummyCalendarDataProvider();
-    }
-
-    private static void calendarNameProvider$() {
-        new DummyCalendarNameProvider();
-    }
-
-    private static void currencyNameProvider$() {
-        new DummyCurrencyNameProvider();
-    }
-
-    private static void localeNameProvider$() {
-        new DummyLocaleNameProvider();
-    }
-
-    private static void timeZoneNameProvider$() {
-        new DummyTimeZoneNameProvider();
-    }
-
     private static void logManager$() {
         new java.util.logging.LogManager() {
         };
@@ -462,7 +395,7 @@ public static Set<String> getCheckActionsAllowedInPlugins() {
         return checkActions.entrySet()
             .stream()
             .filter(kv -> kv.getValue().isAlwaysDeniedToPlugins() == false)
-            .map(Map.Entry::getKey)
+            .map(Entry::getKey)
             .collect(Collectors.toSet());
     }
@@ -495,7 +428,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli
         return channel -> {
             logger.info("Calling check action [{}]", actionName);
             checkAction.action().run();
+            logger.debug("Check action [{}] returned", actionName);
             channel.sendResponse(new RestResponse(RestStatus.OK, Strings.format("Successfully executed action [%s]", actionName)));
         };
     }
+
 }
diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java
new file mode 100644
index 0000000000000..a335964c6fa81
--- /dev/null
+++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SpiActions.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import java.io.IOException; +import java.nio.channels.Channel; +import java.nio.channels.spi.SelectorProvider; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; + +class SpiActions { + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createBreakIteratorProvider() { + new DummyImplementations.DummyBreakIteratorProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCollatorProvider() { + new DummyImplementations.DummyCollatorProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDateFormatProvider() { + new DummyImplementations.DummyDateFormatProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDateFormatSymbolsProvider() { + new DummyImplementations.DummyDateFormatSymbolsProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createDecimalFormatSymbolsProvider() { + new DummyImplementations.DummyDecimalFormatSymbolsProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createNumberFormatProvider() { + new DummyImplementations.DummyNumberFormatProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCalendarDataProvider() { + new DummyImplementations.DummyCalendarDataProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCalendarNameProvider() { + new DummyImplementations.DummyCalendarNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCurrencyNameProvider() { + new DummyImplementations.DummyCurrencyNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createLocaleNameProvider() { + new DummyImplementations.DummyLocaleNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createTimeZoneNameProvider() { + new DummyImplementations.DummyTimeZoneNameProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createLocaleServiceProvider() { + new DummyImplementations.DummyLocaleServiceProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void getInheritedChannel() throws IOException { + Channel channel = null; + try { + channel = SelectorProvider.provider().inheritedChannel(); + } finally { + if (channel != null) { + channel.close(); + } + } + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createSelectorProvider() { + new DummyImplementations.DummySelectorProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createAsynchronousChannelProvider() { + new DummyImplementations.DummyAsynchronousChannelProvider(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createCharsetProvider() { + new DummyImplementations.DummyCharsetProvider(); + } + + private SpiActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java new file mode 100644 index 0000000000000..4df1b1dd26d61 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/SystemActions.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.SuppressForbidden; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY; + +class SystemActions { + + @SuppressForbidden(reason = "Specifically testing Runtime.exit") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void runtimeExit() { + Runtime.getRuntime().exit(123); + } + + @SuppressForbidden(reason = "Specifically testing Runtime.halt") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void runtimeHalt() { + Runtime.getRuntime().halt(123); + } + + @SuppressForbidden(reason = "Specifically testing System.exit") + @EntitlementTest(expectedAccess = SERVER_ONLY) + static void systemExit() { + System.exit(123); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetIn() { + System.setIn(System.in); + } + + @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetOut() { + System.setOut(System.out); + } + + @SuppressForbidden(reason = "This should be a no-op so we don't interfere with system streams") + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void systemSetErr() { + System.setErr(System.err); + } + + private static final Thread NO_OP_SHUTDOWN_HOOK = new Thread(() -> {}, "Shutdown hook for testing"); + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void runtimeAddShutdownHook() { + Runtime.getRuntime().addShutdownHook(NO_OP_SHUTDOWN_HOOK); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void runtimeRemoveShutdownHook() { + Runtime.getRuntime().removeShutdownHook(NO_OP_SHUTDOWN_HOOK); + } + + private SystemActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java deleted file mode 100644 index cb84c9bd9042d..0000000000000 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.entitlement.qa.test; - -class VersionSpecificNativeChecks { - - static void enableNativeAccess() throws Exception {} - - static void addressLayoutWithTargetLayout() {} - - static void linkerDowncallHandle() {} - - static void linkerDowncallHandleWithAddress() {} - - static void linkerUpcallStub() throws NoSuchMethodException {} - - static void memorySegmentReinterpret() {} - - static void memorySegmentReinterpretWithCleanup() {} - - static void memorySegmentReinterpretWithSizeAndCleanup() {} - - static void symbolLookupWithPath() {} - - static void symbolLookupWithName() {} -} diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java index b770b4915a317..bd88c23fc5b91 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.entitlement.qa.EntitlementsTestRule.PolicyBuilder; import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; @@ -22,18 +23,30 @@ public abstract class AbstractEntitlementsIT extends ESRestTestCase { - static final EntitlementsTestRule.PolicyBuilder ALLOWED_TEST_ENTITLEMENTS = (builder, tempDir) -> { + static final PolicyBuilder ALLOWED_TEST_ENTITLEMENTS = (builder, tempDir) -> { builder.value("create_class_loader"); builder.value("set_https_connection_properties"); builder.value("inbound_network"); builder.value("outbound_network"); builder.value("load_native_libraries"); + builder.value("manage_threads"); builder.value( Map.of( "write_system_properties", Map.of("properties", List.of("es.entitlements.checkSetSystemProperty", "es.entitlements.checkClearSystemProperty")) ) ); + builder.value( + Map.of( + "files", + List.of( + Map.of("path", tempDir.resolve("read_dir"), "mode", "read"), + Map.of("path", tempDir.resolve("read_write_dir"), "mode", "read_write"), + Map.of("path", tempDir.resolve("read_file"), "mode", "read"), + Map.of("path", tempDir.resolve("read_write_file"), "mode", "read_write") + ) + ) + ); }; private final String actionName; diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java index 8c9dcb6dd0efe..9dc1028148a31 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java @@ -15,6 +15,7 @@ import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.yaml.YamlXContent; +import org.junit.rules.ExternalResource; import org.junit.rules.RuleChain; import org.junit.rules.TemporaryFolder; import org.junit.rules.TestRule; @@ -23,10 +24,29 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.nio.file.Files; import java.nio.file.Path; +import java.util.List; +import java.util.Map; class EntitlementsTestRule implements TestRule { + // entitlements that test methods may use, see EntitledActions + private static final PolicyBuilder 
ENTITLED_POLICY = (builder, tempDir) -> {
+        builder.value("manage_threads");
+        builder.value(
+            Map.of(
+                "files",
+                List.of(
+                    Map.of("path", tempDir.resolve("read_dir"), "mode", "read"),
+                    Map.of("path", tempDir.resolve("read_write_dir"), "mode", "read_write"),
+                    Map.of("path", tempDir.resolve("read_file"), "mode", "read"),
+                    Map.of("path", tempDir.resolve("read_write_file"), "mode", "read_write")
+                )
+            )
+        );
+    };
+
     interface PolicyBuilder {
         void build(XContentBuilder builder, Path tempDir) throws IOException;
     }
@@ -38,14 +58,26 @@ interface PolicyBuilder {
     @SuppressWarnings("this-escape")
     EntitlementsTestRule(boolean modular, PolicyBuilder policyBuilder) {
         testDir = new TemporaryFolder();
+        var tempDirSetup = new ExternalResource() {
+            @Override
+            protected void before() throws Throwable {
+                Path testPath = testDir.getRoot().toPath();
+                Files.createDirectory(testPath.resolve("read_dir"));
+                Files.createDirectory(testPath.resolve("read_write_dir"));
+                Files.writeString(testPath.resolve("read_file"), "");
+                Files.writeString(testPath.resolve("read_write_file"), "");
+            }
+        };
         cluster = ElasticsearchCluster.local()
-            .module("entitled")
+            .module("entitled", spec -> buildEntitlements(spec, "org.elasticsearch.entitlement.qa.entitled", ENTITLED_POLICY))
             .module("entitlement-test-plugin", spec -> setupEntitlements(spec, modular, policyBuilder))
             .systemProperty("es.entitlements.enabled", "true")
             .systemProperty("es.entitlements.testdir", () -> testDir.getRoot().getAbsolutePath())
             .setting("xpack.security.enabled", "false")
+            // Logs in libs/entitlement/qa/build/test-results/javaRestTest/TEST-org.elasticsearch.entitlement.qa.EntitlementsXXX.xml
+            // .setting("logger.org.elasticsearch.entitlement", "DEBUG")
             .build();
-        ruleChain = RuleChain.outerRule(testDir).around(cluster);
+        ruleChain = RuleChain.outerRule(testDir).around(tempDirSetup).around(cluster);
     }
     @Override
@@ -53,28 +85,30 @@ public Statement apply(Statement statement, Description description) {
         return ruleChain.apply(statement, description);
     }
+    private void buildEntitlements(PluginInstallSpec spec, String moduleName, PolicyBuilder policyBuilder) {
+        spec.withEntitlementsOverride(old -> {
+            try (var builder = YamlXContent.contentBuilder()) {
+                builder.startObject();
+                builder.field(moduleName);
+                builder.startArray();
+
+                policyBuilder.build(builder, testDir.getRoot().toPath());
+                builder.endArray();
+                builder.endObject();
+
+                String policy = Strings.toString(builder);
+                System.out.println("Using entitlement policy for module " + moduleName + ":\n" + policy);
+                return Resource.fromString(policy);
+            } catch (IOException e) {
+                throw new UncheckedIOException(e);
+            }
+        });
+    }
+
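For a sense of what buildEntitlements produces: with the ENTITLED_POLICY above, the YAML handed to withEntitlementsOverride should look roughly like the following (the temp paths are illustrative, and exact quoting and ordering are up to YamlXContent and Map.of):

    org.elasticsearch.entitlement.qa.entitled:
    - "manage_threads"
    - files:
      - path: "/tmp/junit1234567890/read_dir"
        mode: "read"
      - path: "/tmp/junit1234567890/read_write_dir"
        mode: "read_write"
      - path: "/tmp/junit1234567890/read_file"
        mode: "read"
      - path: "/tmp/junit1234567890/read_write_file"
        mode: "read_write"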
"org.elasticsearch.entitlement.qa.test" : "ALL-UNNAMED"; if (policyBuilder != null) { - spec.withEntitlementsOverride(old -> { - try { - try (var builder = YamlXContent.contentBuilder()) { - builder.startObject(); - builder.field(moduleName); - builder.startArray(); - policyBuilder.build(builder, testDir.getRoot().toPath()); - builder.endArray(); - builder.endObject(); - - String policy = Strings.toString(builder); - System.out.println("Using entitlement policy:\n" + policy); - return Resource.fromString(policy); - } - - } catch (IOException e) { - throw new UncheckedIOException(e); - } - }); + buildEntitlements(spec, moduleName, policyBuilder); } if (modular == false) { diff --git a/libs/entitlement/src/main/java/module-info.java b/libs/entitlement/src/main/java/module-info.java index c0959f212558a..5c8441bcecb9c 100644 --- a/libs/entitlement/src/main/java/module-info.java +++ b/libs/entitlement/src/main/java/module-info.java @@ -19,6 +19,7 @@ exports org.elasticsearch.entitlement.runtime.api; exports org.elasticsearch.entitlement.runtime.policy; + exports org.elasticsearch.entitlement.runtime.policy.entitlements to org.elasticsearch.server; exports org.elasticsearch.entitlement.instrumentation; exports org.elasticsearch.entitlement.bootstrap to org.elasticsearch.server; exports org.elasticsearch.entitlement.initialization to java.base; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 496a28a448381..19acd0decdca7 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -14,6 +14,7 @@ import com.sun.tools.attach.AttachNotSupportedException; import com.sun.tools.attach.VirtualMachine; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; @@ -22,6 +23,7 @@ import org.elasticsearch.logging.Logger; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; @@ -31,10 +33,22 @@ public class EntitlementBootstrap { - public record BootstrapArgs(Map pluginPolicies, Function, String> pluginResolver) { + public record BootstrapArgs( + Map pluginPolicies, + Function, String> pluginResolver, + Path[] dataDirs, + Path configDir, + Path tempDir + ) { public BootstrapArgs { requireNonNull(pluginPolicies); requireNonNull(pluginResolver); + requireNonNull(dataDirs); + if (dataDirs.length == 0) { + throw new IllegalArgumentException("must provide at least one data directory"); + } + requireNonNull(configDir); + requireNonNull(tempDir); } } @@ -50,13 +64,22 @@ public static BootstrapArgs bootstrapArgs() { * * @param pluginPolicies a map holding policies for plugins (and modules), by plugin (or module) name. * @param pluginResolver a functor to map a Java Class to the plugin it belongs to (the plugin name). 
+ * @param dataDirs data directories for Elasticsearch + * @param configDir the config directory for Elasticsearch + * @param tempDir the temp directory for Elasticsearch */ - public static void bootstrap(Map pluginPolicies, Function, String> pluginResolver) { + public static void bootstrap( + Map pluginPolicies, + Function, String> pluginResolver, + Path[] dataDirs, + Path configDir, + Path tempDir + ) { logger.debug("Loading entitlement agent"); if (EntitlementBootstrap.bootstrapArgs != null) { throw new IllegalStateException("plugin data is already set"); } - EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(pluginPolicies, pluginResolver); + EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(pluginPolicies, pluginResolver, dataDirs, configDir, tempDir); exportInitializationToAgent(); loadAgent(findAgentJar()); selfTest(); @@ -123,44 +146,31 @@ private static String findAgentJar() { * @throws IllegalStateException if the entitlements system can't prevent an unauthorized action of our choosing */ private static void selfTest() { - ensureCannotStartProcess(); - ensureCanCreateTempFile(); + ensureCannotStartProcess(ProcessBuilder::start); + // Try again with reflection + ensureCannotStartProcess(EntitlementBootstrap::reflectiveStartProcess); } - private static void ensureCannotStartProcess() { + private static void ensureCannotStartProcess(CheckedConsumer startProcess) { try { // The command doesn't matter; it doesn't even need to exist - new ProcessBuilder("").start(); + startProcess.accept(new ProcessBuilder("")); } catch (NotEntitledException e) { logger.debug("Success: Entitlement protection correctly prevented process creation"); return; - } catch (IOException e) { + } catch (Exception e) { throw new IllegalStateException("Failed entitlement protection self-test", e); } throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted"); } - /** - * Originally {@code Security.selfTest}. - */ - @SuppressForbidden(reason = "accesses jvm default tempdir as a self-test") - private static void ensureCanCreateTempFile() { + private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception { try { - Path p = Files.createTempFile(null, null); - p.toFile().deleteOnExit(); - - // Make an effort to clean up the file immediately; also, deleteOnExit leaves the file if the JVM exits abnormally. 
- try { - Files.delete(p); - } catch (IOException ignored) { - // Can be caused by virus scanner - } - } catch (NotEntitledException e) { - throw new IllegalStateException("Entitlement protection self-test was incorrectly forbidden", e); - } catch (Exception e) { - throw new IllegalStateException("Unable to perform entitlement protection self-test", e); + var start = ProcessBuilder.class.getMethod("start"); + start.invoke(pb); + } catch (InvocationTargetException e) { + throw (Exception) e.getCause(); } - logger.debug("Success: Entitlement protection correctly permitted temp file creation"); } private static final Logger logger = LogManager.getLogger(EntitlementBootstrap.class); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index a8938c16955a7..12af77de248e1 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -18,24 +18,48 @@ import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; -import org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement; -import org.elasticsearch.entitlement.runtime.policy.Entitlement; -import org.elasticsearch.entitlement.runtime.policy.ExitVMEntitlement; -import org.elasticsearch.entitlement.runtime.policy.InboundNetworkEntitlement; -import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; -import org.elasticsearch.entitlement.runtime.policy.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import org.elasticsearch.entitlement.runtime.policy.Scope; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.FileData; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadStoreAttributesEntitlement; import java.lang.instrument.Instrumentation; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import java.net.URI; +import java.nio.channels.spi.SelectorProvider; +import java.nio.file.AccessMode; +import java.nio.file.CopyOption; +import java.nio.file.DirectoryStream; +import java.nio.file.FileStore; +import java.nio.file.FileSystems; +import java.nio.file.LinkOption; +import java.nio.file.OpenOption; +import java.nio.file.Path; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.spi.FileSystemProvider; 
import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ_WRITE; /** * Called by the agent during {@code agentmain} to configure the entitlement system, @@ -46,10 +70,16 @@ */ public class EntitlementInitialization { + private static final String AGENTS_PACKAGE_NAME = "co.elastic.apm.agent"; private static final Module ENTITLEMENTS_MODULE = PolicyManager.class.getModule(); private static ElasticsearchEntitlementChecker manager; + interface InstrumentationInfoFactory { + InstrumentationService.InstrumentationInfo of(String methodName, Class... parameterTypes) throws ClassNotFoundException, + NoSuchMethodException; + } + // Note: referenced by bridge reflectively public static EntitlementChecker checker() { return manager; @@ -59,11 +89,28 @@ public static EntitlementChecker checker() { public static void initialize(Instrumentation inst) throws Exception { manager = initChecker(); - Map checkMethods = INSTRUMENTER_FACTORY.lookupMethods(EntitlementChecker.class); + var latestCheckerInterface = getVersionSpecificCheckerClass(EntitlementChecker.class); + + Map checkMethods = new HashMap<>(INSTRUMENTATION_SERVICE.lookupMethods(latestCheckerInterface)); + Stream.of( + fileSystemProviderChecks(), + fileStoreChecks(), + Stream.of( + INSTRUMENTATION_SERVICE.lookupImplementationMethod( + SelectorProvider.class, + "inheritedChannel", + SelectorProvider.provider().getClass(), + EntitlementChecker.class, + "checkSelectorProviderInheritedChannel" + ) + ) + ) + .flatMap(Function.identity()) + .forEach(instrumentation -> checkMethods.put(instrumentation.targetMethod(), instrumentation.checkMethod())); var classesToTransform = checkMethods.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); - Instrumenter instrumenter = INSTRUMENTER_FACTORY.newInstrumenter(EntitlementChecker.class, checkMethods); + Instrumenter instrumenter = INSTRUMENTATION_SERVICE.newInstrumenter(latestCheckerInterface, checkMethods); inst.addTransformer(new Transformer(instrumenter, classesToTransform), true); inst.retransformClasses(findClassesToRetransform(inst.getAllLoadedClasses(), classesToTransform)); } @@ -80,6 +127,7 @@ private static Class[] findClassesToRetransform(Class[] loadedClasses, Set private static PolicyManager createPolicyManager() { Map pluginPolicies = EntitlementBootstrap.bootstrapArgs().pluginPolicies(); + Path[] dataDirs = EntitlementBootstrap.bootstrapArgs().dataDirs(); // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it var serverPolicy = new Policy( @@ -91,42 +139,159 @@ private static PolicyManager createPolicyManager() { "org.elasticsearch.server", List.of( new ExitVMEntitlement(), + new ReadStoreAttributesEntitlement(), new CreateClassLoaderEntitlement(), new InboundNetworkEntitlement(), new OutboundNetworkEntitlement(), - new LoadNativeLibrariesEntitlement() + new LoadNativeLibrariesEntitlement(), + new ManageThreadsEntitlement(), + new FilesEntitlement( + List.of(new FilesEntitlement.FileData(EntitlementBootstrap.bootstrapArgs().tempDir().toString(), READ_WRITE)) + ) ) ), new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), new 
Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())), - new Scope("org.apache.lucene.core", List.of(new LoadNativeLibrariesEntitlement())), - new Scope("org.elasticsearch.nativeaccess", List.of(new LoadNativeLibrariesEntitlement())) + new Scope("org.apache.lucene.core", List.of(new LoadNativeLibrariesEntitlement(), new ManageThreadsEntitlement())), + new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())), + new Scope( + "org.elasticsearch.nativeaccess", + List.of( + new LoadNativeLibrariesEntitlement(), + new FilesEntitlement(Arrays.stream(dataDirs).map(d -> new FileData(d.toString(), READ_WRITE)).toList()) + ) + ) ) ); // agents run without a module, so this is a special hack for the apm agent // this should be removed once https://github.com/elastic/elasticsearch/issues/109335 is completed - List agentEntitlements = List.of(new CreateClassLoaderEntitlement()); + List agentEntitlements = List.of(new CreateClassLoaderEntitlement(), new ManageThreadsEntitlement()); var resolver = EntitlementBootstrap.bootstrapArgs().pluginResolver(); - return new PolicyManager(serverPolicy, agentEntitlements, pluginPolicies, resolver, ENTITLEMENTS_MODULE); + return new PolicyManager(serverPolicy, agentEntitlements, pluginPolicies, resolver, AGENTS_PACKAGE_NAME, ENTITLEMENTS_MODULE); } - private static ElasticsearchEntitlementChecker initChecker() { - final PolicyManager policyManager = createPolicyManager(); + private static Stream fileSystemProviderChecks() throws ClassNotFoundException, + NoSuchMethodException { + var fileSystemProviderClass = FileSystems.getDefault().provider().getClass(); + var instrumentation = new InstrumentationInfoFactory() { + @Override + public InstrumentationService.InstrumentationInfo of(String methodName, Class... 
parameterTypes) + throws ClassNotFoundException, NoSuchMethodException { + return INSTRUMENTATION_SERVICE.lookupImplementationMethod( + FileSystemProvider.class, + methodName, + fileSystemProviderClass, + EntitlementChecker.class, + "check" + Character.toUpperCase(methodName.charAt(0)) + methodName.substring(1), + parameterTypes + ); + } + }; + + return Stream.of( + instrumentation.of("newFileSystem", URI.class, Map.class), + instrumentation.of("newFileSystem", Path.class, Map.class), + instrumentation.of("newInputStream", Path.class, OpenOption[].class), + instrumentation.of("newOutputStream", Path.class, OpenOption[].class), + instrumentation.of("newFileChannel", Path.class, Set.class, FileAttribute[].class), + instrumentation.of("newAsynchronousFileChannel", Path.class, Set.class, ExecutorService.class, FileAttribute[].class), + instrumentation.of("newByteChannel", Path.class, Set.class, FileAttribute[].class), + instrumentation.of("newDirectoryStream", Path.class, DirectoryStream.Filter.class), + instrumentation.of("createDirectory", Path.class, FileAttribute[].class), + instrumentation.of("createSymbolicLink", Path.class, Path.class, FileAttribute[].class), + instrumentation.of("createLink", Path.class, Path.class), + instrumentation.of("delete", Path.class), + instrumentation.of("deleteIfExists", Path.class), + instrumentation.of("readSymbolicLink", Path.class), + instrumentation.of("copy", Path.class, Path.class, CopyOption[].class), + instrumentation.of("move", Path.class, Path.class, CopyOption[].class), + instrumentation.of("isSameFile", Path.class, Path.class), + instrumentation.of("isHidden", Path.class), + instrumentation.of("getFileStore", Path.class), + instrumentation.of("checkAccess", Path.class, AccessMode[].class), + instrumentation.of("getFileAttributeView", Path.class, Class.class, LinkOption[].class), + instrumentation.of("readAttributes", Path.class, Class.class, LinkOption[].class), + instrumentation.of("readAttributes", Path.class, String.class, LinkOption[].class), + instrumentation.of("readAttributesIfExists", Path.class, Class.class, LinkOption[].class), + instrumentation.of("setAttribute", Path.class, String.class, Object.class, LinkOption[].class), + instrumentation.of("exists", Path.class, LinkOption[].class) + ); + } + + private static Stream fileStoreChecks() { + var fileStoreClasses = StreamSupport.stream(FileSystems.getDefault().getFileStores().spliterator(), false) + .map(FileStore::getClass) + .distinct(); + return fileStoreClasses.flatMap(fileStoreClass -> { + var instrumentation = new InstrumentationInfoFactory() { + @Override + public InstrumentationService.InstrumentationInfo of(String methodName, Class... 
parameterTypes)
+                    throws ClassNotFoundException, NoSuchMethodException {
+                    return INSTRUMENTATION_SERVICE.lookupImplementationMethod(
+                        FileStore.class,
+                        methodName,
+                        fileStoreClass,
+                        EntitlementChecker.class,
+                        "check" + Character.toUpperCase(methodName.charAt(0)) + methodName.substring(1),
+                        parameterTypes
+                    );
+                }
+            };
+
+            try {
+                return Stream.of(
+                    instrumentation.of("getFileStoreAttributeView", Class.class),
+                    instrumentation.of("getAttribute", String.class),
+                    instrumentation.of("getBlockSize"),
+                    instrumentation.of("getTotalSpace"),
+                    instrumentation.of("getUnallocatedSpace"),
+                    instrumentation.of("getUsableSpace"),
+                    instrumentation.of("isReadOnly"),
+                    instrumentation.of("name"),
+                    instrumentation.of("type")
+
+                );
+            } catch (NoSuchMethodException | ClassNotFoundException e) {
+                throw new RuntimeException(e);
+            }
+        });
+    }
+
+    /**
+     * Returns the "most recent" checker class compatible with the current runtime Java version.
+     * For checkers, we have (optionally) version-specific classes, each with a prefix (e.g. Java23).
+     * The mapping cannot be automatic, as it depends on the actual presence of these classes in the final Jar (see
+     * the various mainXX source sets).
+     */
+    private static Class<?> getVersionSpecificCheckerClass(Class<?> baseClass) {
+        String packageName = baseClass.getPackageName();
+        String baseClassName = baseClass.getSimpleName();
        int javaVersion = Runtime.version().feature();
+
        final String classNamePrefix;
        if (javaVersion >= 23) {
+            // All Java versions from 23 onwards will be able to use the checks in the Java23EntitlementChecker interface and implementation
            classNamePrefix = "Java23";
        } else {
+            // For any other Java version, the basic EntitlementChecker interface and implementation contain all the supported checks
            classNamePrefix = "";
        }
-        final String className = "org.elasticsearch.entitlement.runtime.api." + classNamePrefix + "ElasticsearchEntitlementChecker";
+        final String className = packageName + "." + classNamePrefix + baseClassName;
        Class<?> clazz;
        try {
            clazz = Class.forName(className);
        } catch (ClassNotFoundException e) {
-            throw new AssertionError("entitlement lib cannot find entitlement impl", e);
+            throw new AssertionError("entitlement lib cannot find entitlement class " + className, e);
        }
+        return clazz;
+    }
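As a worked example of this resolution (assuming, as the javadoc cautions, that the Java23 variants are actually packaged into the final jar): on a Java 23+ runtime,

    getVersionSpecificCheckerClass(EntitlementChecker.class)
    // -> org.elasticsearch.entitlement.bridge.Java23EntitlementChecker
    getVersionSpecificCheckerClass(ElasticsearchEntitlementChecker.class)
    // -> org.elasticsearch.entitlement.runtime.api.Java23ElasticsearchEntitlementChecker

while on earlier runtimes the prefix is empty and both calls resolve to the plain base classes.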
+
+    private static ElasticsearchEntitlementChecker initChecker() {
+        final PolicyManager policyManager = createPolicyManager();
+
+        final Class<?> clazz = getVersionSpecificCheckerClass(ElasticsearchEntitlementChecker.class);
+
        Constructor<?> constructor;
        try {
            constructor = clazz.getConstructor(PolicyManager.class);
@@ -140,7 +305,7 @@ private static ElasticsearchEntitlementChecker initChecker() {
        }
    }
-    private static final InstrumentationService INSTRUMENTER_FACTORY = new ProviderLocator<>(
+    private static final InstrumentationService INSTRUMENTATION_SERVICE = new ProviderLocator<>(
        "entitlement",
        InstrumentationService.class,
        "org.elasticsearch.entitlement.instrumentation",
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
index 66d8ad9488cfa..79673418eb321 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
@@ -16,7 +16,21 @@
 * The SPI service entry point for instrumentation.
 */
public interface InstrumentationService {
+
+    String CHECK_METHOD_PREFIX = "check$";
+
+    record InstrumentationInfo(MethodKey targetMethod, CheckMethod checkMethod) {}
+
    Instrumenter newInstrumenter(Class<?> clazz, Map<MethodKey, CheckMethod> methods);
    Map<MethodKey, CheckMethod> lookupMethods(Class<?> clazz) throws IOException;
+
+    InstrumentationInfo lookupImplementationMethod(
+        Class<?> targetSuperclass,
+        String methodName,
+        Class<?> implementationClass,
+        Class<?> checkerClass,
+        String checkMethodName,
+        Class<?>... parameterTypes
+    ) throws NoSuchMethodException, ClassNotFoundException;
}
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
index 8600dd357c384..986d8bee5bf27 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
@@ -9,9 +9,11 @@
 package org.elasticsearch.entitlement.runtime.api;
+import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.entitlement.bridge.EntitlementChecker;
 import org.elasticsearch.entitlement.runtime.policy.PolicyManager;
+import java.io.File;
 import java.io.InputStream;
 import java.io.PrintStream;
 import java.io.PrintWriter;
@@ -38,6 +40,7 @@
 import java.net.Socket;
 import java.net.SocketAddress;
 import java.net.SocketImplFactory;
+import java.net.URI;
 import java.net.URL;
 import java.net.URLStreamHandler;
 import java.net.URLStreamHandlerFactory;
@@ -51,12 +54,28 @@
 import java.nio.channels.DatagramChannel;
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
+import java.nio.channels.spi.SelectorProvider;
+import java.nio.charset.Charset;
+import java.nio.file.AccessMode;
+import java.nio.file.CopyOption;
+import java.nio.file.DirectoryStream;
+import java.nio.file.FileStore;
+import java.nio.file.LinkOption;
+import java.nio.file.OpenOption;
 import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.UserPrincipal;
+import java.nio.file.spi.FileSystemProvider;
 import java.security.cert.CertStoreParameters;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.TimeZone;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.ForkJoinPool;
 import java.util.function.Consumer;
 import javax.net.ssl.HostnameVerifier;
@@ -69,6 +88,7 @@
 * API methods for managing the checks.
 * The trampoline module loads this object via SPI.
*/ +@SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") public class ElasticsearchEntitlementChecker implements EntitlementChecker { private final PolicyManager policyManager; @@ -77,6 +97,11 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { this.policyManager = policyManager; } + /// ///////////////// + // + // Exit the JVM process + // + @Override public void check$java_lang_Runtime$exit(Class callerClass, Runtime runtime, int status) { policyManager.checkExitVM(callerClass); @@ -92,6 +117,11 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkExitVM(callerClass); } + /// ///////////////// + // + // create class loaders + // + @Override public void check$java_lang_ClassLoader$(Class callerClass) { policyManager.checkCreateClassLoader(callerClass); @@ -108,51 +138,85 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { } @Override - public void check$java_security_SecureClassLoader$(Class callerClass) { + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent) { + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent) { + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls) { + public void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent) { + public void check$java_net_URLClassLoader$( + Class callerClass, + String name, + URL[] urls, + ClassLoader parent, + URLStreamHandlerFactory factory + ) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) { + public void check$java_security_SecureClassLoader$(Class callerClass) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent) { + public void check$java_security_SecureClassLoader$(Class callerClass, ClassLoader parent) { policyManager.checkCreateClassLoader(callerClass); } @Override - public void check$java_net_URLClassLoader$( + public void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent) { + policyManager.checkCreateClassLoader(callerClass); + } + + /// ///////////////// + // + // "setFactory" methods + // + + @Override + public void check$javax_net_ssl_HttpsURLConnection$setSSLSocketFactory( Class callerClass, - String name, - URL[] urls, - ClassLoader parent, - URLStreamHandlerFactory factory + HttpsURLConnection connection, + SSLSocketFactory sf ) { - policyManager.checkCreateClassLoader(callerClass); + policyManager.checkSetHttpsConnectionProperties(callerClass); } + @Override + public void 
check$javax_net_ssl_HttpsURLConnection$$setDefaultSSLSocketFactory(Class callerClass, SSLSocketFactory sf) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void check$javax_net_ssl_HttpsURLConnection$$setDefaultHostnameVerifier(Class callerClass, HostnameVerifier hv) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void check$javax_net_ssl_SSLContext$$setDefault(Class callerClass, SSLContext context) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + /// ///////////////// + // + // Process creation + // + @Override public void check$java_lang_ProcessBuilder$start(Class callerClass, ProcessBuilder processBuilder) { policyManager.checkStartProcess(callerClass); @@ -163,6 +227,31 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkStartProcess(callerClass); } + /// ///////////////// + // + // System Properties and similar + // + + @Override + public void check$java_lang_System$$clearProperty(Class callerClass, String key) { + policyManager.checkWriteProperty(callerClass, key); + } + + @Override + public void check$java_lang_System$$setProperties(Class callerClass, Properties props) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void check$java_lang_System$$setProperty(Class callerClass, String key, String value) { + policyManager.checkWriteProperty(callerClass, key); + } + + /// ///////////////// + // + // JVM-wide state changes + // + @Override public void check$java_lang_System$$setIn(Class callerClass, InputStream in) { policyManager.checkChangeJVMGlobalState(callerClass); @@ -214,27 +303,17 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { } @Override - public void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class callerClass) { + public void check$java_nio_charset_spi_CharsetProvider$(Class callerClass) { policyManager.checkChangeJVMGlobalState(callerClass); } @Override - public void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class callerClass, Thread.UncaughtExceptionHandler ueh) { + public void check$com_sun_tools_jdi_VirtualMachineManagerImpl$$virtualMachineManager(Class callerClass) { policyManager.checkChangeJVMGlobalState(callerClass); } @Override - public void check$java_lang_System$$clearProperty(Class callerClass, String key) { - policyManager.checkWriteProperty(callerClass, key); - } - - @Override - public void check$java_lang_System$$setProperty(Class callerClass, String key, String value) { - policyManager.checkWriteProperty(callerClass, key); - } - - @Override - public void check$java_lang_System$$setProperties(Class callerClass, Properties props) { + public void check$java_lang_Thread$$setDefaultUncaughtExceptionHandler(Class callerClass, Thread.UncaughtExceptionHandler ueh) { policyManager.checkChangeJVMGlobalState(callerClass); } @@ -353,29 +432,10 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkChangeJVMGlobalState(callerClass); } - @Override - public void check$javax_net_ssl_HttpsURLConnection$setSSLSocketFactory( - Class callerClass, - HttpsURLConnection connection, - SSLSocketFactory sf - ) { - policyManager.checkSetHttpsConnectionProperties(callerClass); - } - - @Override - public void check$javax_net_ssl_HttpsURLConnection$$setDefaultSSLSocketFactory(Class callerClass, SSLSocketFactory sf) { - policyManager.checkChangeJVMGlobalState(callerClass); - } - - @Override - public void 
check$javax_net_ssl_HttpsURLConnection$$setDefaultHostnameVerifier(Class callerClass, HostnameVerifier hv) { - policyManager.checkChangeJVMGlobalState(callerClass); - } - - @Override - public void check$javax_net_ssl_SSLContext$$setDefault(Class callerClass, SSLContext context) { - policyManager.checkChangeJVMGlobalState(callerClass); - } + /// ///////////////// + // + // Network access + // @Override public void check$java_net_ProxySelector$$setDefault(Class callerClass, ProxySelector ps) { @@ -759,6 +819,21 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkInboundNetworkAccess(callerClass); } + @Override + public void check$java_nio_channels_spi_SelectorProvider$(Class callerClass) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void check$java_nio_channels_spi_AsynchronousChannelProvider$(Class callerClass) { + policyManager.checkChangeNetworkHandling(callerClass); + } + + @Override + public void checkSelectorProviderInheritedChannel(Class callerClass, SelectorProvider that) { + policyManager.checkChangeNetworkHandling(callerClass); + } + @Override public void check$java_lang_Runtime$load(Class callerClass, Runtime that, String filename) { // TODO: check filesystem entitlement READ @@ -866,6 +941,432 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { ModuleLayer.Controller that, Module target ) { - policyManager.checkLoadingNativeLibraries(callerClass); + policyManager.checkChangeJVMGlobalState(callerClass); + } + + /// ///////////////// + // + // File access + // + + // old io (ie File) + + @Override + public void check$java_io_File$createNewFile(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$$createTempFile(Class callerClass, String prefix, String suffix, File directory) { + policyManager.checkFileWrite(callerClass, directory); + } + + @Override + public void check$java_io_File$delete(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$deleteOnExit(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$mkdir(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$mkdirs(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$renameTo(Class callerClass, File file, File dest) { + policyManager.checkFileRead(callerClass, file); + policyManager.checkFileWrite(callerClass, dest); + } + + @Override + public void check$java_io_File$setExecutable(Class callerClass, File file, boolean executable) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setExecutable(Class callerClass, File file, boolean executable, boolean ownerOnly) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setLastModified(Class callerClass, File file, long time) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setReadable(Class callerClass, File file, boolean readable) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setReadable(Class callerClass, File file, boolean readable, boolean ownerOnly) { + 
policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setReadOnly(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setWritable(Class callerClass, File file, boolean writable) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_File$setWritable(Class callerClass, File file, boolean writable, boolean ownerOnly) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_FileOutputStream$(Class callerClass, String name) { + policyManager.checkFileWrite(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileOutputStream$(Class callerClass, String name, boolean append) { + policyManager.checkFileWrite(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileOutputStream$(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_FileOutputStream$(Class callerClass, File file, boolean append) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_util_Scanner$(Class callerClass, File source) { + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void check$java_util_Scanner$(Class callerClass, File source, String charsetName) { + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void check$java_util_Scanner$(Class callerClass, File source, Charset charset) { + policyManager.checkFileRead(callerClass, source); + } + + // nio + + @Override + public void check$java_nio_file_Files$$getOwner(Class callerClass, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$probeContentType(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$setOwner(Class callerClass, Path path, UserPrincipal principal) { + policyManager.checkFileWrite(callerClass, path); + } + + // file system providers + + @Override + public void check$java_nio_file_spi_FileSystemProvider$(Class callerClass) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void checkNewFileSystem(Class callerClass, FileSystemProvider that, URI uri, Map env) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void checkNewFileSystem(Class callerClass, FileSystemProvider that, Path path, Map env) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void checkNewInputStream(Class callerClass, FileSystemProvider that, Path path, OpenOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkNewOutputStream(Class callerClass, FileSystemProvider that, Path path, OpenOption... options) { + policyManager.checkFileWrite(callerClass, path); + } + + private static boolean isOpenForWrite(Set options) { + return options.contains(StandardOpenOption.WRITE) + || options.contains(StandardOpenOption.APPEND) + || options.contains(StandardOpenOption.CREATE) + || options.contains(StandardOpenOption.CREATE_NEW) + || options.contains(StandardOpenOption.DELETE_ON_CLOSE); + } + + @Override + public void checkNewFileChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + FileAttribute... 
attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void checkNewAsynchronousFileChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + ExecutorService executor, + FileAttribute... attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void checkNewByteChannel( + Class callerClass, + FileSystemProvider that, + Path path, + Set options, + FileAttribute... attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void checkNewDirectoryStream( + Class callerClass, + FileSystemProvider that, + Path dir, + DirectoryStream.Filter filter + ) { + policyManager.checkFileRead(callerClass, dir); + } + + @Override + public void checkCreateDirectory(Class callerClass, FileSystemProvider that, Path dir, FileAttribute... attrs) { + policyManager.checkFileWrite(callerClass, dir); + } + + @Override + public void checkCreateSymbolicLink(Class callerClass, FileSystemProvider that, Path link, Path target, FileAttribute... attrs) { + policyManager.checkFileWrite(callerClass, link); + policyManager.checkFileRead(callerClass, target); + } + + @Override + public void checkCreateLink(Class callerClass, FileSystemProvider that, Path link, Path existing) { + policyManager.checkFileWrite(callerClass, link); + policyManager.checkFileRead(callerClass, existing); + } + + @Override + public void checkDelete(Class callerClass, FileSystemProvider that, Path path) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void checkDeleteIfExists(Class callerClass, FileSystemProvider that, Path path) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void checkReadSymbolicLink(Class callerClass, FileSystemProvider that, Path link) { + policyManager.checkFileRead(callerClass, link); + } + + @Override + public void checkCopy(Class callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options) { + policyManager.checkFileWrite(callerClass, target); + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void checkMove(Class callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options) { + policyManager.checkFileWrite(callerClass, target); + policyManager.checkFileWrite(callerClass, source); + } + + @Override + public void checkIsSameFile(Class callerClass, FileSystemProvider that, Path path, Path path2) { + policyManager.checkFileRead(callerClass, path); + policyManager.checkFileRead(callerClass, path2); + } + + @Override + public void checkIsHidden(Class callerClass, FileSystemProvider that, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkGetFileStore(Class callerClass, FileSystemProvider that, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkCheckAccess(Class callerClass, FileSystemProvider that, Path path, AccessMode... modes) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkGetFileAttributeView(Class callerClass, FileSystemProvider that, Path path, Class type, LinkOption... 
options) { + policyManager.checkGetFileAttributeView(callerClass); + } + + @Override + public void checkReadAttributes(Class callerClass, FileSystemProvider that, Path path, Class type, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkReadAttributes(Class callerClass, FileSystemProvider that, Path path, String attributes, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkReadAttributesIfExists( + Class callerClass, + FileSystemProvider that, + Path path, + Class type, + LinkOption... options + ) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void checkSetAttribute( + Class callerClass, + FileSystemProvider that, + Path path, + String attribute, + Object value, + LinkOption... options + ) { + policyManager.checkFileWrite(callerClass, path); + + } + + @Override + public void checkExists(Class callerClass, FileSystemProvider that, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + // Thread management + + @Override + public void check$java_lang_Thread$start(Class callerClass, Thread thread) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_Thread$setDaemon(Class callerClass, Thread thread, boolean on) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_ThreadGroup$setDaemon(Class callerClass, ThreadGroup threadGroup, boolean daemon) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_util_concurrent_ForkJoinPool$setParallelism(Class callerClass, ForkJoinPool forkJoinPool, int size) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_Thread$setName(Class callerClass, Thread thread, String name) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_Thread$setPriority(Class callerClass, Thread thread, int newPriority) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_Thread$setUncaughtExceptionHandler( + Class callerClass, + Thread thread, + Thread.UncaughtExceptionHandler ueh + ) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void check$java_lang_ThreadGroup$setMaxPriority(Class callerClass, ThreadGroup threadGroup, int pri) { + policyManager.checkManageThreadsEntitlement(callerClass); + } + + @Override + public void checkGetFileStoreAttributeView(Class callerClass, FileStore that, Class type) { + policyManager.checkWriteStoreAttributes(callerClass); + } + + @Override + public void checkGetAttribute(Class callerClass, FileStore that, String attribute) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkGetBlockSize(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkGetTotalSpace(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkGetUnallocatedSpace(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkGetUsableSpace(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void 
checkIsReadOnly(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkName(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); + } + + @Override + public void checkType(Class callerClass, FileStore that) { + policyManager.checkReadStoreAttributes(callerClass); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java index 768babdb840f5..fef7b5d11aff0 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java @@ -9,6 +9,8 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -20,7 +22,7 @@ * using this annotation is considered parseable as part of a policy file * for entitlements. */ -@Target(ElementType.CONSTRUCTOR) +@Target({ ElementType.CONSTRUCTOR, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) public @interface ExternalEntitlement { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java new file mode 100644 index 0000000000000..c69244d7e8a99 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
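For illustration — not part of this change — a minimal sketch of what the widened @Target above enables: an entitlement can now expose an annotated static factory to the policy parser instead of a constructor (the parser invokes it via Method.invoke, as shown further down). The entitlement name and parameter here are hypothetical; only FileEntitlement/FilesEntitlement below actually use this pattern in this diff:

    package org.elasticsearch.entitlement.runtime.policy.entitlements;

    import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement;

    // Hypothetical entitlement, illustration only (not registered with the parser).
    public record ExampleQuotaEntitlement(int maxBytes) implements Entitlement {

        // With METHOD now in @Target, the annotation can sit on a static factory,
        // letting the record validate/convert its raw policy-file parameters.
        @ExternalEntitlement(parameterNames = { "max_bytes" }, esModulesOnly = false)
        public static ExampleQuotaEntitlement create(String maxBytes) {
            return new ExampleQuotaEntitlement(Integer.parseInt(maxBytes));
        }
    }
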
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; + +public final class FileAccessTree { + public static final FileAccessTree EMPTY = new FileAccessTree(FilesEntitlement.EMPTY); + private static final String FILE_SEPARATOR = getDefaultFileSystem().getSeparator(); + + private final String[] readPaths; + private final String[] writePaths; + + private FileAccessTree(FilesEntitlement filesEntitlement) { + List readPaths = new ArrayList<>(); + List writePaths = new ArrayList<>(); + for (FilesEntitlement.FileData fileData : filesEntitlement.filesData()) { + var path = normalizePath(Path.of(fileData.path())); + var mode = fileData.mode(); + if (mode == FilesEntitlement.Mode.READ_WRITE) { + writePaths.add(path); + } + readPaths.add(path); + } + + readPaths.sort(String::compareTo); + writePaths.sort(String::compareTo); + + this.readPaths = readPaths.toArray(new String[0]); + this.writePaths = writePaths.toArray(new String[0]); + } + + public static FileAccessTree of(FilesEntitlement filesEntitlement) { + return new FileAccessTree(filesEntitlement); + } + + boolean canRead(Path path) { + return checkPath(normalizePath(path), readPaths); + } + + boolean canWrite(Path path) { + return checkPath(normalizePath(path), writePaths); + } + + /** + * @return the "canonical" form of the given {@code path}, to be used for entitlement checks. + */ + static String normalizePath(Path path) { + // Note that toAbsolutePath produces paths separated by the default file separator, + // so on Windows, if the given path uses forward slashes, this consistently + // converts it to backslashes. + return path.toAbsolutePath().normalize().toString(); + } + + private static boolean checkPath(String path, String[] paths) { + if (paths.length == 0) { + return false; + } + int ndx = Arrays.binarySearch(paths, path); + if (ndx < -1) { + String maybeParent = paths[-ndx - 2]; + return path.startsWith(maybeParent) && path.startsWith(FILE_SEPARATOR, maybeParent.length()); + } + return ndx >= 0; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + FileAccessTree that = (FileAccessTree) o; + return Objects.deepEquals(readPaths, that.readPaths) && Objects.deepEquals(writePaths, that.writePaths); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(readPaths), Arrays.hashCode(writePaths)); + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java deleted file mode 100644 index 4fdbcc93ea6e0..0000000000000 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
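For orientation, a usage sketch of FileAccessTree above (POSIX-style hypothetical paths; java.util and java.nio.file imports assumed; canRead/canWrite are package-private, so this presumes a same-package caller, as in the tests). It mirrors the FileAccessTreeTests added later in this diff: read_write satisfies both checks, read only canRead, and the separator check in checkPath keeps a sibling like /data2 from matching a /data grant:

    FilesEntitlement entitlement = FilesEntitlement.build(
        List.of(
            Map.of("path", "/data", "mode", "read_write"),
            Map.of("path", "/config", "mode", "read")
        )
    );
    FileAccessTree tree = FileAccessTree.of(entitlement);

    assert tree.canWrite(Path.of("/data/index/segments_1"));        // under the read_write grant
    assert tree.canRead(Path.of("/config/jvm.options"));            // read grant
    assert tree.canWrite(Path.of("/config/jvm.options")) == false;  // read does not imply write
    assert tree.canRead(Path.of("/data2")) == false;                // prefix must end at a separator

The binary search lands /data2 between /config and /data; the candidate parent /data is a string prefix, but the next character is '2' rather than the file separator, so access is denied.
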
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.entitlement.runtime.policy; - -import java.util.List; -import java.util.Objects; - -/** - * Describes a file entitlement with a path and actions. - */ -public class FileEntitlement implements Entitlement { - - public static final int READ_ACTION = 0x1; - public static final int WRITE_ACTION = 0x2; - - public static final String READ = "read"; - public static final String WRITE = "write"; - - private final String path; - private final int actions; - - @ExternalEntitlement(parameterNames = { "path", "actions" }, esModulesOnly = false) - public FileEntitlement(String path, List actionsList) { - this.path = path; - int actionsInt = 0; - - for (String actionString : actionsList) { - if (READ.equals(actionString)) { - if ((actionsInt & READ_ACTION) == READ_ACTION) { - throw new IllegalArgumentException("file action [read] specified multiple times"); - } - actionsInt |= READ_ACTION; - } else if (WRITE.equals(actionString)) { - if ((actionsInt & WRITE_ACTION) == WRITE_ACTION) { - throw new IllegalArgumentException("file action [write] specified multiple times"); - } - actionsInt |= WRITE_ACTION; - } else { - throw new IllegalArgumentException("unknown file action [" + actionString + "]"); - } - } - - this.actions = actionsInt; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FileEntitlement that = (FileEntitlement) o; - return actions == that.actions && Objects.equals(path, that.path); - } - - @Override - public int hashCode() { - return Objects.hash(path, actions); - } - - @Override - public String toString() { - return "FileEntitlement{" + "path='" + path + '\'' + ", actions=" + actions + '}'; - } -} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index f6448bc455d14..008983f099be4 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -10,13 +10,29 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement; +import 
org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadStoreAttributesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import java.io.File; import java.lang.StackWalker.StackFrame; import java.lang.module.ModuleFinder; import java.lang.module.ModuleReference; +import java.nio.file.Path; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -36,15 +52,40 @@ public class PolicyManager { private static final Logger logger = LogManager.getLogger(PolicyManager.class); - record ModuleEntitlements(Map, List> entitlementsByType) { - public static final ModuleEntitlements NONE = new ModuleEntitlements(Map.of()); + static final String UNKNOWN_COMPONENT_NAME = "(unknown)"; + static final String SERVER_COMPONENT_NAME = "(server)"; + static final String APM_AGENT_COMPONENT_NAME = "(APM agent)"; + + /** + * @param componentName the plugin name; or else one of the special component names + * like {@link #SERVER_COMPONENT_NAME} or {@link #APM_AGENT_COMPONENT_NAME}. + */ + record ModuleEntitlements( + String componentName, + Map, List> entitlementsByType, + FileAccessTree fileAccess + ) { ModuleEntitlements { entitlementsByType = Map.copyOf(entitlementsByType); } - public static ModuleEntitlements from(List entitlements) { - return new ModuleEntitlements(entitlements.stream().collect(groupingBy(Entitlement::getClass))); + public static ModuleEntitlements none(String componentName) { + return new ModuleEntitlements(componentName, Map.of(), FileAccessTree.EMPTY); + } + + public static ModuleEntitlements from(String componentName, List entitlements) { + FilesEntitlement filesEntitlement = FilesEntitlement.EMPTY; + for (Entitlement entitlement : entitlements) { + if (entitlement instanceof FilesEntitlement) { + filesEntitlement = (FilesEntitlement) entitlement; + } + } + return new ModuleEntitlements( + componentName, + entitlements.stream().collect(groupingBy(Entitlement::getClass)), + FileAccessTree.of(filesEntitlement) + ); } public boolean hasEntitlement(Class entitlementClass) { @@ -62,9 +103,9 @@ public Stream getEntitlements(Class entitlementCla final Map moduleEntitlementsMap = new ConcurrentHashMap<>(); - protected final Map> serverEntitlements; - protected final List agentEntitlements; - protected final Map>> pluginsEntitlements; + private final Map> serverEntitlements; + private final List apmAgentEntitlements; + private final Map>> pluginsEntitlements; private final Function, String> pluginResolver; public static final String ALL_UNNAMED = "ALL-UNNAMED"; @@ -84,6 +125,11 @@ private static Set findSystemModules() { .collect(Collectors.toUnmodifiableSet()); } + /** + * The package name containing classes from the APM agent. + */ + private final String apmAgentPackageName; + /** * Frames originating from this module are ignored in the permission logic. 
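To make the new record shape concrete, a conceptual sketch (ModuleEntitlements is a package-private record nested in PolicyManager, so this is not callable from outside; component and paths hypothetical) of what getEntitlements() now hands the check methods:

    ModuleEntitlements entitlements = ModuleEntitlements.from(
        "my-plugin",
        List.of(FilesEntitlement.build(List.of(Map.of("path", "/tmp/scratch", "mode", "read"))))
    );
    entitlements.componentName();                                   // "my-plugin", now part of every error message
    entitlements.hasEntitlement(OutboundNetworkEntitlement.class);  // false -> the flag checks throw
    entitlements.fileAccess().canRead(Path.of("/tmp/scratch/x"));   // true, via the precomputed FileAccessTree
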
*/ @@ -91,42 +137,58 @@ private static Set findSystemModules() { public PolicyManager( Policy serverPolicy, - List agentEntitlements, + List apmAgentEntitlements, Map pluginPolicies, Function, String> pluginResolver, + String apmAgentPackageName, Module entitlementsModule ) { this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(serverPolicy)); - this.agentEntitlements = agentEntitlements; + this.apmAgentEntitlements = apmAgentEntitlements; this.pluginsEntitlements = requireNonNull(pluginPolicies).entrySet() .stream() .collect(toUnmodifiableMap(Map.Entry::getKey, e -> buildScopeEntitlementsMap(e.getValue()))); this.pluginResolver = pluginResolver; + this.apmAgentPackageName = apmAgentPackageName; this.entitlementsModule = entitlementsModule; + + for (var e : serverEntitlements.entrySet()) { + validateEntitlementsPerModule(SERVER_COMPONENT_NAME, e.getKey(), e.getValue()); + } + validateEntitlementsPerModule(APM_AGENT_COMPONENT_NAME, "unnamed", apmAgentEntitlements); + for (var p : pluginsEntitlements.entrySet()) { + for (var m : p.getValue().entrySet()) { + validateEntitlementsPerModule(p.getKey(), m.getKey(), m.getValue()); + } + } } private static Map> buildScopeEntitlementsMap(Policy policy) { return policy.scopes().stream().collect(toUnmodifiableMap(Scope::moduleName, Scope::entitlements)); } + private static void validateEntitlementsPerModule(String componentName, String moduleName, List entitlements) { + Set> found = new HashSet<>(); + for (var e : entitlements) { + if (found.contains(e.getClass())) { + throw new IllegalArgumentException( + "[" + componentName + "] using module [" + moduleName + "] found duplicate entitlement [" + e.getClass().getName() + "]" + ); + } + found.add(e.getClass()); + } + } + public void checkStartProcess(Class callerClass) { - neverEntitled(callerClass, "start process"); + neverEntitled(callerClass, () -> "start process"); } - private void neverEntitled(Class callerClass, String operationDescription) { - var requestingClass = requestingClass(callerClass); - if (isTriviallyAllowed(requestingClass)) { - return; - } + public void checkWriteStoreAttributes(Class callerClass) { + neverEntitled(callerClass, () -> "change file store attributes"); + } - throw new NotEntitledException( - Strings.format( - "Not entitled: caller [%s], module [%s], operation [%s]", - callerClass, - requestingClass.getModule() == null ? "" : requestingClass.getModule().getName(), - operationDescription - ) - ); + public void checkReadStoreAttributes(Class callerClass) { + checkEntitlementPresent(callerClass, ReadStoreAttributesEntitlement.class); } /** @@ -139,11 +201,12 @@ private void neverEntitled(Class callerClass, Supplier operationDescr return; } - throw new NotEntitledException( + notEntitled( Strings.format( - "Not entitled: caller [%s], module [%s], operation [%s]", - callerClass, - requestingClass.getModule() == null ? "" : requestingClass.getModule().getName(), + "Not entitled: component [%s], module [%s], class [%s], operation [%s]", + getEntitlements(requestingClass).componentName(), + requestingClass.getModule().getName(), + requestingClass, operationDescription.get() ) ); @@ -162,17 +225,19 @@ public void checkSetHttpsConnectionProperties(Class callerClass) { } public void checkChangeJVMGlobalState(Class callerClass) { - neverEntitled(callerClass, () -> { - // Look up the check$ method to compose an informative error message. - // This way, we don't need to painstakingly describe every individual global-state change. 
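A sketch of the effect of the new constructor-time validateEntitlementsPerModule pass above (module name hypothetical): two entitlements of the same type in one scope are now rejected eagerly rather than silently merged.

    List<Entitlement> scope = List.of(
        FilesEntitlement.build(List.of(Map.of("path", "/a", "mode", "read"))),
        FilesEntitlement.build(List.of(Map.of("path", "/b", "mode", "read")))
    );
    // validateEntitlementsPerModule("(server)", "org.example.module", scope) throws:
    // IllegalArgumentException: [(server)] using module [org.example.module] found duplicate
    // entitlement [org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement]
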
- Optional checkMethodName = StackWalker.getInstance() - .walk( - frames -> frames.map(StackFrame::getMethodName) - .dropWhile(not(methodName -> methodName.startsWith("check$"))) - .findFirst() - ); - return checkMethodName.map(this::operationDescription).orElse("change JVM global state"); - }); + neverEntitled(callerClass, () -> walkStackForCheckMethodName().orElse("change JVM global state")); + } + + private Optional walkStackForCheckMethodName() { + // Look up the check$ method to compose an informative error message. + // This way, we don't need to painstakingly describe every individual global-state change. + return StackWalker.getInstance() + .walk( + frames -> frames.map(StackFrame::getMethodName) + .dropWhile(not(methodName -> methodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX))) + .findFirst() + ) + .map(this::operationDescription); } /** @@ -182,6 +247,65 @@ public void checkChangeNetworkHandling(Class callerClass) { checkChangeJVMGlobalState(callerClass); } + @SuppressForbidden(reason = "Explicitly checking File apis") + public void checkFileRead(Class callerClass, File file) { + checkFileRead(callerClass, file.toPath()); + } + + public void checkFileRead(Class callerClass, Path path) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { + return; + } + + ModuleEntitlements entitlements = getEntitlements(requestingClass); + if (entitlements.fileAccess().canRead(path) == false) { + notEntitled( + Strings.format( + "Not entitled: component [%s], module [%s], class [%s], entitlement [file], operation [read], path [%s]", + entitlements.componentName(), + requestingClass.getModule().getName(), + requestingClass, + path + ) + ); + } + } + + @SuppressForbidden(reason = "Explicitly checking File apis") + public void checkFileWrite(Class callerClass, File file) { + checkFileWrite(callerClass, file.toPath()); + } + + public void checkFileWrite(Class callerClass, Path path) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { + return; + } + + ModuleEntitlements entitlements = getEntitlements(requestingClass); + if (entitlements.fileAccess().canWrite(path) == false) { + notEntitled( + Strings.format( + "Not entitled: component [%s], module [%s], class [%s], entitlement [file], operation [write], path [%s]", + entitlements.componentName(), + requestingClass.getModule().getName(), + requestingClass, + path + ) + ); + } + } + + /** + * Invoked when we try to get an arbitrary {@code FileAttributeView} class. Such a class can modify attributes, like owner etc.; + * we could think about introducing checks for each of the operations, but for now we over-approximate this and simply deny when it is + * used directly. + */ + public void checkGetFileAttributeView(Class callerClass) { + neverEntitled(callerClass, () -> "get file attribute view"); + } + /** * Check for operations that can access sensitive network information, e.g. 
secrets, tokens or SSL sessions */ @@ -209,30 +333,33 @@ public void checkAllNetworkAccess(Class callerClass) { } var classEntitlements = getEntitlements(requestingClass); - if (classEntitlements.hasEntitlement(InboundNetworkEntitlement.class) == false) { - throw new NotEntitledException( - Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [inbound_network]", - requestingClass, - requestingClass.getModule().getName() - ) - ); - } + checkFlagEntitlement(classEntitlements, InboundNetworkEntitlement.class, requestingClass); + checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass); + } - if (classEntitlements.hasEntitlement(OutboundNetworkEntitlement.class) == false) { - throw new NotEntitledException( + private static void checkFlagEntitlement( + ModuleEntitlements classEntitlements, + Class entitlementClass, + Class requestingClass + ) { + if (classEntitlements.hasEntitlement(entitlementClass) == false) { + notEntitled( Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [outbound_network]", + "Not entitled: component [%s], module [%s], class [%s], entitlement [%s]", + classEntitlements.componentName(), + requestingClass.getModule().getName(), requestingClass, - requestingClass.getModule().getName() + PolicyParser.getEntitlementTypeName(entitlementClass) ) ); } logger.debug( () -> Strings.format( - "Entitled: class [%s], module [%s], entitlements [inbound_network, outbound_network]", + "Entitled: component [%s], module [%s], class [%s], entitlement [%s]", + classEntitlements.componentName(), + requestingClass.getModule().getName(), requestingClass, - requestingClass.getModule().getName() + PolicyParser.getEntitlementTypeName(entitlementClass) ) ); } @@ -247,50 +374,40 @@ public void checkWriteProperty(Class callerClass, String property) { if (entitlements.getEntitlements(WriteSystemPropertiesEntitlement.class).anyMatch(e -> e.properties().contains(property))) { logger.debug( () -> Strings.format( - "Entitled: class [%s], module [%s], entitlement [write_system_properties], property [%s]", - requestingClass, + "Entitled: component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]", + entitlements.componentName(), requestingClass.getModule().getName(), + requestingClass, property ) ); return; } - throw new NotEntitledException( + notEntitled( Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [write_system_properties], property [%s]", - requestingClass, + "Not entitled: component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]", + entitlements.componentName(), requestingClass.getModule().getName(), + requestingClass, property ) ); } + private static void notEntitled(String message) { + throw new NotEntitledException(message); + } + + public void checkManageThreadsEntitlement(Class callerClass) { + checkEntitlementPresent(callerClass, ManageThreadsEntitlement.class); + } + private void checkEntitlementPresent(Class callerClass, Class entitlementClass) { var requestingClass = requestingClass(callerClass); if (isTriviallyAllowed(requestingClass)) { return; } - - ModuleEntitlements entitlements = getEntitlements(requestingClass); - if (entitlements.hasEntitlement(entitlementClass)) { - logger.debug( - () -> Strings.format( - "Entitled: class [%s], module [%s], entitlement [%s]", - requestingClass, - requestingClass.getModule().getName(), - PolicyParser.getEntitlementTypeName(entitlementClass) - ) - ); - return; - } - 
throw new NotEntitledException( - Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [%s]", - requestingClass, - requestingClass.getModule().getName(), - PolicyParser.getEntitlementTypeName(entitlementClass) - ) - ); + checkFlagEntitlement(getEntitlements(requestingClass), entitlementClass, requestingClass); } ModuleEntitlements getEntitlements(Class requestingClass) { @@ -300,45 +417,44 @@ ModuleEntitlements getEntitlements(Class requestingClass) { private ModuleEntitlements computeEntitlements(Class requestingClass) { Module requestingModule = requestingClass.getModule(); if (isServerModule(requestingModule)) { - return getModuleScopeEntitlements(requestingClass, serverEntitlements, requestingModule.getName(), "server"); + return getModuleScopeEntitlements(serverEntitlements, requestingModule.getName(), SERVER_COMPONENT_NAME); } // plugins var pluginName = pluginResolver.apply(requestingClass); if (pluginName != null) { var pluginEntitlements = pluginsEntitlements.get(pluginName); - if (pluginEntitlements != null) { + if (pluginEntitlements == null) { + return ModuleEntitlements.none(pluginName); + } else { final String scopeName; if (requestingModule.isNamed() == false) { scopeName = ALL_UNNAMED; } else { scopeName = requestingModule.getName(); } - return getModuleScopeEntitlements(requestingClass, pluginEntitlements, scopeName, pluginName); + return getModuleScopeEntitlements(pluginEntitlements, scopeName, pluginName); } } - if (requestingModule.isNamed() == false) { - // agents are the only thing running non-modular - return ModuleEntitlements.from(agentEntitlements); + if (requestingModule.isNamed() == false && requestingClass.getPackageName().startsWith(apmAgentPackageName)) { + // The APM agent is the only thing running non-modular in the system classloader + return ModuleEntitlements.from(APM_AGENT_COMPONENT_NAME, apmAgentEntitlements); } - logger.warn("No applicable entitlement policy for class [{}]", requestingClass.getName()); - return ModuleEntitlements.NONE; + return ModuleEntitlements.none(UNKNOWN_COMPONENT_NAME); } private ModuleEntitlements getModuleScopeEntitlements( - Class callerClass, Map> scopeEntitlements, String moduleName, - String component + String componentName ) { var entitlements = scopeEntitlements.get(moduleName); if (entitlements == null) { - logger.warn("No applicable entitlement policy for [{}], module [{}], class [{}]", component, moduleName, callerClass); - return ModuleEntitlements.NONE; + return ModuleEntitlements.none(componentName); } - return ModuleEntitlements.from(entitlements); + return ModuleEntitlements.from(componentName, entitlements); } private static boolean isServerModule(Module requestingModule) { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java index 95437027239b0..9698b9e86704a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -9,6 +9,16 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import 
org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteAllSystemPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -19,6 +29,8 @@ import java.io.UncheckedIOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -35,20 +47,22 @@ */ public class PolicyParser { - private static final Map> EXTERNAL_ENTITLEMENTS = Stream.of( - FileEntitlement.class, + private static final Map> EXTERNAL_ENTITLEMENTS = Stream.of( CreateClassLoaderEntitlement.class, - SetHttpsConnectionPropertiesEntitlement.class, - OutboundNetworkEntitlement.class, + FilesEntitlement.class, InboundNetworkEntitlement.class, - WriteSystemPropertiesEntitlement.class, - LoadNativeLibrariesEntitlement.class + LoadNativeLibrariesEntitlement.class, + ManageThreadsEntitlement.class, + OutboundNetworkEntitlement.class, + SetHttpsConnectionPropertiesEntitlement.class, + WriteAllSystemPropertiesEntitlement.class, + WriteSystemPropertiesEntitlement.class ).collect(Collectors.toUnmodifiableMap(PolicyParser::getEntitlementTypeName, Function.identity())); protected final XContentParser policyParser; protected final String policyName; private final boolean isExternalPlugin; - private final Map> externalEntitlements; + private final Map> externalEntitlements; static String getEntitlementTypeName(Class entitlementClass) { var entitlementClassName = entitlementClass.getSimpleName(); @@ -71,8 +85,12 @@ public PolicyParser(InputStream inputStream, String policyName, boolean isExtern } // package private for tests - PolicyParser(InputStream inputStream, String policyName, boolean isExternalPlugin, Map> externalEntitlements) - throws IOException { + PolicyParser( + InputStream inputStream, + String policyName, + boolean isExternalPlugin, + Map> externalEntitlements + ) throws IOException { this.policyParser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, Objects.requireNonNull(inputStream)); this.policyName = policyName; this.isExternalPlugin = isExternalPlugin; @@ -139,6 +157,7 @@ protected Entitlement parseEntitlement(String scopeName, String entitlementType) } Constructor entitlementConstructor = null; + Method entitlementMethod = null; ExternalEntitlement entitlementMetadata = null; for (var ctor : entitlementClass.getConstructors()) { var metadata = ctor.getAnnotation(ExternalEntitlement.class); @@ -153,8 +172,27 @@ protected Entitlement parseEntitlement(String scopeName, String entitlementType) entitlementConstructor = ctor; entitlementMetadata = metadata; } - } + for (var method : entitlementClass.getMethods()) { + var metadata = method.getAnnotation(ExternalEntitlement.class); + if (metadata != null) { + if 
(Modifier.isStatic(method.getModifiers()) == false) { + throw new IllegalStateException( + "entitlement class [" + entitlementClass.getName() + "] has non-static method annotated with ExternalEntitlement" + ); + } + if (entitlementMetadata != null) { + throw new IllegalStateException( + "entitlement class [" + + entitlementClass.getName() + + "] has more than one constructor and/or method annotated with ExternalEntitlement" + ); + } + entitlementMethod = method; + entitlementMetadata = metadata; + } + } + if (entitlementMetadata == null) { throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); } @@ -163,40 +201,53 @@ protected Entitlement parseEntitlement(String scopeName, String entitlementType) throw newPolicyParserException("entitlement type [" + entitlementType + "] is allowed only on modules"); } - Class[] parameterTypes = entitlementConstructor.getParameterTypes(); + Class[] parameterTypes = entitlementConstructor != null + ? entitlementConstructor.getParameterTypes() + : entitlementMethod.getParameterTypes(); String[] parametersNames = entitlementMetadata.parameterNames(); + Object[] parameterValues = new Object[parameterTypes.length]; if (parameterTypes.length != 0 || parametersNames.length != 0) { - if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { - throw newPolicyParserException(scopeName, entitlementType, "expected entitlement parameters"); - } - } + if (policyParser.nextToken() == XContentParser.Token.START_OBJECT) { + Map parsedValues = policyParser.map(); - Map parsedValues = policyParser.map(); - - Object[] parameterValues = new Object[parameterTypes.length]; - for (int parameterIndex = 0; parameterIndex < parameterTypes.length; ++parameterIndex) { - String parameterName = parametersNames[parameterIndex]; - Object parameterValue = parsedValues.remove(parameterName); - if (parameterValue == null) { - throw newPolicyParserException(scopeName, entitlementType, "missing entitlement parameter [" + parameterName + "]"); - } - Class parameterType = parameterTypes[parameterIndex]; - if (parameterType.isAssignableFrom(parameterValue.getClass()) == false) { - throw newPolicyParserException( - scopeName, - entitlementType, - "unexpected parameter type [" + parameterType.getSimpleName() + "] for entitlement parameter [" + parameterName + "]" - ); + for (int parameterIndex = 0; parameterIndex < parameterTypes.length; ++parameterIndex) { + String parameterName = parametersNames[parameterIndex]; + Object parameterValue = parsedValues.remove(parameterName); + if (parameterValue == null) { + throw newPolicyParserException(scopeName, entitlementType, "missing entitlement parameter [" + parameterName + "]"); + } + Class parameterType = parameterTypes[parameterIndex]; + if (parameterType.isAssignableFrom(parameterValue.getClass()) == false) { + throw newPolicyParserException( + scopeName, + entitlementType, + "unexpected parameter type [" + + parameterType.getSimpleName() + + "] for entitlement parameter [" + + parameterName + + "]" + ); + } + parameterValues[parameterIndex] = parameterValue; + } + if (parsedValues.isEmpty() == false) { + throw newPolicyParserException(scopeName, entitlementType, "extraneous entitlement parameter(s) " + parsedValues); + } + } else if (policyParser.currentToken() == XContentParser.Token.START_ARRAY) { + List parsedValues = policyParser.list(); + parameterValues[0] = parsedValues; + } else { + throw newPolicyParserException(scopeName, entitlementType, "expected entitlement parameters"); } - 
parameterValues[parameterIndex] = parameterValue; - } - if (parsedValues.isEmpty() == false) { - throw newPolicyParserException(scopeName, entitlementType, "extraneous entitlement parameter(s) " + parsedValues); } try { - return (Entitlement) entitlementConstructor.newInstance(parameterValues); + if (entitlementConstructor != null) { + return (Entitlement) entitlementConstructor.newInstance(parameterValues); + } else { + return (Entitlement) entitlementMethod.invoke(null, parameterValues); + } } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { if (e.getCause() instanceof PolicyValidationException piae) { throw newPolicyParserException(startLocation, scopeName, entitlementType, piae); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java index a2bc49d99b44f..5f21db011884d 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java @@ -15,13 +15,13 @@ * parser is able to wrap this exception with a line/character number for * additional useful error information. */ -class PolicyValidationException extends RuntimeException { +public class PolicyValidationException extends RuntimeException { - PolicyValidationException(String message) { + public PolicyValidationException(String message) { super(message); } - PolicyValidationException(String message, Throwable cause) { + public PolicyValidationException(String message, Throwable cause) { super(message, cause); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java index 55e257797d603..6342a155da940 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java @@ -9,6 +9,8 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; + import java.util.List; import java.util.Objects; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java similarity index 81% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java index 55e4b66595642..4b7137f8c7cd6 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
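With the new START_ARRAY branch above, a single-parameter entitlement can be written as a bare YAML list instead of an object of named parameters. A sketch of feeding such a policy through the parser (scope name and file name hypothetical; parsePolicy() as the entry point is assumed, only the constructor appears in this diff; java.io/java.nio.charset imports assumed):

    String policyYaml = """
        org.example.module:
          - files:
              - path: "/tmp/scratch"
                mode: read_write
        """;
    Policy policy = new PolicyParser(
        new ByteArrayInputStream(policyYaml.getBytes(StandardCharsets.UTF_8)),
        "example-plugin-policy.yaml",
        true // external (non-ES-module) plugin
    ).parsePolicy();

Here the list under "files" becomes parameterValues[0] and is handed to the static FilesEntitlement.build factory shown below.
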
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; public record CreateClassLoaderEntitlement() implements Entitlement { @ExternalEntitlement diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java index 5b53c399cc1b7..996b8a19ac8b0 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.Policy; /** * Marker interface to ensure that only {@link Entitlement} are diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java similarity index 90% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java index e5c836ea22b20..470277c482461 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; /** * Internal policy type (not-parseable -- not available to plugins). diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java new file mode 100644 index 0000000000000..01d882e4d9e28 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; +import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; + +/** + * Describes entitlement to access files at a particular location. + * + * @param path the location of the files. For directories, implicitly includes access to + * all contained files and (recursively) subdirectories. + * @param mode the type of operation + */ +public record FileEntitlement(String path, Mode mode) implements Entitlement { + + public enum Mode { + READ, + READ_WRITE + } + + private static Mode parseMode(String mode) { + if (mode.equals("read")) { + return Mode.READ; + } else if (mode.equals("read_write")) { + return Mode.READ_WRITE; + } else { + throw new PolicyValidationException("invalid mode: " + mode + ", valid values: [read, read_write]"); + } + } + + @ExternalEntitlement(parameterNames = { "path", "mode" }, esModulesOnly = false) + public static FileEntitlement create(String path, String mode) { + return new FileEntitlement(path, parseMode(mode)); + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java new file mode 100644 index 0000000000000..953954ec3769c --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; +import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Describes a file entitlement with a path and mode. 
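A quick sketch of the factory above (hypothetical path): valid modes map onto the enum, anything else fails validation with the message the parser wraps with line/column information.

    FileEntitlement readWrite = FileEntitlement.create("/var/log/app", "read_write");
    assert readWrite.mode() == FileEntitlement.Mode.READ_WRITE;

    FileEntitlement.create("/var/log/app", "append");
    // -> PolicyValidationException: invalid mode: append, valid values: [read, read_write]
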
+ */ +public record FilesEntitlement(List filesData) implements Entitlement { + + public static final FilesEntitlement EMPTY = new FilesEntitlement(List.of()); + + public enum Mode { + READ, + READ_WRITE + } + + public record FileData(String path, Mode mode) { + + } + + private static Mode parseMode(String mode) { + if (mode.equals("read")) { + return Mode.READ; + } else if (mode.equals("read_write")) { + return Mode.READ_WRITE; + } else { + throw new PolicyValidationException("invalid mode: " + mode + ", valid values: [read, read_write]"); + } + } + + @ExternalEntitlement(parameterNames = { "paths" }, esModulesOnly = false) + @SuppressWarnings("unchecked") + public static FilesEntitlement build(List paths) { + if (paths == null || paths.isEmpty()) { + throw new PolicyValidationException("must specify at least one path"); + } + List filesData = new ArrayList<>(); + for (Object object : paths) { + Map file = new HashMap<>((Map) object); + String path = file.remove("path"); + if (path == null) { + throw new PolicyValidationException("files entitlement must contain path for every listed file"); + } + String mode = file.remove("mode"); + if (mode == null) { + throw new PolicyValidationException("files entitlement must contain mode for every listed file"); + } + if (file.isEmpty() == false) { + throw new PolicyValidationException("unknown key(s) " + file + " in a listed file for files entitlement"); + } + filesData.add(new FileData(path, parseMode(mode))); + } + return new FilesEntitlement(filesData); + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java index 482d4e5100c0b..7c00a53cc16cb 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
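And the aggregate form above, which the YAML list from a policy file is funneled into; a sketch (hypothetical paths) showing that a missing mode and unknown keys are both rejected:

    FilesEntitlement ok = FilesEntitlement.build(
        List.of(Map.of("path", "/data", "mode", "read_write"))
    );

    FilesEntitlement.build(List.of(Map.of("path", "/data")));
    // -> PolicyValidationException: files entitlement must contain mode for every listed file

    FilesEntitlement.build(List.of(Map.of("path", "/data", "mode", "read", "recursive", "true")));
    // -> PolicyValidationException: unknown key(s) {recursive=true} in a listed file for files entitlement
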
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * Describes an entitlement for inbound network actions (listen/accept/receive) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java index 9a840c4e3e32e..b297685876925 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow loading native libraries diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ManageThreadsEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ManageThreadsEntitlement.java new file mode 100644 index 0000000000000..c75ccf26d1432 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ManageThreadsEntitlement.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
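ManageThreadsEntitlement, defined just below, is the flag behind the Thread/ThreadGroup/ForkJoinPool hooks added to the checker earlier in this diff. A sketch of the plugin-facing effect (the manage_threads name follows the parser's type-name derivation and is an assumption here):

    // In plugin code whose policy has no manage_threads grant:
    Thread worker = new Thread(() -> {});
    worker.start();
    // -> check$java_lang_Thread$start -> checkManageThreadsEntitlement
    // -> NotEntitledException: Not entitled: component [...], module [...], class [...],
    //    entitlement [manage_threads]
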
+ */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; + +public record ManageThreadsEntitlement() implements Entitlement { + @ExternalEntitlement(esModulesOnly = false) + public ManageThreadsEntitlement {} +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java index 50d9a47f580e5..dbdd6840f2ebe 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * Describes an entitlement for outbound network actions (connect/send) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadStoreAttributesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadStoreAttributesEntitlement.java new file mode 100644 index 0000000000000..ccb84c4a68c97 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ReadStoreAttributesEntitlement.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +/** + * Describes an entitlement for reading file store attributes (e.g. 
disk space) + */ +public record ReadStoreAttributesEntitlement() implements Entitlement {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java similarity index 84% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java index bb2f65def9e18..abfcfdf18db20 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow setting properties to a single Https connection after this has been created diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java index f0d1d14177332..f0b02e82d3cb5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow writing all properties such as system properties. diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java similarity index 86% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java index 654ebbda9dab3..b7818bb14030b 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
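Tying this marker entitlement back to the checker methods earlier in the diff: FileStore reads are gated on it, while acquiring a FileStoreAttributeView (a write vector) is never entitled. A sketch (hypothetical path; IOException handling omitted):

    // With ReadStoreAttributesEntitlement granted:
    FileStore store = Files.getFileStore(Path.of("/data"));  // the path itself is guarded by checkFileRead
    store.getTotalSpace();  // checkGetTotalSpace -> checkReadStoreAttributes -> allowed
    store.type();           // checkType -> checkReadStoreAttributes -> allowed

    // Never entitled, regardless of policy:
    store.getFileStoreAttributeView(FileStoreAttributeView.class);
    // -> checkGetFileStoreAttributeView -> checkWriteStoreAttributes
    // -> NotEntitledException (operation "change file store attributes")
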
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; import java.util.List; import java.util.Set; diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java new file mode 100644 index 0000000000000..de3e2eafb7569 --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; +import static org.hamcrest.Matchers.is; + +public class FileAccessTreeTests extends ESTestCase { + + static Path root; + + @BeforeClass + public static void setupRoot() { + root = createTempDir(); + } + + private static Path path(String s) { + return root.resolve(s); + } + + public void testEmpty() { + var tree = FileAccessTree.of(FilesEntitlement.EMPTY); + assertThat(tree.canRead(path("path")), is(false)); + assertThat(tree.canWrite(path("path")), is(false)); + } + + public void testRead() { + var tree = FileAccessTree.of(entitlement("foo", "read")); + assertThat(tree.canRead(path("foo")), is(true)); + assertThat(tree.canRead(path("foo/subdir")), is(true)); + assertThat(tree.canRead(path("food")), is(false)); + assertThat(tree.canWrite(path("foo")), is(false)); + assertThat(tree.canWrite(path("food")), is(false)); + + assertThat(tree.canRead(path("before")), is(false)); + assertThat(tree.canRead(path("later")), is(false)); + } + + public void testWrite() { + var tree = FileAccessTree.of(entitlement("foo", "read_write")); + assertThat(tree.canWrite(path("foo")), is(true)); + assertThat(tree.canWrite(path("foo/subdir")), is(true)); + assertThat(tree.canWrite(path("food")), is(false)); + assertThat(tree.canRead(path("foo")), is(true)); + assertThat(tree.canRead(path("food")), is(false)); + + assertThat(tree.canWrite(path("before")), is(false)); + assertThat(tree.canWrite(path("later")), is(false)); + } + + public void testTwoPaths() { + var tree = FileAccessTree.of(entitlement("foo", "read", "bar", "read")); + assertThat(tree.canRead(path("a")), is(false)); + assertThat(tree.canRead(path("bar")), is(true)); + assertThat(tree.canRead(path("bar/subdir")), is(true)); + assertThat(tree.canRead(path("c")), is(false)); + assertThat(tree.canRead(path("foo")), is(true)); + assertThat(tree.canRead(path("foo/subdir")), is(true)); + assertThat(tree.canRead(path("z")), is(false)); + } + + public void testReadWriteUnderRead() { + var tree = FileAccessTree.of(entitlement("foo", 
"read", "foo/bar", "read_write")); + assertThat(tree.canRead(path("foo")), is(true)); + assertThat(tree.canWrite(path("foo")), is(false)); + assertThat(tree.canRead(path("foo/bar")), is(true)); + assertThat(tree.canWrite(path("foo/bar")), is(true)); + } + + public void testNormalizePath() { + var tree = FileAccessTree.of(entitlement("foo/../bar", "read")); + assertThat(tree.canRead(path("foo/../bar")), is(true)); + assertThat(tree.canRead(path("foo")), is(false)); + assertThat(tree.canRead(path("")), is(false)); + } + + public void testForwardSlashes() { + String sep = getDefaultFileSystem().getSeparator(); + var tree = FileAccessTree.of(entitlement("a/b", "read", "m" + sep + "n", "read")); + + // Native separators work + assertThat(tree.canRead(path("a" + sep + "b")), is(true)); + assertThat(tree.canRead(path("m" + sep + "n")), is(true)); + + // Forward slashes also work + assertThat(tree.canRead(path("a/b")), is(true)); + assertThat(tree.canRead(path("m/n")), is(true)); + } + + FilesEntitlement entitlement(String... values) { + List filesData = new ArrayList<>(); + for (int i = 0; i < values.length; i += 2) { + Map fileData = new HashMap<>(); + fileData.put("path", path(values[i]).toString()); + fileData.put("mode", values[i + 1]); + filesData.add(fileData); + } + return FilesEntitlement.build(filesData); + } +} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index c3acefbbb323b..34d069c98c7aa 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -10,6 +10,11 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.entitlement.runtime.policy.PolicyManager.ModuleEntitlements; +import org.elasticsearch.entitlement.runtime.policy.agent.TestAgent; +import org.elasticsearch.entitlement.runtime.policy.agent.inner.TestInnerAgent; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.compiler.InMemoryJavaCompiler; import org.elasticsearch.test.jar.JarUtils; @@ -18,6 +23,8 @@ import java.io.IOException; import java.lang.module.Configuration; import java.lang.module.ModuleFinder; +import java.net.URL; +import java.net.URLClassLoader; import java.nio.file.Path; import java.util.Arrays; import java.util.List; @@ -27,15 +34,19 @@ import static java.util.Map.entry; import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; -import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.SERVER_COMPONENT_NAME; import static org.hamcrest.Matchers.aMapWithSize; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @ESTestCase.WithoutSecurityManager public class PolicyManagerTests extends ESTestCase { + + /** + * A test agent package name for use in tests. 
+ */ + private static final String TEST_AGENTS_PACKAGE_NAME = "org.elasticsearch.entitlement.runtime.policy.agent"; + /** * A module you can use for test cases that don't actually care about the * entitlement module. @@ -59,6 +70,7 @@ public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { List.of(), Map.of("plugin1", createPluginPolicy("plugin.module")), c -> "plugin1", + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -66,38 +78,52 @@ public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals("No policy for the unnamed module", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals("No policy for the unnamed module", ModuleEntitlements.none("plugin1"), policyManager.getEntitlements(callerClass)); - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(Map.of(requestingModule, ModuleEntitlements.none("plugin1")), policyManager.moduleEntitlementsMap); } public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { - var policyManager = new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "plugin1", NO_ENTITLEMENTS_MODULE); + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(), + Map.of(), + c -> "plugin1", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals("No policy for this plugin", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals("No policy for this plugin", ModuleEntitlements.none("plugin1"), policyManager.getEntitlements(callerClass)); - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(Map.of(requestingModule, ModuleEntitlements.none("plugin1")), policyManager.moduleEntitlementsMap); } public void testGetEntitlementsFailureIsCached() { - var policyManager = new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "plugin1", NO_ENTITLEMENTS_MODULE); + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(), + Map.of(), + c -> "plugin1", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(ModuleEntitlements.none("plugin1"), policyManager.getEntitlements(callerClass)); + assertEquals(Map.of(requestingModule, ModuleEntitlements.none("plugin1")), policyManager.moduleEntitlementsMap); // A second time - assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); + assertEquals(ModuleEntitlements.none("plugin1"), policyManager.getEntitlements(callerClass)); // Nothing new in the map - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(Map.of(requestingModule, ModuleEntitlements.none("plugin1")), policyManager.moduleEntitlementsMap); } public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { @@ -106,6 +132,7 @@ public void 
testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { List.of(), Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), c -> "plugin2", + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -117,7 +144,14 @@ public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { } public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotFoundException { - var policyManager = new PolicyManager(createTestServerPolicy("example"), List.of(), Map.of(), c -> null, NO_ENTITLEMENTS_MODULE); + var policyManager = new PolicyManager( + createTestServerPolicy("example"), + List.of(), + Map.of(), + c -> null, + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); // Tests do not run modular, so we cannot use a server class. // But we know that in production code the server module and its classes are in the boot layer. @@ -126,9 +160,13 @@ public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotF var mockServerClass = ModuleLayer.boot().findLoader("jdk.httpserver").loadClass("com.sun.net.httpserver.HttpServer"); var requestingModule = mockServerClass.getModule(); - assertEquals("No policy for this module in server", ModuleEntitlements.NONE, policyManager.getEntitlements(mockServerClass)); + assertEquals( + "No policy for this module in server", + ModuleEntitlements.none(SERVER_COMPONENT_NAME), + policyManager.getEntitlements(mockServerClass) + ); - assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); + assertEquals(Map.of(requestingModule, ModuleEntitlements.none(SERVER_COMPONENT_NAME)), policyManager.moduleEntitlementsMap); } public void testGetEntitlementsReturnsEntitlementsForServerModule() throws ClassNotFoundException { @@ -137,6 +175,7 @@ public void testGetEntitlementsReturnsEntitlementsForServerModule() throws Class List.of(), Map.of(), c -> null, + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -161,6 +200,7 @@ public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOExc List.of(), Map.of("mock-plugin", createPluginPolicy("org.example.plugin")), c -> "mock-plugin", + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -169,10 +209,8 @@ public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOExc var entitlements = policyManager.getEntitlements(mockPluginClass); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); - assertThat( - entitlements.getEntitlements(FileEntitlement.class).toList(), - contains(transformedMatch(FileEntitlement::toString, containsString("/test/path"))) - ); + // TODO: this can't work on Windows, we need to have the root be unknown + // assertThat(entitlements.fileAccess().canRead("/test/path"), is(true)); } public void testGetEntitlementsResultIsCached() { @@ -181,6 +219,7 @@ public void testGetEntitlementsResultIsCached() { List.of(), Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), c -> "plugin2", + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -200,7 +239,7 @@ public void testGetEntitlementsResultIsCached() { public void testRequestingClassFastPath() throws IOException, ClassNotFoundException { var callerClass = makeClassInItsOwnModule(); - assertEquals(callerClass, policyManagerWithEntitlementsModule(NO_ENTITLEMENTS_MODULE).requestingClass(callerClass)); + assertEquals(callerClass, policyManager(TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE).requestingClass(callerClass)); } public void testRequestingModuleWithStackWalk() throws 
IOException, ClassNotFoundException { @@ -209,7 +248,7 @@ public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoun var instrumentedClass = makeClassInItsOwnModule(); // The class that called the check method var ignorableClass = makeClassInItsOwnModule(); - var policyManager = policyManagerWithEntitlementsModule(entitlementsClass.getModule()); + var policyManager = policyManager(TEST_AGENTS_PACKAGE_NAME, entitlementsClass.getModule()); assertEquals( "Skip entitlement library and the instrumented method", @@ -229,6 +268,115 @@ public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoun ); } + public void testAgentsEntitlements() throws IOException, ClassNotFoundException { + Path home = createTempDir(); + Path unnamedJar = createMockPluginJarForUnnamedModule(home); + var notAgentClass = makeClassInItsOwnModule(); + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(new CreateClassLoaderEntitlement()), + Map.of(), + c -> c.getPackageName().startsWith(TEST_AGENTS_PACKAGE_NAME) ? null : "test", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); + ModuleEntitlements agentsEntitlements = policyManager.getEntitlements(TestAgent.class); + assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); + agentsEntitlements = policyManager.getEntitlements(TestInnerAgent.class); + assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); + ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(notAgentClass); + assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); + try (URLClassLoader classLoader = new URLClassLoader(new URL[] { unnamedJar.toUri().toURL() }, getClass().getClassLoader())) { + var unnamedNotAgentClass = classLoader.loadClass("q.B"); + notAgentsEntitlements = policyManager.getEntitlements(unnamedNotAgentClass); + assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); + } + } + + public void testDuplicateEntitlements() { + IllegalArgumentException iae = expectThrows( + IllegalArgumentException.class, + () -> new PolicyManager( + new Policy( + "server", + List.of(new Scope("test", List.of(new CreateClassLoaderEntitlement(), new CreateClassLoaderEntitlement()))) + ), + List.of(), + Map.of(), + c -> "test", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ) + ); + assertEquals( + "[(server)] using module [test] found duplicate entitlement " + "[" + CreateClassLoaderEntitlement.class.getName() + "]", + iae.getMessage() + ); + + iae = expectThrows( + IllegalArgumentException.class, + () -> new PolicyManager( + createEmptyTestServerPolicy(), + List.of(new CreateClassLoaderEntitlement(), new CreateClassLoaderEntitlement()), + Map.of(), + c -> "test", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ) + ); + assertEquals( + "[(APM agent)] using module [unnamed] found duplicate entitlement " + "[" + CreateClassLoaderEntitlement.class.getName() + "]", + iae.getMessage() + ); + + iae = expectThrows( + IllegalArgumentException.class, + () -> new PolicyManager( + createEmptyTestServerPolicy(), + List.of(), + Map.of( + "plugin1", + new Policy( + "test", + List.of( + new Scope( + "test", + List.of( + FilesEntitlement.EMPTY, + new CreateClassLoaderEntitlement(), + new FilesEntitlement(List.of(new FilesEntitlement.FileData("test", FilesEntitlement.Mode.READ))) + ) + ) + ) + ) + ), + c -> "plugin1", + TEST_AGENTS_PACKAGE_NAME, + 
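+ // constructor arguments, for reference: server policy, APM agent entitlements, plugin policies keyed by plugin name, plugin resolver, agents package name, entitlements module + // (a class is treated as agent code only when the plugin resolver returns null for it and its package starts with the agents package name, as the agent tests in this class exercise)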
NO_ENTITLEMENTS_MODULE + ) + ); + assertEquals( + "[plugin1] using module [test] found duplicate entitlement " + "[" + FilesEntitlement.class.getName() + "]", + iae.getMessage() + ); + } + + /** + * If the plugin resolver tells us a class is in a plugin, don't conclude that it's in an agent. + */ + public void testPluginResolverOverridesAgents() { + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(new CreateClassLoaderEntitlement()), + Map.of(), + c -> "test", // Insist that the class is in a plugin + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); + ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(TestAgent.class); + assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); + } + private static Class<?> makeClassInItsOwnModule() throws IOException, ClassNotFoundException { final Path home = createTempDir(); Path jar = createMockPluginJar(home); @@ -236,8 +384,8 @@ private static Class<?> makeClassInItsOwnModule() throws IOException, ClassNotFo return layer.findLoader("org.example.plugin").loadClass("q.B"); } - private static PolicyManager policyManagerWithEntitlementsModule(Module entitlementsModule) { - return new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "test", entitlementsModule); + private static PolicyManager policyManager(String agentsPackageName, Module entitlementsModule) { + return new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "test", agentsPackageName, entitlementsModule); } private static Policy createEmptyTestServerPolicy() { @@ -255,13 +403,26 @@ private static Policy createPluginPolicy(String... pluginModules) { .map( name -> new Scope( name, - List.of(new FileEntitlement("/test/path", List.of(FileEntitlement.READ)), new CreateClassLoaderEntitlement()) + List.of( + new FilesEntitlement(List.of(new FilesEntitlement.FileData("/test/path", FilesEntitlement.Mode.READ))), + new CreateClassLoaderEntitlement() + ) ) ) .toList() ); } + private static Path createMockPluginJarForUnnamedModule(Path home) throws IOException { + Path jar = home.resolve("unnamed-mock-plugin.jar"); + + Map<String, CharSequence> sources = Map.ofEntries(entry("q.B", "package q; public class B { }")); + + var classToBytes = InMemoryJavaCompiler.compile(sources); + JarUtils.createJarWithEntries(jar, Map.ofEntries(entry("q/B.class", classToBytes.get("q.B")))); + return jar; + } + private static Path createMockPluginJar(Path home) throws IOException { Path jar = home.resolve("mock-plugin.jar");
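The parser failure tests below exercise the new files entitlement syntax, which pairs every listed path with a mode (read or read_write) and replaces the old file entitlement that took a single path plus an actions list; a listed file without a mode, or with an unknown key, is a parse error. A minimal policy sketch in the new form, with the module and path names invented for illustration:

    example-module-name:
      - files:
          - path: "/var/data/example"
            mode: "read"
          - path: "/var/data/example/output"
            mode: "read_write"

diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java index dfcc5d8916f2c..4f479a9bf59ac 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java @@ -40,22 +40,12 @@ public void testEntitlementDoesNotExist() { public void testEntitlementMissingParameter() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - - file: {} + - files: + - path: test-path """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( - "[2:12] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " +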
"for entitlement type [file]: missing entitlement parameter [path]", - ppe.getMessage() - ); - - ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" - entitlement-module-name: - - file: - path: test-path - """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); - assertEquals( - "[4:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " - + "for entitlement type [file]: missing entitlement parameter [actions]", + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [files]: files entitlement must contain mode for every listed file", ppe.getMessage() ); } @@ -63,15 +53,14 @@ public void testEntitlementMissingParameter() { public void testEntitlementExtraneousParameter() { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - - file: - path: test-path - actions: - - read - extra: test + - files: + - path: test-path + mode: read + extra: test """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( - "[7:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " - + "for entitlement type [file]: extraneous entitlement parameter(s) {extra=test}", + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + + "for entitlement type [files]: unknown key(s) {extra=test} in a listed file for files entitlement", ppe.getMessage() ); } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index 08185c3f82b31..e84c8ad2a83c7 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -9,6 +9,14 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; @@ -32,6 +40,35 @@ public ManyConstructorsEntitlement(String s) {} public ManyConstructorsEntitlement(int i) {} } + public static class ManyMethodsEntitlement implements Entitlement { + @ExternalEntitlement + public static ManyMethodsEntitlement create(String s) { + return new ManyMethodsEntitlement(); + } + + @ExternalEntitlement + public static ManyMethodsEntitlement create(int i) { + return new ManyMethodsEntitlement(); + } + } + + public static class ConstructorAndMethodEntitlement implements Entitlement { + @ExternalEntitlement + 
public static ConstructorAndMethodEntitlement create(String s) { + return new ConstructorAndMethodEntitlement(s); + } + + @ExternalEntitlement + public ConstructorAndMethodEntitlement(String s) {} + } + + public static class NonStaticMethodEntitlement implements Entitlement { + @ExternalEntitlement + public NonStaticMethodEntitlement create() { + return new NonStaticMethodEntitlement(); + } + } + public void testGetEntitlementTypeName() { assertEquals("create_class_loader", PolicyParser.getEntitlementTypeName(CreateClassLoaderEntitlement.class)); @@ -47,7 +84,12 @@ public void testPolicyBuilder() throws IOException { .parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", List.of("read", "write"))))) + List.of( + new Scope( + "entitlement-module-name", + List.of(FilesEntitlement.build(List.of(Map.of("path", "test/path/to/file", "mode", "read_write")))) + ) + ) ); assertEquals(expected, parsedPolicy); } @@ -57,7 +99,12 @@ public void testPolicyBuilderOnExternalPlugin() throws IOException { .parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", List.of("read", "write"))))) + List.of( + new Scope( + "entitlement-module-name", + List.of(FilesEntitlement.build(List.of(Map.of("path", "test/path/to/file", "mode", "read_write")))) + ) + ) ); assertEquals(expected, parsedPolicy); } @@ -166,4 +213,60 @@ public void testMultipleConstructorsAnnotated() throws IOException { ) ); } + + public void testMultipleMethodsAnnotated() throws IOException { + var parser = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - many_methods + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", true, Map.of("many_methods", ManyMethodsEntitlement.class)); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + "[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$ManyMethodsEntitlement]" + + " has more than one constructor and/or method annotated with ExternalEntitlement" + ) + ); + } + + public void testConstructorAndMethodAnnotated() throws IOException { + var parser = new PolicyParser( + new ByteArrayInputStream(""" + entitlement-module-name: + - constructor_and_method + """.getBytes(StandardCharsets.UTF_8)), + "test-policy.yaml", + true, + Map.of("constructor_and_method", ConstructorAndMethodEntitlement.class) + ); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + "[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$ConstructorAndMethodEntitlement]" + + " has more than one constructor and/or method annotated with ExternalEntitlement" + ) + ); + } + + public void testNonStaticMethodAnnotated() throws IOException { + var parser = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - non_static + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", true, Map.of("non_static", NonStaticMethodEntitlement.class)); + + var e = expectThrows(IllegalStateException.class, parser::parsePolicy); + assertThat( + e.getMessage(), + equalTo( + "entitlement class " + + "[org.elasticsearch.entitlement.runtime.policy.PolicyParserTests$NonStaticMethodEntitlement]" + + " has non-static method annotated with ExternalEntitlement" + ) + ); + } } diff --git 
a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/TestAgent.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/TestAgent.java new file mode 100644 index 0000000000000..2935fdd6faf31 --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/TestAgent.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.agent; + +/** + * Dummy class for testing agent entitlements. + */ +public class TestAgent {} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/inner/TestInnerAgent.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/inner/TestInnerAgent.java new file mode 100644 index 0000000000000..d4a9531bafbb2 --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/inner/TestInnerAgent.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.agent.inner; + +/** + * Dummy class for testing agent entitlements. + */ +public class TestInnerAgent {} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java new file mode 100644 index 0000000000000..5011fe2be462b --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +public class FilesEntitlementTests extends ESTestCase { + + public void testEmptyBuild() { + PolicyValidationException pve = expectThrows(PolicyValidationException.class, () -> FilesEntitlement.build(List.of())); + assertEquals(pve.getMessage(), "must specify at least one path"); + pve = expectThrows(PolicyValidationException.class, () -> FilesEntitlement.build(null)); + assertEquals(pve.getMessage(), "must specify at least one path"); + } +} diff --git a/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml b/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml index f13f574535bec..6b1a5c22993fa 100644 --- a/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml +++ b/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml @@ -1,6 +1,4 @@ entitlement-module-name: - - file: - path: "test/path/to/file" - actions: - - "read" - - "write" + - files: + - path: "test/path/to/file" + mode: "read_write" diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java index 5c1789f3aa66b..c83c9a6234a77 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java @@ -11,11 +11,15 @@ public class NativeAccessUtil { /** - * Enables native access for the provided module. No-op for JDK 21 or before. + * Enables native access for the provided module. + * We need to have this adapter even if the method is available in JDK 21, as it was in preview. 
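+ * A typical call site (a sketch; the module name here is invented) pairs this with the Controller obtained when defining a module layer: + * <pre> + * ModuleLayer.Controller controller = ModuleLayer.defineModulesWithOneLoader(configuration, List.of(ModuleLayer.boot()), loader); + * NativeAccessUtil.enableNativeAccess(controller, controller.layer().findModule("org.example.consumer").orElseThrow()); + * </pre>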
+ * Available in JDK 22+, required for JDK 24+ when using --illegal-native-access=deny */ - public static void enableNativeAccess(ModuleLayer.Controller controller, Module module) {} + public static void enableNativeAccess(ModuleLayer.Controller controller, Module module) { + controller.enableNativeAccess(module); + } public static boolean isNativeAccessEnabled(Module module) { - return true; + return module.isNativeAccessEnabled(); } } diff --git a/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java b/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java index 79bfaec6a6d11..3e1815a74fbfe 100644 --- a/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java +++ b/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java @@ -13,8 +13,6 @@ import java.nio.file.Path; import java.security.KeyStore; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Set; @@ -25,7 +23,6 @@ import javax.net.ssl.TrustManagerFactory; import static org.elasticsearch.common.ssl.KeyStoreUtil.inferKeyStoreType; -import static org.elasticsearch.common.ssl.SslConfiguration.ORDERED_PROTOCOL_ALGORITHM_MAP; import static org.elasticsearch.common.ssl.SslConfigurationKeys.CERTIFICATE; import static org.elasticsearch.common.ssl.SslConfigurationKeys.CERTIFICATE_AUTHORITIES; import static org.elasticsearch.common.ssl.SslConfigurationKeys.CIPHERS; @@ -63,11 +60,7 @@ */ public abstract class SslConfigurationLoader { - static final List<String> DEFAULT_PROTOCOLS = Collections.unmodifiableList( - ORDERED_PROTOCOL_ALGORITHM_MAP.containsKey("TLSv1.3") ? Arrays.asList("TLSv1.3", "TLSv1.2", "TLSv1.1") : Arrays.asList("TLSv1.2", "TLSv1.1") - ); + static final List<String> DEFAULT_PROTOCOLS = List.of("TLSv1.3", "TLSv1.2"); private static final List<String> JDK12_CIPHERS = List.of( // TLSv1.3 cipher has PFS, AEAD, hardware support diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index 6add1b0ac4a13..abd482d8298ef 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; import java.util.Collections; @@ -573,7 +574,15 @@ private void rebucket() { long[] mergeMap = new long[Math.toIntExact(oldOrds.size())]; bucketOrds = new LongKeyedBucketOrds.FromMany(bigArrays()); success = true; - for (long owningBucketOrd = 0; owningBucketOrd <= oldOrds.maxOwningBucketOrd(); owningBucketOrd++) { + long maxOwning = oldOrds.maxOwningBucketOrd(); + for (long owningBucketOrd = 0; owningBucketOrd <= maxOwning; owningBucketOrd++) { + /* + * Check for cancellation during this tight loop as it can take a while and the standard + * cancellation checks don't run during the loop, because it's a tight loop.
+ */ + if (context.isCancelled()) { + throw new TaskCancelledException("cancelled"); + } LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = oldOrds.ordsEnum(owningBucketOrd); Rounding.Prepared preparedRounding = preparedRoundings[roundingIndexFor(owningBucketOrd)]; while (ordsEnum.next()) { diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java index 74c1f3c16278f..2eb21cfc09650 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/metric/MatrixStatsAggregatorTests.java @@ -36,7 +36,7 @@ public void testNoData() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Collections.singletonList("field") ); - InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft).noReductionCancellation()); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); } @@ -54,7 +54,7 @@ public void testUnmapped() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Collections.singletonList("bogus") ); - InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ft).noReductionCancellation()); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); } @@ -88,7 +88,7 @@ public void testTwoFields() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Arrays.asList(fieldA, fieldB) ); - InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ftA, ftB)); + InternalMatrixStats stats = searchAndReduce(reader, new AggTestConfig(aggBuilder, ftA, ftB).noReductionCancellation()); multiPassStats.assertNearlyEqual(stats); assertTrue(MatrixAggregationInspectionHelper.hasValue(stats)); } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 173e1eeef60a2..0c8821f29dbf1 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -20,7 +20,7 @@ esplugin { restResources { restApi { - include '_common', 'indices', 'index', 'cluster', 'search', 'nodes', 'bulk', 'termvectors', 'explain', 'count' + include '_common', 'indices', 'index', 'cluster', 'search', 'nodes', 'bulk', 'termvectors', 'explain', 'count', 'capabilities' } } diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java index 40a7b64bc91e7..a385db95d882a 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java @@ -207,7 +207,7 @@ public void testSynonymsInMultiplexerUpdateable() throws FileNotFoundException, public void testUpdateableSynonymsRejectedAtIndexTime() throws FileNotFoundException, IOException { String synonymsFileName = "synonyms.txt"; setupResourceFile(synonymsFileName, 
"foo, baz"); - Path configDir = node().getEnvironment().configFile(); + Path configDir = node().getEnvironment().configDir(); if (Files.exists(configDir) == false) { Files.createDirectory(configDir); } @@ -319,7 +319,7 @@ public void testKeywordMarkerUpdateable() throws IOException { } private Path setupResourceFile(String fileName, String... content) throws IOException { - Path configDir = node().getEnvironment().configFile(); + Path configDir = node().getEnvironment().configDir(); if (Files.exists(configDir) == false) { Files.createDirectory(configDir); } diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java index 8209d9f543a31..06f19c0d60dba 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java @@ -57,7 +57,7 @@ public void testSynonymsWithPreview() throws FileNotFoundException, IOException, } private void testSynonymsUpdate(boolean preview) throws FileNotFoundException, IOException, InterruptedException { - Path config = internalCluster().getInstance(Environment.class).configFile(); + Path config = internalCluster().getInstance(Environment.class).configDir(); String synonymsFileName = "synonyms.txt"; Path synonymsFile = config.resolve(synonymsFileName); writeFile(synonymsFile, "foo, baz"); @@ -106,7 +106,7 @@ public void testSynonymsUpdateInvalid() throws IOException { final String synonymsFileName = "synonyms.txt"; final String fieldName = "field"; - Path config = internalCluster().getInstance(Environment.class).configFile(); + Path config = internalCluster().getInstance(Environment.class).configDir(); Path synonymsFile = config.resolve(synonymsFileName); writeFile(synonymsFile, "foo, baz"); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index e091f0175009e..92e2b3085cc29 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -40,7 +40,7 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW throw new IllegalArgumentException("hyphenation_patterns_path is a required setting."); } - Path hyphenationPatternsFile = env.configFile().resolve(hyphenationPatternsPath); + Path hyphenationPatternsFile = env.configDir().resolve(hyphenationPatternsPath); try { InputStream in = Files.newInputStream(hyphenationPatternsFile); diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml index 971f530cebeb5..24e04174cd1e4 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml @@ -59,3 +59,28 @@ - match: { detail.tokenizer.tokens.0.token: ABc } - match: { 
detail.tokenfilters.0.name: lowercase } - match: { detail.tokenfilters.0.tokens.0.token: abc } + +--- +"Custom analyzer is not buildable": + - requires: + test_runner_features: [ capabilities ] + reason: This capability is required to run the test + capabilities: + - method: GET + path: /_analyze + capabilities: [ wrong_custom_analyzer_returns_400 ] + + - do: + catch: bad_request + indices.analyze: + body: + text: the foxes jumping quickly + tokenizer: + standard + filter: + type: hunspell + locale: en_US + + - match: { status: 400 } + - match: { error.type: illegal_argument_exception } + - match: { error.reason: "Can not build a custom analyzer" } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index 8026ec641d040..506c107b382a1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -21,6 +21,10 @@ public class DataStreamFeatures implements FeatureSpecification { public static final NodeFeature DATA_STREAM_FAILURE_STORE_TSDB_FIX = new NodeFeature("data_stream.failure_store.tsdb_fix"); + public static final NodeFeature DOWNSAMPLE_AGGREGATE_DEFAULT_METRIC_FIX = new NodeFeature( + "data_stream.downsample.default_aggregate_metric_fix" + ); + @Override public Set<NodeFeature> getFeatures() { return Set.of(); @@ -28,6 +32,6 @@ public Set<NodeFeature> getFeatures() { @Override public Set<NodeFeature> getTestFeatures() { - return Set.of(DATA_STREAM_FAILURE_STORE_TSDB_FIX); + return Set.of(DATA_STREAM_FAILURE_STORE_TSDB_FIX, DOWNSAMPLE_AGGREGATE_DEFAULT_METRIC_FIX); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java index 3d08be1f24a42..c150f64e8cc4a 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java @@ -248,7 +248,7 @@ public void setup() throws Exception { MetadataCreateIndexService createIndexService; { Environment env = mock(Environment.class); - when(env.sharedDataFile()).thenReturn(null); + when(env.sharedDataDir()).thenReturn(null); AllocationService allocationService = mock(AllocationService.class); when(allocationService.reroute(any(ClusterState.class), any(String.class), any())).then(i -> i.getArguments()[0]); when(allocationService.getShardRoutingRoleStrategy()).thenReturn(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY); diff --git a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java index 22f2a9fa394fb..68c6a5c826b34 100644 --- a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java +++ b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java @@ -49,7 +49,8 @@ private static ElasticsearchCluster createCluster() { .feature(FAILURE_STORE_ENABLED) .setting("xpack.security.enabled", "true") .keystore("bootstrap.password", "x-pack-test-password") - .user("x_pack_rest_user",
"x-pack-test-password") + .systemProperty("es.queryable_built_in_roles_enabled", "false"); if (initTestSeed().nextBoolean()) { clusterBuilder.setting("xpack.license.self_generated.type", "trial"); } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml index 2f6b7a0bff34b..0b4fe28f3961d 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -786,7 +786,7 @@ teardown: - is_false: items.1.create.failure_store --- -"Test failure store status with bulk request": +"Test failure store status with bulk request failing on mappings": - do: allowed_warnings: - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" @@ -865,3 +865,90 @@ teardown: - match: { items.3.create.status: 400 } - match: { items.3.create.error.type: document_parsing_exception } - match: { items.3.create.failure_store: not_enabled } + +--- +"Test failure store status with bulk request failing in ingest": + - do: + ingest.put_pipeline: + id: "failing_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "fail": { + "message" : "error_message", + "tag": "foo-tag" + } + } + ] + } + - match: { acknowledged: true } + + - do: + allowed_warnings: + - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" + indices.put_index_template: + name: generic_logs_template + body: + index_patterns: logs-* + data_stream: {} + template: + settings: + number_of_shards: 1 + number_of_replicas: 1 + index: + default_pipeline: "failing_pipeline" + mappings: + properties: + '@timestamp': + type: date + count: + type: long + data_stream_options: + failure_store: + enabled: true + - do: + allowed_warnings: + - "index template [no-fs] has index patterns [no-fs*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [no-fs] will take precedence during new index creation" + indices.put_index_template: + name: no-fs + body: + index_patterns: no-fs* + data_stream: {} + template: + settings: + number_of_shards: 1 + number_of_replicas: 0 + index: + default_pipeline: "failing_pipeline" + mappings: + properties: + '@timestamp': + type: date + count: + type: long + data_stream_options: + failure_store: + enabled: false + + - do: + bulk: + refresh: true + body: + - '{ "create": { "_index": "logs-foobar", "_id": "1" } }' + - '{ "@timestamp": "2022-01-01", "count": 1 }' + - '{ "create": { "_index": "no-fs", "_id": "1" } }' + - '{ "@timestamp": "2022-01-01", "count": 1 }' + - is_true: errors + # Successfully indexed to backing index + - match: { items.0.create._index: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { items.0.create.status: 201 } + - match: { items.0.create.failure_store: used } + + # Rejected, eligible to go to failure store, but failure store not enabled + - match: { items.1.create._index: 'no-fs' } + - match: { 
items.1.create.status: 500 } + - match: { items.1.create.failure_store: not_enabled } + - match: { items.1.create.error.type: fail_processor_exception } + - contains: { items.1.create.error.reason: error_message } diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index 988ca317b4730..bdda4872fb4dc 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -28,12 +28,12 @@ tasks.named('internalClusterTestTestingConventions').configure { } dependencies { - implementation('com.maxmind.geoip2:geoip2:4.2.0') + implementation('com.maxmind.geoip2:geoip2:4.2.1') // geoip2 dependencies: runtimeOnly("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") runtimeOnly("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") runtimeOnly("com.fasterxml.jackson.core:jackson-core:${versions.jackson}") - implementation('com.maxmind.db:maxmind-db:3.1.0') + implementation('com.maxmind.db:maxmind-db:3.1.1') testImplementation 'org.elasticsearch:geolite2-databases:20191119' internalClusterTestImplementation project(':modules:reindex') diff --git a/modules/ingest-geoip/qa/file-based-update/src/javaRestTest/resources/GeoLite2-City-Test.mmdb b/modules/ingest-geoip/qa/file-based-update/src/javaRestTest/resources/GeoLite2-City-Test.mmdb index 0809201619b59..40c7eb0c47f92 100644 Binary files a/modules/ingest-geoip/qa/file-based-update/src/javaRestTest/resources/GeoLite2-City-Test.mmdb and b/modules/ingest-geoip/qa/file-based-update/src/javaRestTest/resources/GeoLite2-City-Test.mmdb differ diff --git a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle index e53e0e080cce6..71f95a990c6c8 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle +++ b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java index 1dfcb524f46a0..9525eed7f2ebc 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java +++ b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java @@ -14,9 +14,16 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Nullable; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.upgrades.ParameterizedFullClusterRestartTestCase; @@ -25,28 +32,46 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; -import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.is; public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { private static final boolean useFixture = Boolean.getBoolean("geoip_use_service") == false; - private static GeoIpHttpFixture fixture = new GeoIpHttpFixture(useFixture); + private static final GeoIpHttpFixture fixture = new GeoIpHttpFixture(useFixture); - private static ElasticsearchCluster cluster = ElasticsearchCluster.local() + // e.g. use ./gradlew -Dtests.jvm.argline="-Dgeoip_test_with_security=false" ":modules:ingest-geoip:qa:full-cluster-restart:check" + // to set this to false, if you so desire + private static final boolean useSecurity = Boolean.parseBoolean(System.getProperty("geoip_test_with_security", "true")); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("ingest.geoip.downloader.endpoint", () -> fixture.getAddress(), s -> useFixture) - .setting("xpack.security.enabled", "false") + .setting("xpack.security.enabled", useSecurity ? 
"true" : "false") .feature(FeatureFlag.TIME_SERIES_MODE) .build(); + @Override + protected Settings restClientSettings() { + Settings settings = super.restClientSettings(); + if (useSecurity) { + String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); + settings = Settings.builder().put(settings).put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + return settings; + } + @ClassRule public static TestRule ruleChain = RuleChain.outerRule(fixture).around(cluster); @@ -60,6 +85,9 @@ protected ElasticsearchCluster getUpgradeCluster() { } public void testGeoIpSystemFeaturesMigration() throws Exception { + final List maybeSecurityIndex = useSecurity ? List.of(".security-7") : List.of(); + final List maybeSecurityIndexReindexed = useSecurity ? List.of(".security-7-reindexed-for-10") : List.of(); + if (isRunningAgainstOldCluster()) { Request enableDownloader = new Request("PUT", "/_cluster/settings"); enableDownloader.setJsonEntity(""" @@ -86,15 +114,35 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { assertBusy(() -> testDatabasesLoaded(), 30, TimeUnit.SECONDS); // the geoip index should be created - assertBusy(() -> testCatIndices(".geoip_databases")); + assertBusy(() -> testCatIndices(List.of(".geoip_databases"), maybeSecurityIndex)); assertBusy(() -> testIndexGeoDoc()); + + // before the upgrade, Kibana should work + assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndex)); + + // as should a normal get * + assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndex)); } else { + // after the upgrade, but before the migration, Kibana should work + assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndex)); + + // as should a normal get * + assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndex)); + + // migrate the system features and give the cluster a moment to settle Request migrateSystemFeatures = new Request("POST", "/_migration/system_features"); assertOK(client().performRequest(migrateSystemFeatures)); + ensureHealth(request -> request.addParameter("wait_for_status", "yellow")); - assertBusy(() -> testCatIndices(".geoip_databases-reindexed-for-10", "my-index-00001")); + assertBusy(() -> testCatIndices(List.of(".geoip_databases-reindexed-for-10", "my-index-00001"), maybeSecurityIndexReindexed)); assertBusy(() -> testIndexGeoDoc()); + // after the migration, Kibana should work + assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndexReindexed)); + + // as should a normal get * + assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndexReindexed)); + Request disableDownloader = new Request("PUT", "/_cluster/settings"); disableDownloader.setJsonEntity(""" {"persistent": {"ingest.geoip.downloader.enabled": false}} @@ -102,7 +150,7 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { assertOK(client().performRequest(disableDownloader)); // the geoip index should be deleted - assertBusy(() -> testCatIndices("my-index-00001")); + assertBusy(() -> testCatIndices(List.of("my-index-00001"), maybeSecurityIndexReindexed)); Request enableDownloader = new Request("PUT", "/_cluster/settings"); enableDownloader.setJsonEntity(""" @@ -114,7 +162,7 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { assertBusy(() -> testDatabasesLoaded(), 30, TimeUnit.SECONDS); // the geoip index should be recreated - assertBusy(() -> 
testCatIndices(".geoip_databases", "my-index-00001")); + assertBusy(() -> testCatIndices(List.of(".geoip_databases", "my-index-00001"), maybeSecurityIndexReindexed)); assertBusy(() -> testIndexGeoDoc()); } } @@ -146,11 +194,17 @@ private void testDatabasesLoaded() throws IOException { } } - private void testCatIndices(String... indexNames) throws IOException { + private void testCatIndices(List indexNames, @Nullable List additionalIndexNames) throws IOException { Request catIndices = new Request("GET", "_cat/indices/*?s=index&h=index&expand_wildcards=all"); String response = EntityUtils.toString(client().performRequest(catIndices).getEntity()); List indices = List.of(response.trim().split("\\s+")); - assertThat(indices, contains(indexNames)); + + if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { + indexNames = new ArrayList<>(indexNames); // recopy into a mutable list + indexNames.addAll(additionalIndexNames); + } + + assertThat(new HashSet<>(indices), is(new HashSet<>(indexNames))); } private void testIndexGeoDoc() throws IOException { @@ -165,4 +219,40 @@ private void testIndexGeoDoc() throws IOException { assertNull(doc.evaluate("_source.tags")); assertEquals("Sweden", doc.evaluate("_source.geo.country_name")); } + + private void testGetStar(List indexNames, @Nullable List additionalIndexNames) throws IOException { + Request getStar = new Request("GET", "*?expand_wildcards=all"); + getStar.setOptions( + RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors + ); + Response response = client().performRequest(getStar); + assertOK(response); + + if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { + indexNames = new ArrayList<>(indexNames); // recopy into a mutable list + indexNames.addAll(additionalIndexNames); + } + + Map map = responseAsMap(response); + assertThat(map.keySet(), is(new HashSet<>(indexNames))); + } + + private void testGetStarAsKibana(List indexNames, @Nullable List additionalIndexNames) throws IOException { + Request getStar = new Request("GET", "*?expand_wildcards=all"); + getStar.setOptions( + RequestOptions.DEFAULT.toBuilder() + .addHeader("X-elastic-product-origin", "kibana") + .setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors + ); + Response response = client().performRequest(getStar); + assertOK(response); + + if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { + indexNames = new ArrayList<>(indexNames); // recopy into a mutable list + indexNames.addAll(additionalIndexNames); + } + + Map map = responseAsMap(response); + assertThat(map.keySet(), is(new HashSet<>(indexNames))); + } } diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index dd177fed5732a..daba9c4e5e156 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -70,6 +70,7 @@ import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDefaultDatabases; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.hamcrest.Matchers.allOf; import 
static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -172,10 +173,15 @@ public void testInvalidTimestamp() throws Exception { for (Path geoIpTmpDir : geoIpTmpDirs) { try (Stream files = Files.list(geoIpTmpDir)) { Set names = files.map(f -> f.getFileName().toString()).collect(Collectors.toSet()); - assertThat(names, not(hasItem("GeoLite2-ASN.mmdb"))); - assertThat(names, not(hasItem("GeoLite2-City.mmdb"))); - assertThat(names, not(hasItem("GeoLite2-Country.mmdb"))); - assertThat(names, not(hasItem("MyCustomGeoLite2-City.mmdb"))); + assertThat( + names, + allOf( + not(hasItem("GeoLite2-ASN.mmdb")), + not(hasItem("GeoLite2-City.mmdb")), + not(hasItem("GeoLite2-Country.mmdb")), + not(hasItem("MyCustomGeoLite2-City.mmdb")) + ) + ); } } }); @@ -385,18 +391,18 @@ public void testUseGeoIpProcessorWithDownloadedDBs() throws Exception { assertThat( files, containsInAnyOrder( - "GeoLite2-City.mmdb", - "GeoLite2-Country.mmdb", "GeoLite2-ASN.mmdb", - "MyCustomGeoLite2-City.mmdb", - "GeoLite2-City.mmdb_COPYRIGHT.txt", - "GeoLite2-Country.mmdb_COPYRIGHT.txt", "GeoLite2-ASN.mmdb_COPYRIGHT.txt", - "MyCustomGeoLite2-City.mmdb_COPYRIGHT.txt", + "GeoLite2-ASN.mmdb_LICENSE.txt", + "GeoLite2-City.mmdb", + "GeoLite2-City.mmdb_COPYRIGHT.txt", "GeoLite2-City.mmdb_LICENSE.txt", + "GeoLite2-City.mmdb_README.txt", + "GeoLite2-Country.mmdb", + "GeoLite2-Country.mmdb_COPYRIGHT.txt", "GeoLite2-Country.mmdb_LICENSE.txt", - "GeoLite2-ASN.mmdb_LICENSE.txt", - "GeoLite2-ASN.mmdb_README.txt", + "MyCustomGeoLite2-City.mmdb", + "MyCustomGeoLite2-City.mmdb_COPYRIGHT.txt", "MyCustomGeoLite2-City.mmdb_LICENSE.txt" ) ); @@ -658,7 +664,7 @@ private List getGeoIpTmpDirs() throws IOException { .map(DiscoveryNode::getId) .collect(Collectors.toSet()); // All nodes share the same geoip base dir in the shared tmp dir: - Path geoipBaseTmpDir = internalCluster().getDataNodeInstance(Environment.class).tmpFile().resolve("geoip-databases"); + Path geoipBaseTmpDir = internalCluster().getDataNodeInstance(Environment.class).tmpDir().resolve("geoip-databases"); assertThat(Files.exists(geoipBaseTmpDir), is(true)); final List geoipTmpDirs; try (Stream files = Files.list(geoipBaseTmpDir)) { @@ -670,7 +676,7 @@ private List getGeoIpTmpDirs() throws IOException { private void setupDatabasesInConfigDirectory() throws Exception { StreamSupport.stream(internalCluster().getInstances(Environment.class).spliterator(), false) - .map(Environment::configFile) + .map(Environment::configDir) .map(path -> path.resolve("ingest-geoip")) .distinct() .forEach(path -> { @@ -698,7 +704,7 @@ private void setupDatabasesInConfigDirectory() throws Exception { private void deleteDatabasesInConfigDirectory() throws Exception { StreamSupport.stream(internalCluster().getInstances(Environment.class).spliterator(), false) - .map(Environment::configFile) + .map(Environment::configDir) .map(path -> path.resolve("ingest-geoip")) .distinct() .forEach(path -> { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java index 3d2b54b04695f..289008236a852 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java @@ -42,7 +42,7 @@ final class ConfigDatabases implements Closeable { private final ConcurrentMap configDatabases; 
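+ // (descriptive note: judging by the constructor below, this map holds the databases found under config/ingest-geoip, keyed by filename)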
ConfigDatabases(Environment environment, GeoIpCache cache) { - this(environment.configFile().resolve("ingest-geoip"), cache); + this(environment.configDir().resolve("ingest-geoip"), cache); } ConfigDatabases(Path geoipConfigDir, GeoIpCache cache) { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java index 940231b12c894..13958254b9020 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java @@ -114,7 +114,7 @@ public final class DatabaseNodeService implements IpDatabaseProvider { ClusterService clusterService ) { this( - environment.tmpFile(), + environment.tmpDir(), new OriginSettingClient(client, IngestService.INGEST_ORIGIN), cache, new ConfigDatabases(environment, cache), diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java index 120afe0e9e815..fb4fadf043b05 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java @@ -53,6 +53,10 @@ public class DatabaseReaderLazyLoader implements IpDatabase { private volatile boolean deleteDatabaseFileOnShutdown; private final AtomicInteger currentUsages = new AtomicInteger(0); + // it seems insane, especially if you read the code for UnixPath, but calling toString on a path in advance here is faster enough + // than calling it on every call to cache.putIfAbsent that it makes the slight additional internal complication worth it + private final String cachedDatabasePathToString; + DatabaseReaderLazyLoader(GeoIpCache cache, Path databasePath, String md5) { this.cache = cache; this.databasePath = Objects.requireNonNull(databasePath); @@ -61,6 +65,9 @@ public class DatabaseReaderLazyLoader implements IpDatabase { this.databaseReader = new SetOnce<>(); this.databaseType = new SetOnce<>(); this.buildDate = new SetOnce<>(); + + // cache the toString on construction + this.cachedDatabasePathToString = databasePath.toString(); } /** @@ -99,7 +106,7 @@ int current() { @Override @Nullable public RESPONSE getResponse(String ipAddress, CheckedBiFunction responseProvider) { - return cache.putIfAbsent(ipAddress, databasePath.toString(), ip -> { + return cache.putIfAbsent(ipAddress, cachedDatabasePathToString, ip -> { try { return responseProvider.apply(get(), ipAddress); } catch (Exception e) { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java index 96525d427d3e8..91b040e8699bb 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java @@ -206,12 +206,32 @@ public static Metadata fromXContent(XContentParser parser) { } public boolean isCloseToExpiration() { - return Instant.ofEpochMilli(lastCheck).isBefore(Instant.now().minus(25, ChronoUnit.DAYS)); + final Instant now = Instant.ofEpochMilli(System.currentTimeMillis()); // millisecond precision is sufficient (and faster) + return 
Instant.ofEpochMilli(lastCheck).isBefore(now.minus(25, ChronoUnit.DAYS)); }
+ // these constants support the micro optimization below, see that note
+ private static final TimeValue THIRTY_DAYS = TimeValue.timeValueDays(30);
+ private static final long THIRTY_DAYS_MILLIS = THIRTY_DAYS.millis();
+
 public boolean isNewEnough(Settings settings) {
- TimeValue valid = settings.getAsTime("ingest.geoip.database_validity", TimeValue.timeValueDays(30));
- return Instant.ofEpochMilli(lastCheck).isAfter(Instant.now().minus(valid.getMillis(), ChronoUnit.MILLIS));
+ // micro optimization: this looks a little silly, but the expected case is that database_validity is only used in tests.
+ // we run this code on every document, though, so the argument checking and other bits that getAsTime does are enough
+ // to show up in a flame graph.
+
+ // if you grep for "ingest.geoip.database_validity", you'll see that it's not a 'real' setting -- it's only defined in
+ // AbstractGeoIpIT; that's why it's an inline string constant here and not some static final, and also why this
+ // setting can never exist in a real running cluster
+
+ final long valid;
+ if (settings.hasValue("ingest.geoip.database_validity")) {
+ valid = settings.getAsTime("ingest.geoip.database_validity", THIRTY_DAYS).millis();
+ } else {
+ valid = THIRTY_DAYS_MILLIS;
+ }
+
+ final Instant now = Instant.ofEpochMilli(System.currentTimeMillis()); // millisecond precision is sufficient (and faster)
+ return Instant.ofEpochMilli(lastCheck).isAfter(now.minus(valid, ChronoUnit.MILLIS));
 }
 @Override
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
index 11aa123824d18..5d45ff5d55855 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
@@ -104,16 +104,16 @@ public void testParseLocationDouble() {
 public void testAsnFree() {
 assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS);
 String databaseName = "ip_asn_sample.mmdb";
- String ip = "23.32.184.0";
+ String ip = "23.200.217.1";
 assertExpectedLookupResults(
 databaseName,
 ip,
 new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()),
 Map.ofEntries(
 entry("ip", ip),
- entry("organization_name", "Akamai Technologies, Inc."),
- entry("asn", 16625L),
- entry("network", "23.32.184.0/21"),
+ entry("organization_name", "Akamai Technologies Tokyo ASN"),
+ entry("asn", 24319L),
+ entry("network", "23.200.217.0/24"),
 entry("domain", "akamai.com")
 ),
 Map.ofEntries(entry("name", "organization_name"), entry("asn", "asn"), entry("network", "network"), entry("domain", "domain")),
@@ -125,17 +125,17 @@ public void testAsnFree() {
 public void testAsnStandard() {
 assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS);
 String databaseName = "asn_sample.mmdb";
- String ip = "69.19.224.0";
+ String ip = "207.244.150.1";
 assertExpectedLookupResults(
 databaseName,
 ip,
 new IpinfoIpDataLookups.Asn(Database.AsnV2.properties()),
 Map.ofEntries(
 entry("ip", ip),
- entry("organization_name", "TPx Communications"),
- entry("asn", 14265L),
- entry("network", "69.19.224.0/22"),
- entry("domain", "tpx.com"),
+ entry("organization_name", "Wowrack.com"),
+ entry("asn", 23033L),
+ entry("network", "207.244.150.0/23"),
+ entry("domain",
"wowrack.com"), entry("type", "hosting"), entry("country_iso_code", "US") ), @@ -192,15 +192,15 @@ public void testAsnInvariants() { public void testCountryFree() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "ip_country_sample.mmdb"; - String ip = "20.33.76.0"; + String ip = "149.7.32.1"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.Country(Database.CountryV2.properties()), Map.ofEntries( entry("ip", ip), - entry("country_name", "Ireland"), - entry("country_iso_code", "IE"), + entry("country_name", "United Kingdom"), + entry("country_iso_code", "GB"), entry("continent_name", "Europe"), entry("continent_code", "EU") ), @@ -289,14 +289,14 @@ public void testGeolocationInvariants() { public void testPrivacyDetectionStandard() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "privacy_detection_sample.mmdb"; - String ip = "2.57.109.154"; + String ip = "20.102.24.249"; assertExpectedLookupResults( databaseName, ip, new IpinfoIpDataLookups.PrivacyDetection(Database.PrivacyDetection.properties()), Map.ofEntries( entry("ip", ip), - entry("hosting", false), + entry("hosting", true), entry("proxy", false), entry("relay", false), entry("tor", false), @@ -317,7 +317,7 @@ public void testPrivacyDetectionStandard() { public void testPrivacyDetectionStandardNonEmptyService() { assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); String databaseName = "privacy_detection_sample.mmdb"; - String ip = "59.29.201.246"; + String ip = "14.52.64.231"; assertExpectedLookupResults( databaseName, ip, diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-Anonymous-IP-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoIP2-Anonymous-IP-Test.mmdb index 1b142d0001b9c..460e58ee56592 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoIP2-Anonymous-IP-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoIP2-Anonymous-IP-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-City-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoIP2-City-Test.mmdb index 04220ff4b6411..67dcd4a8e5a17 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoIP2-City-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoIP2-City-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-Connection-Type-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoIP2-Connection-Type-Test.mmdb index c49ca3ad48f39..4d0a477a623a4 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoIP2-Connection-Type-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoIP2-Connection-Type-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-Country-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoIP2-Country-Test.mmdb index 0b1f6cf50b2a0..8b1436b0c387f 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoIP2-Country-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoIP2-Country-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-Domain-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoIP2-Domain-Test.mmdb index 596a96617f241..0aaccefa7fec4 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoIP2-Domain-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoIP2-Domain-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-Enterprise-Test.mmdb 
b/modules/ingest-geoip/src/test/resources/GeoIP2-Enterprise-Test.mmdb index 16c1acf800260..e0a99f6ba94e1 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoIP2-Enterprise-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoIP2-Enterprise-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-ISP-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoIP2-ISP-Test.mmdb index a4277d0a55c47..f9eaa14cee3aa 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoIP2-ISP-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoIP2-ISP-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoLite2-ASN-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoLite2-ASN-Test.mmdb index 2614d1a7aa235..af37b8ae278fd 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoLite2-ASN-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoLite2-ASN-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoLite2-City-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoLite2-City-Test.mmdb index 393efe464b610..40c7eb0c47f92 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoLite2-City-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoLite2-City-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoLite2-Country-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoLite2-Country-Test.mmdb index 44b5ff1a3f1be..ef614ec16c3fd 100644 Binary files a/modules/ingest-geoip/src/test/resources/GeoLite2-Country-Test.mmdb and b/modules/ingest-geoip/src/test/resources/GeoLite2-Country-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb index 289318a124d75..6f93d59731f1b 100644 Binary files a/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb and b/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_asn_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_asn_sample.mmdb index d2bac8452a0f2..fb52368827f6c 100644 Binary files a/modules/ingest-geoip/src/test/resources/ipinfo/ip_asn_sample.mmdb and b/modules/ingest-geoip/src/test/resources/ipinfo/ip_asn_sample.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb index caa218f02770b..507ac2b5eac2a 100644 Binary files a/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb and b/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/privacy_detection_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/privacy_detection_sample.mmdb index 4f2fca5559e14..9ab67cc88ba24 100644 Binary files a/modules/ingest-geoip/src/test/resources/ipinfo/privacy_detection_sample.mmdb and b/modules/ingest-geoip/src/test/resources/ipinfo/privacy_detection_sample.mmdb differ diff --git a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java index 4d71417ec982c..53f8e5c4ea7d1 100644 --- a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java +++ 
b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java @@ -41,7 +41,7 @@ public class IngestUserAgentPlugin extends Plugin implements IngestPlugin { @Override public Map getProcessors(Processor.Parameters parameters) { - Path userAgentConfigDirectory = parameters.env.configFile().resolve("ingest-user-agent"); + Path userAgentConfigDirectory = parameters.env.configDir().resolve("ingest-user-agent"); if (Files.exists(userAgentConfigDirectory) == false && Files.isDirectory(userAgentConfigDirectory)) { throw new IllegalStateException( diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index bc1cd30ad45bf..c327ba49e6d1c 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -423,7 +423,7 @@ public void testResultSizeLimit() throws IOException { ex.getCause().getCause(), allOf( instanceOf(SizeLimitingStringWriter.SizeLimitExceededException.class), - transformedMatch(Throwable::getMessage, endsWith("has exceeded the size limit [1024]")) + transformedMatch(Throwable::getMessage, endsWith("has size [1030] which exceeds the size limit [1024]")) ) ); } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index a992f68d93d9e..153ca8c832ceb 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -12,6 +12,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermStates; import org.apache.lucene.index.memory.MemoryIndex; @@ -23,7 +24,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LeafSimScorer; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Matches; @@ -214,7 +214,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo // No need to ever look at the _source for non-scoring term queries return in.createWeight(searcher, scoreMode, boost); } - // We use a LinkedHashSet here to preserve the ordering of terms to ensure that // later summing of float scores per term is consistent final Set terms = new LinkedHashSet<>(); @@ -267,6 +266,7 @@ public boolean isCacheable(LeafReaderContext ctx) { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { + NumericDocValues norms = context.reader().getNormValues(field); RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0); if (scorer == null) { return Explanation.noMatch("No matching phrase"); @@ -277,8 +277,7 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } float phraseFreq = scorer.freq(); 
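+ // with the LeafSimScorer wrapper removed, this weight now reads the field's norms itself (fetched above via
+ // LeafReader#getNormValues) and hands them straight to SimScorer#explain through the getNormValue helper below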
Explanation freqExplanation = Explanation.match(phraseFreq, "phraseFreq=" + phraseFreq); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); - Explanation scoreExplanation = leafSimScorer.explain(doc, freqExplanation); + Explanation scoreExplanation = simScorer.explain(freqExplanation, getNormValue(norms, doc)); return Explanation.match( scoreExplanation.getValue(), "weight(" + getQuery() + " in " + doc + ") [" + searcher.getSimilarity().getClass().getSimpleName() + "], result of:", @@ -297,9 +296,9 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti public Scorer get(long leadCost) throws IOException { final Scorer approximationScorer = approximationSupplier.get(leadCost); final DocIdSetIterator approximation = approximationScorer.iterator(); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); - return new RuntimePhraseScorer(approximation, leafSimScorer, valueFetcher, field, in); + NumericDocValues norms = context.reader().getNormValues(field); + return new RuntimePhraseScorer(approximation, simScorer, norms, valueFetcher, field, in); } @Override @@ -335,12 +334,23 @@ public Matches matches(LeafReaderContext context, int doc) throws IOException { }; } + private static long getNormValue(NumericDocValues norms, int doc) throws IOException { + if (norms != null) { + boolean found = norms.advanceExact(doc); + assert found; + return norms.longValue(); + } else { + return 1L; // default norm + } + } + private class RuntimePhraseScorer extends Scorer { - private final LeafSimScorer scorer; + private final SimScorer scorer; private final CheckedIntFunction, IOException> valueFetcher; private final String field; private final Query query; private final TwoPhaseIterator twoPhase; + private final NumericDocValues norms; private final MemoryIndexEntry cacheEntry = new MemoryIndexEntry(); @@ -349,12 +359,14 @@ private class RuntimePhraseScorer extends Scorer { private RuntimePhraseScorer( DocIdSetIterator approximation, - LeafSimScorer scorer, + SimScorer scorer, + NumericDocValues norms, CheckedIntFunction, IOException> valueFetcher, String field, Query query ) { this.scorer = scorer; + this.norms = norms; this.valueFetcher = valueFetcher; this.field = field; this.query = query; @@ -386,12 +398,12 @@ public TwoPhaseIterator twoPhaseIterator() { @Override public float getMaxScore(int upTo) throws IOException { - return scorer.getSimScorer().score(Float.MAX_VALUE, 1L); + return scorer.score(Float.MAX_VALUE, 1L); } @Override public float score() throws IOException { - return scorer.score(docID(), freq()); + return scorer.score(freq(), getNormValue(norms, doc)); } @Override diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java index db2762a028e6a..a1174c4448315 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java @@ -86,7 +86,8 @@ public TokenCountFieldMapper build(MapperBuilderContext context) { store.getValue(), hasDocValues.getValue(), nullValue.getValue(), - meta.getValue() + meta.getValue(), + context.isSourceSynthetic() ); 
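+ // context.isSourceSynthetic() tells the field type whether the index uses synthetic _source, so it can adjust
+ // how values are loaded when there is no stored _source to read from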
return new TokenCountFieldMapper(leafName(), ft, builderParams(this, context), this); } @@ -100,7 +101,8 @@ static class TokenCountFieldType extends NumberFieldMapper.NumberFieldType { boolean isStored, boolean hasDocValues, Number nullValue, - Map meta + Map meta, + boolean isSyntheticSource ) { super( name, @@ -114,7 +116,8 @@ static class TokenCountFieldType extends NumberFieldMapper.NumberFieldType { null, false, null, - null + null, + isSyntheticSource ); } diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java index 860d63000f124..84e45024b69ff 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java @@ -9,14 +9,17 @@ package org.elasticsearch.migration; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -28,6 +31,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.AssociatedIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.SystemIndexPlugin; @@ -50,6 +54,10 @@ import java.util.function.BiConsumer; import java.util.function.Function; +import static java.util.Collections.emptySet; +import static java.util.Collections.singletonList; +import static java.util.Collections.unmodifiableSet; +import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -255,12 +263,18 @@ protected void assertIndexHasCorrectProperties( assertThat(thisIndexStats.getTotal().getDocs().getCount(), is((long) INDEX_DOC_COUNT)); } - public static class TestPlugin extends Plugin implements SystemIndexPlugin { + public static class TestPlugin extends Plugin implements SystemIndexPlugin, ActionPlugin { public final AtomicReference>> preMigrationHook = new AtomicReference<>(); public final AtomicReference>> postMigrationHook = new AtomicReference<>(); + private final BlockingActionFilter blockingActionFilter; public TestPlugin() { + blockingActionFilter = new BlockingActionFilter(); + } + @Override + public List getActionFilters() { + return singletonList(blockingActionFilter); } @Override @@ -299,5 +313,26 @@ public void indicesMigrationComplete( postMigrationHook.get().accept(clusterService.state(), 
preUpgradeMetadata); listener.onResponse(true); } + + public static class BlockingActionFilter extends org.elasticsearch.action.support.ActionFilter.Simple { + private Set blockedActions = emptySet(); + + @Override + protected boolean apply(String action, ActionRequest request, ActionListener listener) { + if (blockedActions.contains(action)) { + throw new ElasticsearchException("force exception on [" + action + "]"); + } + return true; + } + + @Override + public int order() { + return 0; + } + + public void blockActions(String... actions) { + blockedActions = unmodifiableSet(newHashSet(actions)); + } + } } } diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index cdf817a6b17b8..efca437d14eb4 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -17,11 +17,14 @@ import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeRequest; import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeResponse; import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -36,10 +39,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.migration.AbstractFeatureMigrationIntegTest.TestPlugin.BlockingActionFilter; import org.elasticsearch.painless.PainlessPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.upgrades.FeatureMigrationResults; import org.elasticsearch.upgrades.SingleFeatureMigrationResult; @@ -272,6 +277,59 @@ public void testMigrateIndexWithWriteBlock() throws Exception { }); } + public void testIndexBlockIsRemovedWhenAliasRequestFails() throws Exception { + createSystemIndexForDescriptor(INTERNAL_UNMANAGED); + ensureGreen(); + + // Block the alias request to simulate a failure + InternalTestCluster internalTestCluster = internalCluster(); + ActionFilters actionFilters = internalTestCluster.getInstance(ActionFilters.class, internalTestCluster.getMasterName()); + BlockingActionFilter blockingActionFilter = null; + for (ActionFilter filter : actionFilters.filters()) { + if (filter instanceof BlockingActionFilter) { + blockingActionFilter = (BlockingActionFilter) filter; + break; + } + } + assertNotNull("BlockingActionFilter should exist", blockingActionFilter); + 
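+ // blocking only the aliases action lets the migration reindex successfully and then fail at the final alias
+ // swap, after the old index has already been write-blocked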
blockingActionFilter.blockActions(TransportIndicesAliasesAction.NAME); + + // Start the migration + client().execute(PostFeatureUpgradeAction.INSTANCE, new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT)).get(); + + // Wait till the migration fails + assertBusy(() -> { + GetFeatureUpgradeStatusResponse statusResp = client().execute( + GetFeatureUpgradeStatusAction.INSTANCE, + new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT) + ).get(); + logger.info(Strings.toString(statusResp)); + assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.ERROR)); + }); + + // Get the settings to see if the write block was removed + var allsettings = client().admin().indices().prepareGetSettings(INTERNAL_UNMANAGED.getIndexPattern()).get().getIndexToSettings(); + var internalUnmanagedOldIndexSettings = allsettings.get(".int-unman-old"); + var writeBlock = internalUnmanagedOldIndexSettings.get(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey()); + assertThat("Write block on old index should be removed on migration ERROR status", writeBlock, equalTo("false")); + + // Unblock the alias request + blockingActionFilter.blockActions(); + + // Retry the migration + client().execute(PostFeatureUpgradeAction.INSTANCE, new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT)).get(); + + // Ensure that the migration is successful after the alias request is unblocked + assertBusy(() -> { + GetFeatureUpgradeStatusResponse statusResp = client().execute( + GetFeatureUpgradeStatusAction.INSTANCE, + new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT) + ).get(); + logger.info(Strings.toString(statusResp)); + assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); + }); + } + public void testMigrationWillRunAfterError() throws Exception { createSystemIndexForDescriptor(INTERNAL_MANAGED); diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java index 914311e1190c1..ceba20570e7e5 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexSslConfig.java @@ -106,7 +106,7 @@ protected List getSettingAsList(String key) throws Exception { return settings.getAsList(key); } }; - configuration = loader.load(environment.configFile()); + configuration = loader.load(environment.configDir()); reload(); final FileChangesListener listener = new FileChangesListener() { diff --git a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml index df557f9944253..394e5e38d9f59 100644 --- a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,3 @@ ALL-UNNAMED: + - manage_threads - outbound_network diff --git a/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml index 74197fb3ed9ae..53049c74b6a47 100644 --- a/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,3 @@ io.netty.common: - outbound_network + - manage_threads diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java 
b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index a8a6986ccbb7a..f1369bae6e306 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -369,7 +369,7 @@ static class CustomWebIdentityTokenCredentialsProvider implements AWSCredentials } // Make sure that a readable symlink to the token file exists in the plugin config directory // AWS_WEB_IDENTITY_TOKEN_FILE exists but we only use Web Identity Tokens if a corresponding symlink exists and is readable - Path webIdentityTokenFileSymlink = environment.configFile().resolve(WEB_IDENTITY_TOKEN_FILE_LOCATION); + Path webIdentityTokenFileSymlink = environment.configDir().resolve(WEB_IDENTITY_TOKEN_FILE_LOCATION); if (Files.exists(webIdentityTokenFileSymlink) == false) { LOGGER.warn( "Cannot use AWS Web Identity Tokens: AWS_WEB_IDENTITY_TOKEN_FILE is defined but no corresponding symlink exists " diff --git a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml index df557f9944253..394e5e38d9f59 100644 --- a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,3 @@ ALL-UNNAMED: + - manage_threads - outbound_network diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java index 69fd0c0f5d6a7..2698eb718ded0 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java @@ -65,7 +65,7 @@ private static Environment getEnvironment() throws IOException { Files.createDirectory(configDirectory.resolve("repository-s3")); Files.writeString(configDirectory.resolve("repository-s3/aws-web-identity-token-file"), "YXdzLXdlYi1pZGVudGl0eS10b2tlbi1maWxl"); Environment environment = Mockito.mock(Environment.class); - Mockito.when(environment.configFile()).thenReturn(configDirectory); + Mockito.when(environment.configDir()).thenReturn(configDirectory); return environment; } @@ -212,7 +212,7 @@ public void testPickUpNewWebIdentityTokenWhenItsChanged() throws Exception { latch.countDown(); } }); - Files.writeString(environment.configFile().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); + Files.writeString(environment.configDir().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); safeAwait(latch); assertCredentials(awsCredentialsProvider.getCredentials()); diff --git a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java index 51a223315644a..eca846f955bfd 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java @@ -158,7 +158,7 @@ private URL checkURL(URL urlToCheck) { if (normalizedUrl == null) { String logMessage = "The specified url [{}] doesn't start with any repository paths specified 
by the " + "path.repo setting or by {} setting: [{}] "; - logger.warn(logMessage, urlToCheck, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles()); + logger.warn(logMessage, urlToCheck, ALLOWED_URLS_SETTING.getKey(), environment.repoDirs()); String exceptionMessage = "file url [" + urlToCheck + "] doesn't match any of the locations specified by path.repo or " diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index d825ec0a83f53..0158384b47aa4 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -54,7 +54,6 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.http.HttpBodyTracer; -import org.elasticsearch.http.HttpHandlingSettings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.plugins.ActionPlugin; @@ -93,10 +92,15 @@ @ESIntegTestCase.ClusterScope(numDataNodes = 1) public class Netty4IncrementalRequestHandlingIT extends ESNetty4IntegTestCase { + private static final int MAX_CONTENT_LENGTH = ByteSizeUnit.MB.toIntBytes(50); + @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); - builder.put(HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), ByteSizeValue.of(50, ByteSizeUnit.MB)); + builder.put( + HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), + ByteSizeValue.of(MAX_CONTENT_LENGTH, ByteSizeUnit.BYTES) + ); return builder.build(); } @@ -135,7 +139,7 @@ public void testReceiveAllChunks() throws Exception { var opaqueId = opaqueId(reqNo); // this dataset will be compared with one on server side - var dataSize = randomIntBetween(1024, maxContentLength()); + var dataSize = randomIntBetween(1024, MAX_CONTENT_LENGTH); var sendData = Unpooled.wrappedBuffer(randomByteArrayOfLength(dataSize)); sendData.retain(); ctx.clientChannel.writeAndFlush(fullHttpRequest(opaqueId, sendData)); @@ -243,7 +247,7 @@ public void testServerExceptionMidStream() throws Exception { public void testClientBackpressure() throws Exception { try (var ctx = setupClientCtx()) { var opaqueId = opaqueId(0); - var payloadSize = maxContentLength(); + var payloadSize = MAX_CONTENT_LENGTH; var totalParts = 10; var partSize = payloadSize / totalParts; ctx.clientChannel.writeAndFlush(httpRequest(opaqueId, payloadSize)); @@ -285,7 +289,7 @@ public void test100Continue() throws Exception { try (var ctx = setupClientCtx()) { for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var acceptableContentLength = randomIntBetween(0, maxContentLength()); + var acceptableContentLength = randomIntBetween(0, MAX_CONTENT_LENGTH); // send request header and await 100-continue var req = httpRequest(id, acceptableContentLength); @@ -317,7 +321,7 @@ public void test413TooLargeOnExpect100Continue() throws Exception { try (var ctx = setupClientCtx()) { for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var oversized = maxContentLength() + 1; 
+ var oversized = MAX_CONTENT_LENGTH + 1; // send request header and await 413 too large var req = httpRequest(id, oversized); @@ -333,32 +337,28 @@ public void test413TooLargeOnExpect100Continue() throws Exception { } } - // ensures that oversized chunked encoded request has no limits at http layer - // rest handler is responsible for oversized requests - public void testOversizedChunkedEncodingNoLimits() throws Exception { + // ensures that oversized chunked encoded request has maxContentLength limit and returns 413 + public void testOversizedChunkedEncoding() throws Exception { try (var ctx = setupClientCtx()) { - for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { - var id = opaqueId(reqNo); - var contentSize = maxContentLength() + 1; - var content = randomByteArrayOfLength(contentSize); - var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content)); - var chunkedIs = new ChunkedStream(is); - var httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT); - var req = httpRequest(id, 0); - HttpUtil.setTransferEncodingChunked(req, true); - - ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler()); - ctx.clientChannel.writeAndFlush(req); - ctx.clientChannel.writeAndFlush(httpChunkedIs); - var handler = ctx.awaitRestChannelAccepted(id); - var consumed = handler.readAllBytes(); - assertEquals(contentSize, consumed); - handler.sendResponse(new RestResponse(RestStatus.OK, "")); - - var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue); - assertEquals(HttpResponseStatus.OK, resp.status()); - resp.release(); - } + var id = opaqueId(0); + var contentSize = MAX_CONTENT_LENGTH + 1; + var content = randomByteArrayOfLength(contentSize); + var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content)); + var chunkedIs = new ChunkedStream(is); + var httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT); + var req = httpRequest(id, 0); + HttpUtil.setTransferEncodingChunked(req, true); + + ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler()); + ctx.clientChannel.writeAndFlush(req); + ctx.clientChannel.writeAndFlush(httpChunkedIs); + var handler = ctx.awaitRestChannelAccepted(id); + var consumed = handler.readAllBytes(); + assertTrue(consumed <= MAX_CONTENT_LENGTH); + + var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + resp.release(); } } @@ -369,7 +369,7 @@ public void testBadRequestReleaseQueuedChunks() throws Exception { try (var ctx = setupClientCtx()) { for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var contentSize = randomIntBetween(0, maxContentLength()); + var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH); var req = httpRequest(id, contentSize); var content = randomContent(contentSize, true); @@ -405,7 +405,7 @@ public void testHttpClientStats() throws Exception { for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var contentSize = randomIntBetween(0, maxContentLength()); + var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH); totalBytesSent += contentSize; ctx.clientChannel.writeAndFlush(httpRequest(id, contentSize)); ctx.clientChannel.writeAndFlush(randomContent(contentSize, true)); @@ -485,10 +485,6 @@ private void assertHttpBodyLogging(Function test) throws Exceptio } } - private int maxContentLength() { - return HttpHandlingSettings.fromSettings(internalCluster().getInstance(Settings.class)).maxContentLength(); 
- } - private String opaqueId(int reqNo) { return getTestName() + "-" + reqNo; } @@ -658,14 +654,22 @@ void sendResponse(RestResponse response) { int readBytes(int bytes) { var consumed = 0; if (recvLast == false) { - while (consumed < bytes) { - stream.next(); - var recvChunk = safePoll(recvChunks); - consumed += recvChunk.chunk.length(); - recvChunk.chunk.close(); - if (recvChunk.isLast) { - recvLast = true; - break; + stream.next(); + while (consumed < bytes && streamClosed == false) { + try { + var recvChunk = recvChunks.poll(10, TimeUnit.MILLISECONDS); + if (recvChunk != null) { + consumed += recvChunk.chunk.length(); + recvChunk.chunk.close(); + if (recvChunk.isLast) { + recvLast = true; + break; + } + stream.next(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new AssertionError(e); } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java index 021ce09e0ed8e..0294b4626496c 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java @@ -11,13 +11,10 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.FullHttpRequest; -import io.netty.handler.codec.http.FullHttpResponse; -import io.netty.handler.codec.http.HttpContent; import io.netty.handler.codec.http.HttpObject; import io.netty.handler.codec.http.HttpObjectAggregator; import io.netty.handler.codec.http.HttpRequest; -import io.netty.handler.codec.http.HttpResponseStatus; -import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpRequestDecoder; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.http.netty4.internal.HttpHeadersAuthenticatorUtils; @@ -27,18 +24,19 @@ /** * A wrapper around {@link HttpObjectAggregator}. Provides optional content aggregation based on * predicate. {@link HttpObjectAggregator} also handles Expect: 100-continue and oversized content. - * Unfortunately, Netty does not provide handlers for oversized messages beyond HttpObjectAggregator. + * Provides content size handling for non-aggregated requests too. 
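+ * Non-aggregated messages are delegated to the wrapped {@link Netty4HttpContentSizeHandler} instead.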
*/ public class Netty4HttpAggregator extends HttpObjectAggregator { private static final Predicate IGNORE_TEST = (req) -> req.uri().startsWith("/_test/request-stream") == false; private final Predicate decider; + private final Netty4HttpContentSizeHandler streamContentSizeHandler; private boolean aggregating = true; - private boolean ignoreContentAfterContinueResponse = false; - public Netty4HttpAggregator(int maxContentLength, Predicate decider) { + public Netty4HttpAggregator(int maxContentLength, Predicate decider, HttpRequestDecoder decoder) { super(maxContentLength); this.decider = decider; + this.streamContentSizeHandler = new Netty4HttpContentSizeHandler(decoder, maxContentLength); } @Override @@ -51,34 +49,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception if (aggregating || msg instanceof FullHttpRequest) { super.channelRead(ctx, msg); } else { - handle(ctx, (HttpObject) msg); - } - } - - private void handle(ChannelHandlerContext ctx, HttpObject msg) { - if (msg instanceof HttpRequest request) { - var continueResponse = newContinueResponse(request, maxContentLength(), ctx.pipeline()); - if (continueResponse != null) { - // there are 3 responses expected: 100, 413, 417 - // on 100 we pass request further and reply to client to continue - // on 413/417 we ignore following content - ctx.writeAndFlush(continueResponse); - var resp = (FullHttpResponse) continueResponse; - if (resp.status() != HttpResponseStatus.CONTINUE) { - ignoreContentAfterContinueResponse = true; - return; - } - HttpUtil.set100ContinueExpected(request, false); - } - ignoreContentAfterContinueResponse = false; - ctx.fireChannelRead(msg); - } else { - var httpContent = (HttpContent) msg; - if (ignoreContentAfterContinueResponse) { - httpContent.release(); - } else { - ctx.fireChannelRead(msg); - } + streamContentSizeHandler.channelRead(ctx, msg); } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java new file mode 100644 index 0000000000000..fee9d227d8310 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.DefaultHttpHeaders; +import io.netty.handler.codec.http.EmptyHttpHeaders; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpContent; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaderValues; +import io.netty.handler.codec.http.HttpObject; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpRequestDecoder; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpVersion; + +import org.elasticsearch.core.SuppressForbidden; + +import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION; +import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; + +/** + * Provides handling for 'Expect' header and content size. Implements HTTP1.1 spec. + * Allows {@code Expect: 100-continue} header only. Other 'Expect' headers will be rejected with + * {@code 417 Expectation Failed} reason. + *
+ * Replies {@code 100 Continue} to requests with allowed maxContentLength.
+ * <p>
+ * Replies {@code 413 Request Entity Too Large} when content size exceeds maxContentLength.
+ * <p>
+ * Channel can be reused for requests with "Expect: 100-Continue" header that exceed allowed content length,
+ * as long as the request does not include content. If an oversized request already contains content then
+ * we cannot safely proceed and the connection will be closed.
+ * <p>
+ * TODO: move to RestController to allow content limits per RestHandler.
+ * Ideally we should be able to handle Continue and oversized requests in the RestController:
+ * <ul>
+ * <li>100 Continue is an interim response, meaning RestChannel will send 2 responses for a single request. See
+ * rfc9110.html#status.100</li>
+ * <li>RestChannel should be able to close the underlying HTTP channel connection.</li>
+ * </ul>
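+ * <p>
+ * An illustrative exchange (a sketch only; the paths and sizes are hypothetical, not taken from this patch):
+ * <pre>
+ * C: PUT /idx/_doc/1 HTTP/1.1
+ * C: Expect: 100-continue
+ * C: Content-Length: 100              (within maxContentLength)
+ * S: HTTP/1.1 100 Continue
+ * C: ... 100 bytes of content ...
+ * S: HTTP/1.1 200 OK
+ *
+ * C: PUT /idx/_doc/2 HTTP/1.1
+ * C: Expect: 100-continue
+ * C: Content-Length: 999999999        (exceeds maxContentLength)
+ * S: HTTP/1.1 413 Request Entity Too Large
+ * </pre>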
+ */ +@SuppressForbidden(reason = "use of default ChannelFutureListener's CLOSE and CLOSE_ON_FAILURE") +public class Netty4HttpContentSizeHandler extends ChannelInboundHandlerAdapter { + + // copied from netty's HttpObjectAggregator + static final FullHttpResponse CONTINUE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.CONTINUE, + Unpooled.EMPTY_BUFFER + ); + static final FullHttpResponse EXPECTATION_FAILED_CLOSE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.EXPECTATION_FAILED, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0).add(CONNECTION, HttpHeaderValues.CLOSE), + EmptyHttpHeaders.INSTANCE + ); + static final FullHttpResponse TOO_LARGE_CLOSE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0).add(CONNECTION, HttpHeaderValues.CLOSE), + EmptyHttpHeaders.INSTANCE + ); + static final FullHttpResponse TOO_LARGE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0), + EmptyHttpHeaders.INSTANCE + ); + + private final int maxContentLength; + private final HttpRequestDecoder decoder; // need to reset decoder after sending 413 + private int currentContentLength; // chunked encoding does not provide content length, need to track actual length + private boolean ignoreContent; + + public Netty4HttpContentSizeHandler(HttpRequestDecoder decoder, int maxContentLength) { + this.maxContentLength = maxContentLength; + this.decoder = decoder; + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) { + assert msg instanceof HttpObject; + if (msg instanceof HttpRequest request) { + handleRequest(ctx, request); + } else { + handleContent(ctx, (HttpContent) msg); + } + } + + private void handleRequest(ChannelHandlerContext ctx, HttpRequest request) { + ignoreContent = true; + if (request.decoderResult().isFailure()) { + ctx.fireChannelRead(request); + return; + } + + final var expectValue = request.headers().get(HttpHeaderNames.EXPECT); + boolean isContinueExpected = false; + // Only "Expect: 100-Continue" header is supported + if (expectValue != null) { + if (HttpHeaderValues.CONTINUE.toString().equalsIgnoreCase(expectValue)) { + isContinueExpected = true; + } else { + ctx.writeAndFlush(EXPECTATION_FAILED_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); + return; + } + } + + boolean isOversized = HttpUtil.getContentLength(request, -1) > maxContentLength; + if (isOversized) { + if (isContinueExpected) { + // Client is allowed to send content without waiting for Continue. 
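+ // (i.e. it may have optimistically started streaming the body before it reads our 413)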
+ // See https://www.rfc-editor.org/rfc/rfc9110.html#section-10.1.1-11.3 + // this content will result in an HttpRequestDecoder failure and be sent downstream + decoder.reset(); + } + ctx.writeAndFlush(TOO_LARGE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE_ON_FAILURE); + } else { + ignoreContent = false; + currentContentLength = 0; + if (isContinueExpected) { + ctx.writeAndFlush(CONTINUE.retainedDuplicate()); + HttpUtil.set100ContinueExpected(request, false); + } + ctx.fireChannelRead(request); + } + } + + private void handleContent(ChannelHandlerContext ctx, HttpContent msg) { + if (ignoreContent) { + msg.release(); + } else { + currentContentLength += msg.content().readableBytes(); + if (currentContentLength > maxContentLength) { + msg.release(); + ctx.writeAndFlush(TOO_LARGE_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); + } else { + ctx.fireChannelRead(msg); + } + } + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 36c860f1fb90b..9ffa4b479be17 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -381,7 +381,8 @@ protected HttpMessage createMessage(String[] initialLine) throws Exception { handlingSettings.maxContentLength(), httpPreRequest -> enabled.get() == false || ((httpPreRequest.rawPath().endsWith("/_bulk") == false) - || httpPreRequest.rawPath().startsWith("/_xpack/monitoring/_bulk")) + || httpPreRequest.rawPath().startsWith("/_xpack/monitoring/_bulk")), + decoder ); aggregator.setMaxCumulationBufferComponents(transport.maxCompositeBufferComponents); ch.pipeline() diff --git a/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml b/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml index eb772a06423a3..1562b806a82d8 100644 --- a/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,6 +1,8 @@ io.netty.transport: - inbound_network - outbound_network + - manage_threads io.netty.common: - inbound_network - outbound_network + - manage_threads diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java new file mode 100644 index 0000000000000..36399c8d6d7a5 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java @@ -0,0 +1,240 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.embedded.EmbeddedChannel; +import io.netty.handler.codec.http.DefaultHttpContent; +import io.netty.handler.codec.http.DefaultHttpRequest; +import io.netty.handler.codec.http.DefaultLastHttpContent; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpContent; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaderValues; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpObject; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpRequestDecoder; +import io.netty.handler.codec.http.HttpRequestEncoder; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.LastHttpContent; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; + +public class Netty4HttpContentSizeHandlerTests extends ESTestCase { + + private static final int MAX_CONTENT_LENGTH = 1024; + private static final int OVERSIZED_LENGTH = MAX_CONTENT_LENGTH + 1; + private static final int REPS = 1000; + private EmbeddedChannel channel; + private EmbeddedChannel encoder; // channel to encode HTTP objects into bytes + + private static HttpContent httpContent(int size) { + return new DefaultHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(size))); + } + + private static LastHttpContent lastHttpContent(int size) { + return new DefaultLastHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(size))); + } + + private HttpRequest httpRequest() { + return new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/"); + } + + // encodes multiple HTTP objects into a single ByteBuf + private ByteBuf encode(HttpObject... objs) { + var out = Unpooled.compositeBuffer(); + Arrays.stream(objs).forEach(encoder::writeOutbound); + while (encoder.outboundMessages().isEmpty() == false) { + out.addComponent(true, encoder.readOutbound()); + } + return out; + } + + @Override + public void setUp() throws Exception { + super.setUp(); + var decoder = new HttpRequestDecoder(); + encoder = new EmbeddedChannel(new HttpRequestEncoder()); + channel = new EmbeddedChannel(decoder, new Netty4HttpContentSizeHandler(decoder, MAX_CONTENT_LENGTH)); + } + + /** + * Assert that handler replies 100-continue for an acceptable request and passes the request further. + */ + public void testContinue() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + channel.writeInbound(encode(sendRequest)); + assertEquals("should send back 100-continue", Netty4HttpContentSizeHandler.CONTINUE, channel.readOutbound()); + var recvRequest = (HttpRequest) channel.readInbound(); + assertNotNull(recvRequest); + assertFalse(HttpUtil.is100ContinueExpected(recvRequest)); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); + assertEquals(LastHttpContent.EMPTY_LAST_CONTENT, channel.readInbound()); + } + } + + /** + * Assert that handler passes through an acceptable request.
+ */ + public void testWithoutContinue() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + channel.writeInbound(encode(sendRequest)); + assertNull("should not receive response", channel.readOutbound()); + assertNotNull("request should pass", channel.readInbound()); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); + assertEquals(LastHttpContent.EMPTY_LAST_CONTENT, channel.readInbound()); + } + } + + /** + * Assert that handler passes through request and content for an acceptable request. + */ + public void testContinueWithContent() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + HttpUtil.setContentLength(sendRequest, MAX_CONTENT_LENGTH); + var sendContent = lastHttpContent(MAX_CONTENT_LENGTH); + channel.writeInbound(encode(sendRequest, sendContent)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals("should send back 100-continue", Netty4HttpContentSizeHandler.CONTINUE, resp); + resp.release(); + var recvRequest = (HttpRequest) channel.readInbound(); + assertNotNull(recvRequest); + var recvContent = (HttpContent) channel.readInbound(); + assertNotNull(recvContent); + assertEquals(MAX_CONTENT_LENGTH, recvContent.content().readableBytes()); + recvContent.release(); + } + } + + /** + * Assert that handler returns 417 Expectation Failed and closes the channel on a request + * with an "Expect" header other than "100-Continue". + */ + public void testExpectationFailed() { + var sendRequest = httpRequest(); + sendRequest.headers().set(HttpHeaderNames.EXPECT, randomValueOtherThan(HttpHeaderValues.CONTINUE, ESTestCase::randomIdentifier)); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.EXPECTATION_FAILED, resp.status()); + assertFalse(channel.isOpen()); + resp.release(); + } + + /** + * Assert that handler returns 413 Request Entity Too Large for an oversized request + * and does not close the channel if no content follows. + */ + public void testEntityTooLarge() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest, LastHttpContent.EMPTY_LAST_CONTENT)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + assertNull("request should not pass", channel.readInbound()); + assertTrue("should not close channel", channel.isOpen()); + resp.release(); + } + } + + /** + * Mixed load of oversized and normal requests with Expect: 100-Continue.
+ */ + public void testMixedContent() { + for (int i = 0; i < REPS; i++) { + var isOversized = randomBoolean(); + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + if (isOversized) { + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); // terminate + assertNull(channel.readInbound()); + resp.release(); + } else { + var normalSize = between(1, MAX_CONTENT_LENGTH); + HttpUtil.setContentLength(sendRequest, normalSize); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.CONTINUE, resp.status()); + resp.release(); + var sendContent = lastHttpContent(normalSize); + channel.writeInbound(encode(sendContent)); + var recvRequest = (HttpRequest) channel.readInbound(); + var recvContent = (LastHttpContent) channel.readInbound(); + assertEquals("content length header should match", normalSize, HttpUtil.getContentLength(recvRequest)); + assertFalse("should remove expect header", HttpUtil.is100ContinueExpected(recvRequest)); + assertEquals("actual content size should match", normalSize, recvContent.content().readableBytes()); + recvContent.release(); + } + } + } + + /** + * Assert that handler returns 413 Request Entity Too Large and skips the following content. + */ + public void testEntityTooLargeWithContentWithoutExpect() { + for (int i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + var unexpectedContent = lastHttpContent(OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest, unexpectedContent)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + resp.release(); + assertNull("request and content should not pass", channel.readInbound()); + assertTrue("should not close channel", channel.isOpen()); + } + } + + /** + * Assert that handler returns 413 Request Entity Too Large and closes the channel for oversized + * requests with chunked content.
+ */ + public void testEntityTooLargeWithChunkedContent() { + var sendRequest = httpRequest(); + HttpUtil.setTransferEncodingChunked(sendRequest, true); + channel.writeInbound(encode(sendRequest)); + assertTrue("request should pass", channel.readInbound() instanceof HttpRequest); + + int contentBytesSent = 0; + do { + var thisPartSize = between(1, MAX_CONTENT_LENGTH * 2); + channel.writeInbound(encode(httpContent(thisPartSize))); + contentBytesSent += thisPartSize; + + if (contentBytesSent <= MAX_CONTENT_LENGTH) { + ((HttpContent) channel.readInbound()).release(); + } else { + break; + } + } while (true); + + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals("should respond with 413", HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + assertFalse("should close channel", channel.isOpen()); + resp.release(); + } + +} diff --git a/muted-tests.yml b/muted-tests.yml index 0684e17e16adb..1810e98f6ca08 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -46,9 +46,6 @@ tests: - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test81JavaOptsInJvmOptions issue: https://github.com/elastic/elasticsearch/issues/113313 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=mtermvectors/10_basic/Tests catching other exceptions per item} - issue: https://github.com/elastic/elasticsearch/issues/113325 - class: org.elasticsearch.xpack.transform.integration.TransformIT method: testStopWaitForCheckpoint issue: https://github.com/elastic/elasticsearch/issues/106113 @@ -107,9 +104,6 @@ tests: - class: org.elasticsearch.discovery.ClusterDisruptionIT method: testAckedIndexing issue: https://github.com/elastic/elasticsearch/issues/117024 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=snapshot/10_basic/Create a source only snapshot and then restore it} - issue: https://github.com/elastic/elasticsearch/issues/117295 - class: org.elasticsearch.xpack.inference.InferenceRestIT method: test {p0=inference/40_semantic_text_query/Query a field that uses the default ELSER 2 endpoint} issue: https://github.com/elastic/elasticsearch/issues/117027 @@ -125,15 +119,9 @@ tests: - class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT method: test {p0=search.highlight/50_synthetic_source/text multi unified from vectors} issue: https://github.com/elastic/elasticsearch/issues/117815 -- class: org.elasticsearch.xpack.esql.plugin.ClusterRequestTests - method: testFallbackIndicesOptions - issue: https://github.com/elastic/elasticsearch/issues/117937 - class: org.elasticsearch.xpack.ml.integration.RegressionIT method: testTwoJobsWithSameRandomizeSeedUseSameTrainingSet issue: https://github.com/elastic/elasticsearch/issues/117805 -- class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT - method: testCancelRequestWhenFailingFetchingPages - issue: https://github.com/elastic/elasticsearch/issues/118193 - class: org.elasticsearch.packaging.test.ArchiveTests method: test44AutoConfigurationNotTriggeredOnNotWriteableConfDir issue: https://github.com/elastic/elasticsearch/issues/118208 @@ -143,28 +131,17 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/120_data_streams_stats/Multiple data stream} issue: https://github.com/elastic/elasticsearch/issues/118217 -- class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests - method: testBottomFieldSort - issue: https://github.com/elastic/elasticsearch/issues/118214 - class: 
org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests method: testSearcherId issue: https://github.com/elastic/elasticsearch/issues/118374 -- class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT - issue: https://github.com/elastic/elasticsearch/issues/118238 - class: org.elasticsearch.xpack.ccr.rest.ShardChangesRestIT method: testShardChangesNoOperation issue: https://github.com/elastic/elasticsearch/issues/118800 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/indices/shard-stores/line_150} - issue: https://github.com/elastic/elasticsearch/issues/118896 - class: org.elasticsearch.cluster.service.MasterServiceTests method: testThreadContext issue: https://github.com/elastic/elasticsearch/issues/118914 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryRunAsIT issue: https://github.com/elastic/elasticsearch/issues/115727 -- class: org.elasticsearch.xpack.esql.action.EsqlNodeFailureIT - method: testFailureLoadingFields - issue: https://github.com/elastic/elasticsearch/issues/118000 - class: org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapperTests method: testCartesianBoundsBlockLoader issue: https://github.com/elastic/elasticsearch/issues/119201 @@ -221,25 +198,11 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120668 - class: org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests issue: https://github.com/elastic/elasticsearch/issues/119882 -- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncEnrichStopIT - method: testEnrichAfterStop - issue: https://github.com/elastic/elasticsearch/issues/120757 -- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT - method: testStopQuery - issue: https://github.com/elastic/elasticsearch/issues/120767 -- class: org.elasticsearch.search.fieldcaps.FieldCapabilitiesIT - issue: https://github.com/elastic/elasticsearch/issues/120772 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120810 - class: org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT issue: https://github.com/elastic/elasticsearch/issues/116126 -- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT - method: test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression} - issue: https://github.com/elastic/elasticsearch/issues/120890 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=ml/inference_crud/*} - issue: https://github.com/elastic/elasticsearch/issues/120816 - class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT method: testAuthenticateShouldNotFallThroughInCaseOfFailure issue: https://github.com/elastic/elasticsearch/issues/120902 @@ -249,52 +212,177 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test140CgroupOsStatsAreAvailable issue: https://github.com/elastic/elasticsearch/issues/120914 -- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT - method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias} - issue: https://github.com/elastic/elasticsearch/issues/120920 - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT method: testReservedStatePersistsOnRestart issue: https://github.com/elastic/elasticsearch/issues/120923 - class: org.elasticsearch.packaging.test.DockerTests 
method: test070BindMountCustomPathConfAndJvmOptions issue: https://github.com/elastic/elasticsearch/issues/120910 -- class: org.elasticsearch.xpack.esql.qa.multi_node.RestEsqlIT - issue: https://github.com/elastic/elasticsearch/issues/120948 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} - issue: https://github.com/elastic/elasticsearch/issues/120950 - class: org.elasticsearch.packaging.test.DockerTests method: test071BindMountCustomPathWithDifferentUID issue: https://github.com/elastic/elasticsearch/issues/120918 - class: org.elasticsearch.packaging.test.DockerTests method: test171AdditionalCliOptionsAreForwarded issue: https://github.com/elastic/elasticsearch/issues/120925 -- class: org.elasticsearch.xpack.inference.InferenceGetServicesIT - issue: https://github.com/elastic/elasticsearch/issues/120986 - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithQuery issue: https://github.com/elastic/elasticsearch/issues/120994 -- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT - method: test {p0=data_stream/80_resolve_index_data_streams/Resolve index with hidden and closed indices} - issue: https://github.com/elastic/elasticsearch/issues/120965 -- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT - method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias with filter} - issue: https://github.com/elastic/elasticsearch/issues/121014 -- class: org.elasticsearch.xpack.esql.parser.StatementParserTests - method: testNamedFunctionArgumentInMap - issue: https://github.com/elastic/elasticsearch/issues/121020 -- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT - method: testCrossClusterAsyncQuery - issue: https://github.com/elastic/elasticsearch/issues/121021 -- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT - method: testCrossClusterAsyncQueryStop - issue: https://github.com/elastic/elasticsearch/issues/121021 - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSuggestProfilesWithName issue: https://github.com/elastic/elasticsearch/issues/121022 -- class: org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilterIT - method: testBulkOperations {p0=true} - issue: https://github.com/elastic/elasticsearch/issues/120969 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testProfileAPIsWhenIndexNotCreated + issue: https://github.com/elastic/elasticsearch/issues/121096 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testGetProfiles + issue: https://github.com/elastic/elasticsearch/issues/121101 +- class: org.elasticsearch.xpack.security.authc.service.ServiceAccountSingleNodeTests + method: testAuthenticateWithServiceFileToken + issue: https://github.com/elastic/elasticsearch/issues/120988 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testUpdateProfileData + issue: https://github.com/elastic/elasticsearch/issues/121108 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} + issue: https://github.com/elastic/elasticsearch/issues/120950 +- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests + method: testActivateProfileForJWT + issue: 
https://github.com/elastic/elasticsearch/issues/120983 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testProfileIndexAutoCreation + issue: https://github.com/elastic/elasticsearch/issues/120987 +- class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT + method: testFileSettingsReprocessedOnRestartWithoutVersionChange + issue: https://github.com/elastic/elasticsearch/issues/120964 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testGetUsersWithProfileUidWhenProfileIndexDoesNotExists + issue: https://github.com/elastic/elasticsearch/issues/121179 +- class: org.elasticsearch.xpack.ml.integration.PyTorchModelIT + issue: https://github.com/elastic/elasticsearch/issues/121165 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testSetEnabled + issue: https://github.com/elastic/elasticsearch/issues/121183 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=transform/*} + issue: https://github.com/elastic/elasticsearch/issues/120816 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/*} + issue: https://github.com/elastic/elasticsearch/issues/120816 +- class: org.elasticsearch.upgrades.VectorSearchIT + method: testBBQVectorSearch {upgradedNodes=0} + issue: https://github.com/elastic/elasticsearch/issues/121253 +- class: org.elasticsearch.upgrades.VectorSearchIT + method: testBBQVectorSearch {upgradedNodes=1} + issue: https://github.com/elastic/elasticsearch/issues/121271 +- class: org.elasticsearch.upgrades.VectorSearchIT + method: testBBQVectorSearch {upgradedNodes=2} + issue: https://github.com/elastic/elasticsearch/issues/121272 +- class: org.elasticsearch.upgrades.VectorSearchIT + method: testBBQVectorSearch {upgradedNodes=3} + issue: https://github.com/elastic/elasticsearch/issues/121273 +- class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactoryTests + issue: https://github.com/elastic/elasticsearch/issues/121285 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_357} + issue: https://github.com/elastic/elasticsearch/issues/121287 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/index-modules/slowlog/line_102} + issue: https://github.com/elastic/elasticsearch/issues/121288 +- class: org.elasticsearch.env.NodeEnvironmentTests + method: testGetBestDowngradeVersion + issue: https://github.com/elastic/elasticsearch/issues/121316 +- class: org.elasticsearch.index.engine.ShuffleForcedMergePolicyTests + method: testDiagnostics + issue: https://github.com/elastic/elasticsearch/issues/121336 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/rest-api/security/invalidate-tokens/line_194} + issue: https://github.com/elastic/elasticsearch/issues/121337 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/rest-api/common-options/line_125} + issue: https://github.com/elastic/elasticsearch/issues/121338 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_751} + issue: https://github.com/elastic/elasticsearch/issues/121345 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testHasPrivileges + issue: https://github.com/elastic/elasticsearch/issues/121346 +- class: 
org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testActivateProfile + issue: https://github.com/elastic/elasticsearch/issues/121151 +- class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/121407 +- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests + method: testClientSecretRotation + issue: https://github.com/elastic/elasticsearch/issues/120985 +- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests + method: testGrantApiKeyForJWT + issue: https://github.com/elastic/elasticsearch/issues/121039 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testGetUsersWithProfileUid + issue: https://github.com/elastic/elasticsearch/issues/121483 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testSuggestProfilesWithHint + issue: https://github.com/elastic/elasticsearch/issues/121116 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testSuggestProfileWithData + issue: https://github.com/elastic/elasticsearch/issues/121258 +- class: org.elasticsearch.ingest.geoip.FullClusterRestartIT + method: testGeoIpSystemFeaturesMigration {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/121115 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} + issue: https://github.com/elastic/elasticsearch/issues/121412 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/cat/health/cat-health-no-timestamp-example} + issue: https://github.com/elastic/elasticsearch/issues/121867 +- class: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT + method: test {yaml=analysis-common/40_token_filters/stemmer_override file access} + issue: https://github.com/elastic/elasticsearch/issues/121625 +- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT + issue: https://github.com/elastic/elasticsearch/issues/121967 +- class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT + issue: https://github.com/elastic/elasticsearch/issues/121537 +- class: org.elasticsearch.xpack.restart.FullClusterRestartIT + method: testWatcherWithApiKey {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/122061 +- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT + method: test {yaml=snapshot.delete/10_basic/Delete a snapshot asynchronously} + issue: https://github.com/elastic/elasticsearch/issues/122102 +- class: org.elasticsearch.search.SearchCancellationIT + method: testCancelFailedSearchWhenPartialResultDisallowed + issue: https://github.com/elastic/elasticsearch/issues/121719 +- class: org.elasticsearch.datastreams.TSDBPassthroughIndexingIT + issue: https://github.com/elastic/elasticsearch/issues/121716 +- class: org.elasticsearch.smoketest.SmokeTestMonitoringWithSecurityIT + method: testHTTPExporterWithSSL + issue: https://github.com/elastic/elasticsearch/issues/122220 +- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryStopIT + method: testStopQueryLocal + issue: https://github.com/elastic/elasticsearch/issues/121672 +- class: org.elasticsearch.xpack.security.authz.IndexAliasesTests + method: testRemoveIndex + issue: https://github.com/elastic/elasticsearch/issues/122221 +- class: org.elasticsearch.blocks.SimpleBlocksIT + method: testConcurrentAddBlock + 
issue: https://github.com/elastic/elasticsearch/issues/122324 +- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.HdfsSearchableSnapshotsIT + issue: https://github.com/elastic/elasticsearch/issues/122024 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/cat/health/cat-health-example} + issue: https://github.com/elastic/elasticsearch/issues/122335 +- class: org.elasticsearch.xpack.esql.action.CrossClusterCancellationIT + method: testCloseSkipUnavailable + issue: https://github.com/elastic/elasticsearch/issues/122336 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/alias/line_260} + issue: https://github.com/elastic/elasticsearch/issues/122343 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_488} + issue: https://github.com/elastic/elasticsearch/issues/121611 +- class: org.elasticsearch.repositories.blobstore.testkit.analyze.SecureHdfsRepositoryAnalysisRestIT + issue: https://github.com/elastic/elasticsearch/issues/122377 +- class: org.elasticsearch.repositories.blobstore.testkit.analyze.HdfsRepositoryAnalysisRestIT + issue: https://github.com/elastic/elasticsearch/issues/122378 # Examples: # diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java index fe0b3a00b2bbb..6854984c49c26 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java @@ -51,7 +51,7 @@ public IcuCollationTokenFilterFactory(IndexSettings indexSettings, Environment e if (rules != null) { Exception failureToResolve = null; try { - rules = Streams.copyToString(Files.newBufferedReader(environment.configFile().resolve(rules), Charset.forName("UTF-8"))); + rules = Streams.copyToString(Files.newBufferedReader(environment.configDir().resolve(rules), Charset.forName("UTF-8"))); } catch (IOException | SecurityException | InvalidPathException e) { failureToResolve = e; } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java index c66d25ffa2f3b..4a0ead6a893e8 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java @@ -99,7 +99,7 @@ public RuleBasedBreakIterator getBreakIterator(int script) { // parse a single RBBi rule file private static BreakIterator parseRules(String filename, Environment env) throws IOException { - final Path path = env.configFile().resolve(filename); + final Path path = env.configDir().resolve(filename); String rules = Files.readAllLines(path).stream().filter((v) -> v.startsWith("#") == false).collect(Collectors.joining("\n")); return new RuleBasedBreakIterator(rules.toString()); diff --git a/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml index df557f9944253..394e5e38d9f59 100644 --- a/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml +++ 
b/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,3 @@ ALL-UNNAMED: + - manage_threads - outbound_network diff --git a/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java b/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java index 243201f632c8f..a085973e82b0a 100644 --- a/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java +++ b/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java @@ -70,7 +70,7 @@ public class ExampleCustomSettingsConfig { public ExampleCustomSettingsConfig(final Environment environment) { // Elasticsearch config directory - final Path configDir = environment.configFile(); + final Path configDir = environment.configDir(); // Resolve the plugin's custom settings file final Path customSettingsYamlFile = configDir.resolve("custom-settings/custom.yml"); diff --git a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java index 523fdc51f6423..5c4580cac4f27 100644 --- a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java +++ b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse.Indices; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -85,10 +86,13 @@ public void authorizeClusterAction(RequestInfo requestInfo, AuthorizationInfo au } @Override - public void authorizeIndexAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, - AsyncSupplier indicesAsyncSupplier, - Map aliasOrIndexLookup, - ActionListener listener) { + public void authorizeIndexAction( + RequestInfo requestInfo, + AuthorizationInfo authorizationInfo, + AsyncSupplier indicesAsyncSupplier, + Metadata metadata, + ActionListener listener + ) { if (isSuperuser(requestInfo.getAuthentication().getEffectiveSubject().getUser())) { indicesAsyncSupplier.getAsync(ActionListener.wrap(resolvedIndices -> { Map indexAccessControlMap = new HashMap<>(); diff --git a/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java b/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java index dbf797e3d0899..d57af86531865 100644 --- a/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java +++ b/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexAbstraction.ConcreteIndex; import org.elasticsearch.cluster.metadata.IndexMetadata; +import 
org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.ESTestCase; @@ -31,6 +32,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.stream.Stream; import static org.hamcrest.Matchers.is; @@ -117,12 +119,13 @@ public void testAuthorizeClusterAction() { public void testAuthorizeIndexAction() { CustomAuthorizationEngine engine = new CustomAuthorizationEngine(); - Map indicesMap = new HashMap<>(); - indicesMap.put("index", new ConcreteIndex(IndexMetadata.builder("index") - .settings(Settings.builder().put("index.version.created", IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .build(), null)); + Metadata metadata = Metadata.builder().put(IndexMetadata.builder("index") + .settings(Settings.builder().put("index.version.created", IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .build(), + false + ).build(); // authorized { RequestInfo requestInfo = @@ -136,7 +139,7 @@ public void testAuthorizeIndexAction() { PlainActionFuture resultFuture = new PlainActionFuture<>(); engine.authorizeIndexAction(requestInfo, authzInfo, listener -> listener.onResponse(new ResolvedIndices(Collections.singletonList("index"), Collections.emptyList())), - indicesMap, resultFuture); + metadata, resultFuture); IndexAuthorizationResult result = resultFuture.actionGet(); assertThat(result.isGranted(), is(true)); IndicesAccessControl indicesAccessControl = result.getIndicesAccessControl(); @@ -156,7 +159,7 @@ public void testAuthorizeIndexAction() { PlainActionFuture resultFuture = new PlainActionFuture<>(); engine.authorizeIndexAction(requestInfo, authzInfo, listener -> listener.onResponse(new ResolvedIndices(Collections.singletonList("index"), Collections.emptyList())), - indicesMap, resultFuture); + metadata, resultFuture); IndexAuthorizationResult result = resultFuture.actionGet(); assertThat(result.isGranted(), is(false)); IndicesAccessControl indicesAccessControl = result.getIndicesAccessControl(); diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 68b93db39646f..11dbf34f6c791 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -50,7 +50,7 @@ public SizeFieldMapper build() { private static class SizeFieldType extends NumberFieldType { SizeFieldType() { - super(NAME, NumberType.INTEGER, true, true, true, false, null, Collections.emptyMap(), null, false, null, null); + super(NAME, NumberType.INTEGER, true, true, true, false, null, Collections.emptyMap(), null, false, null, null, false); } @Override diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java index ce6acd79a0bb9..e74d1a87959f2 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java @@ -81,7 +81,7 @@ class HdfsSecurityContext { * Expects keytab file to exist at {@code $CONFIG_DIR$/repository-hdfs/krb5.keytab} */ static Path 
locateKeytabFile(Environment environment) { - Path keytabPath = environment.configFile().resolve("repository-hdfs").resolve("krb5.keytab"); + Path keytabPath = environment.configDir().resolve("repository-hdfs").resolve("krb5.keytab"); try { if (Files.exists(keytabPath) == false) { throw new RuntimeException("Could not locate keytab at [" + keytabPath + "]."); diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml index b5020dc1b7468..30e61739a0633 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml +++ b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,5 +1,7 @@ ALL-UNNAMED: + - manage_threads - outbound_network + - load_native_libraries - write_system_properties: properties: - hadoop.home.dir diff --git a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 780f3994ce627..d912ccbe07454 100644 --- a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -101,8 +101,8 @@ private static MockTransportService startTransport( TransportSearchAction.TYPE.name(), EsExecutors.DIRECT_EXECUTOR_SERVICE, SearchRequest::new, - (request, channel, task) -> channel.sendResponse( - new SearchResponse( + (request, channel, task) -> { + var searchResponse = new SearchResponse( SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), InternalAggregations.EMPTY, null, @@ -117,8 +117,13 @@ private static MockTransportService startTransport( 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY - ) - ) + ); + try { + channel.sendResponse(searchResponse); + } finally { + searchResponse.decRef(); + } + } ); newService.registerRequestHandler( ClusterStateAction.NAME, diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java index 08d7e3b45702b..bc8308f48e52d 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java @@ -103,23 +103,23 @@ public void testEnvironmentPaths() throws Exception { // check that all directories got permissions: // bin file: ro - assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.binDir().toString(), "read,readlink"), permissions); // lib file: ro - assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.libDir().toString(), "read,readlink"), permissions); // modules file: ro - assertExactPermissions(new FilePermission(environment.modulesFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.modulesDir().toString(), "read,readlink"), permissions); // config file: ro - assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new 
FilePermission(environment.configDir().toString(), "read,readlink"), permissions); // plugins: ro - assertExactPermissions(new FilePermission(environment.pluginsFile().toString(), "read,readlink"), permissions); + assertExactPermissions(new FilePermission(environment.pluginsDir().toString(), "read,readlink"), permissions); // data paths: r/w - for (Path dataPath : environment.dataFiles()) { + for (Path dataPath : environment.dataDirs()) { assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions); } - assertExactPermissions(new FilePermission(environment.sharedDataFile().toString(), "read,readlink,write,delete"), permissions); + assertExactPermissions(new FilePermission(environment.sharedDataDir().toString(), "read,readlink,write,delete"), permissions); // logs: r/w - assertExactPermissions(new FilePermission(environment.logsFile().toString(), "read,readlink,write,delete"), permissions); + assertExactPermissions(new FilePermission(environment.logsDir().toString(), "read,readlink,write,delete"), permissions); // temp dir: r/w assertExactPermissions(new FilePermission(fakeTmpDir.toString(), "read,readlink,write,delete"), permissions); } diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index 5e68c4d1ad26b..d23267bb352fc 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -7,6 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' @@ -20,6 +21,13 @@ buildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> } } +tasks.register("luceneBwcTest", StandaloneRestIntegTestTask) { + // We use a phony version here as the real version is provided via `tests.bwc.main.version` system property + usesBwcDistribution(Version.fromString("0.0.0")) + systemProperty("tests.old_cluster_version", "0.0.0") + onlyIf("tests.bwc.main.version system property exists") { System.getProperty("tests.bwc.main.version") != null } +} + tasks.withType(Test).configureEach { // CI doesn't like it when there's multiple clusters running at once maxParallelForks = 1 diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java index caa57f1e605a2..6a2fe9ec84528 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ObjectPath; import org.junit.ClassRule; import org.junit.rules.RuleChain; @@ -44,7 +45,7 @@ public class FullClusterRestartArchivedSettingsIT extends ParameterizedFullClust private static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("path.repo", () -> 
repoDirectory.getRoot().getPath()) .setting("xpack.security.enabled", "false") diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java index f907870fc8254..1d6254aed7045 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.junit.Before; import org.junit.ClassRule; import org.junit.rules.RuleChain; @@ -45,7 +46,7 @@ public class FullClusterRestartDownsampleIT extends ParameterizedFullClusterRest private static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("xpack.security.enabled", "false") .setting("indices.lifecycle.poll_interval", "5s") diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 0f41712abe927..a5e1041dab279 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.xcontent.ToXContent; @@ -103,7 +104,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas private static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("path.repo", () -> repoDirectory.getRoot().getPath()) .setting("xpack.security.enabled", "false") diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index 9866d94dccc3c..26006bc70b866 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.ClassRule; @@ -34,7 +35,7 @@ public class 
LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterR @ClassRule public static final ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .module("constant-keyword") .module("data-streams") .module("mapper-extras") @@ -120,6 +121,10 @@ protected ElasticsearchCluster getUpgradeCluster() { }"""; public void testLogsIndexing() throws IOException { + assumeTrue( + "otherwise first backing index of logs-apache-production will be in logsdb mode", + getOldClusterTestVersion().before("9.0.0") + ); if (isRunningAgainstOldCluster()) { assertOK(client().performRequest(putTemplate(client(), "logs-template", STANDARD_TEMPLATE))); assertOK(client().performRequest(createDataStream("logs-apache-production"))); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java index 6de960a0fd7ed..7518a799540b8 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java @@ -38,8 +38,10 @@ @TestCaseOrdering(FullClusterRestartTestOrdering.class) public abstract class ParameterizedFullClusterRestartTestCase extends ESRestTestCase { - private static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString(System.getProperty("tests.minimum.wire.compatible")); - private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); + protected static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString( + System.getProperty("tests.minimum.wire.compatible") + ); + protected static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); private static IndexVersion oldIndexVersion; private static boolean upgradeFailed = false; private static boolean upgraded = false; @@ -133,7 +135,7 @@ public boolean isRunningAgainstOldCluster() { } public static String getOldClusterVersion() { - return OLD_CLUSTER_VERSION; + return System.getProperty("tests.bwc.main.version", OLD_CLUSTER_VERSION); } protected static boolean oldClusterHasFeature(String featureId) { @@ -152,7 +154,7 @@ public static IndexVersion getOldClusterIndexVersion() { } public static Version getOldClusterTestVersion() { - return Version.fromString(OLD_CLUSTER_VERSION); + return Version.fromString(System.getProperty("tests.bwc.main.version", OLD_CLUSTER_VERSION)); } protected abstract ElasticsearchCluster getUpgradeCluster(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 8b74657becb24..02bea9a35f5f4 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -75,7 +75,7 @@ public class QueryBuilderBWCIT extends ParameterizedFullClusterRestartTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + 
.version(org.elasticsearch.test.cluster.util.Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("xpack.security.enabled", "false") .feature(FeatureFlag.FAILURE_STORE_ENABLED) diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java index ac4e1d9175885..1f30505e00104 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java @@ -12,8 +12,14 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.InputStreamEntity; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; @@ -23,7 +29,9 @@ import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -32,16 +40,18 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.util.Comparator; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.IntStream; -import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; import static org.elasticsearch.test.cluster.util.Version.CURRENT; import static org.elasticsearch.test.cluster.util.Version.fromString; import static org.elasticsearch.test.rest.ObjectPath.createFromResponse; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; @@ -154,8 +164,21 @@ protected static boolean isFullyUpgradedTo(Version version) throws Exception { } protected static Version indexVersion(String indexName) throws Exception { - var response = assertOK(client().performRequest(new Request("GET", "/" + indexName + "/_settings"))); - int id = Integer.parseInt(createFromResponse(response).evaluate(indexName + ".settings.index.version.created")); + return indexVersion(indexName, false); + } + + protected static Version indexVersion(String indexName, boolean ignoreWarnings) throws Exception { + Request request = new Request("GET", "/" + indexName + "/_settings"); + request.addParameter("flat_settings", "true"); + if (ignoreWarnings) { + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.setWarningsHandler(WarningsHandler.PERMISSIVE); + request.setOptions(options); + } + var response = assertOK(client().performRequest(request)); 
+ ObjectPath fromResponse = createFromResponse(response); + Map<String, Object> settings = fromResponse.evaluateExact(indexName, "settings"); + int id = Integer.parseInt((String) settings.get("index.version.created")); return new Version((byte) ((id / 1000000) % 100), (byte) ((id / 10000) % 100), (byte) ((id / 100) % 100)); } @@ -272,9 +295,51 @@ protected void addIndexBlock(String indexName, IndexMetadata.APIBlock apiBlock) assertAcknowledged(client().performRequest(request)); } - protected void assertThatIndexBlock(String indexName, IndexMetadata.APIBlock apiBlock) throws Exception { + private static ClusterBlock toIndexBlock(String blockId) { + int block = Integer.parseInt(blockId); + for (var indexBlock : List.of( + IndexMetadata.INDEX_READ_ONLY_BLOCK, + IndexMetadata.INDEX_READ_BLOCK, + IndexMetadata.INDEX_WRITE_BLOCK, + IndexMetadata.INDEX_METADATA_BLOCK, + IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK, + IndexMetadata.INDEX_REFRESH_BLOCK, + MetadataIndexStateService.INDEX_CLOSED_BLOCK + )) { + if (block == indexBlock.id()) { + return indexBlock; + } + } + throw new AssertionError("No index block found with id [" + blockId + ']'); + } + + @SuppressWarnings("unchecked") + protected static List<ClusterBlock> indexBlocks(String indexName) throws Exception { + var responseBody = createFromResponse(client().performRequest(new Request("GET", "_cluster/state/blocks/" + indexName))); + var blocks = (Map<String, ?>) responseBody.evaluate("blocks.indices." + indexName); + if (blocks == null || blocks.isEmpty()) { + return List.of(); + } + return blocks.keySet() + .stream() + .map(AbstractIndexCompatibilityTestCase::toIndexBlock) + .sorted(Comparator.comparing(ClusterBlock::id)) + .toList(); + } + + @SuppressWarnings("unchecked") + protected static void assertIndexSetting(String indexName, Setting<Boolean> setting, Matcher<Boolean> matcher) throws Exception { var indexSettings = getIndexSettingsAsMap(indexName); - assertThat(indexSettings.get(VERIFIED_READ_ONLY_SETTING.getKey()), equalTo(Boolean.TRUE.toString())); - assertThat(indexSettings.get(apiBlock.settingName()), equalTo(Boolean.TRUE.toString())); + assertThat(Boolean.parseBoolean((String) indexSettings.get(setting.getKey())), matcher); + } + + protected static ResponseException expectUpdateIndexSettingsThrows(String indexName, Settings.Builder settings) { + var exception = expectThrows(ResponseException.class, () -> updateIndexSettings(indexName, settings)); + assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + return exception; + } + + protected static Matcher<String> containsStringCannotRemoveBlockOnReadOnlyIndex(String indexName) { + return allOf(containsString("Can't remove the write block on read-only compatible index"), containsString(indexName)); } } diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java index d7829d8225034..501a46deca9d1 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java @@ -11,11 +11,19 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.repositories.fs.FsRepository; import
org.elasticsearch.test.cluster.util.Version; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_READ_ONLY_BLOCK; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_WRITE_BLOCK; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.INDEX_CLOSED_BLOCK; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; public class FullClusterRestartLuceneIndexCompatibilityIT extends FullClusterRestartIndexCompatibilityTestCase { @@ -28,46 +36,98 @@ public FullClusterRestartLuceneIndexCompatibilityIT(Version version) { } /** - * Creates an index on N-2, upgrades to N -1 and marks as read-only, then upgrades to N. + * Creates an index on N-2, upgrades to N-1 and marks as read-only, then upgrades to N. */ public void testIndexUpgrade() throws Exception { final String index = suffix("index"); final int numDocs = 2431; if (isFullyUpgradedTo(VERSION_MINUS_2)) { - logger.debug("--> creating index [{}]", index); createIndex( client(), index, Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) .build() ); - - logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); indexDocs(index, numDocs); return; } - if (isFullyUpgradedTo(VERSION_MINUS_1)) { - ensureGreen(index); + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + ensureGreen(index); - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + if (isIndexClosed(index) == false) { assertDocCount(client(), index, numDocs); + } - addIndexBlock(index, IndexMetadata.APIBlock.WRITE); + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + final boolean maybeClose = randomBoolean(); + if (maybeClose) { + logger.debug("--> closing index [{}] before upgrade", index); + closeIndex(index); + } + + final var block = randomFrom(IndexMetadata.APIBlock.WRITE, IndexMetadata.APIBlock.READ_ONLY); + addIndexBlock(index, block); + + assertThat(indexBlocks(index), maybeClose ? contains(INDEX_CLOSED_BLOCK, block.getBlock()) : contains(block.getBlock())); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(maybeClose)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); return; } if (isFullyUpgradedTo(VERSION_CURRENT)) { - ensureGreen(index); - - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); - assertDocCount(client(), index, numDocs); + final var isClosed = isIndexClosed(index); + logger.debug("--> upgraded index [{}] is in [{}] state", index, isClosed ? "closed" : "open"); + assertThat( + indexBlocks(index), + isClosed + ? 
either(contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)).or(contains(INDEX_CLOSED_BLOCK, INDEX_READ_ONLY_BLOCK)) + : either(contains(INDEX_WRITE_BLOCK)).or(contains(INDEX_READ_ONLY_BLOCK)) + ); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + + if (isClosed == false) { + logger.debug("--> write/read_only API blocks cannot be removed on an opened index"); + var ex = expectUpdateIndexSettingsThrows( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.WRITE.settingName()) + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + ); + assertThat(ex.getMessage(), containsStringCannotRemoveBlockOnReadOnlyIndex(index)); + + } else if (randomBoolean()) { + logger.debug("--> write/read_only API blocks can be removed on a closed index: INDEX_CLOSED_BLOCK already blocks writes"); + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.WRITE.settingName()) + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + ); + + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + } - assertThatIndexBlock(index, IndexMetadata.APIBlock.WRITE); + var block = indexBlocks(index).stream().filter(c -> c.equals(INDEX_WRITE_BLOCK) || c.equals(INDEX_READ_ONLY_BLOCK)).findFirst(); + if (block.isPresent() && block.get().equals(INDEX_READ_ONLY_BLOCK)) { + logger.debug("--> read_only API block can be replaced by a write block (required for the remaining tests)"); + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + .put(IndexMetadata.APIBlock.WRITE.settingName(), true) + ); + + assertThat(indexBlocks(index), isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + } var numberOfReplicas = getNumberOfReplicas(index); if (0 < numberOfReplicas) { @@ -82,66 +142,93 @@ public void testIndexUpgrade() throws Exception { updateIndexSettings(index, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)); ensureGreen(index); - logger.debug("--> closing restored index [{}]", index); - closeIndex(index); - ensureGreen(index); + if (isClosed) { + logger.debug("--> re-opening index [{}]", index); + openIndex(index); + ensureGreen(index); - logger.debug("--> adding replica to test peer-recovery for closed shards"); - updateIndexSettings(index, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 2)); - ensureGreen(index); + assertDocCount(client(), index, numDocs); + } else { + logger.debug("--> closing index [{}]", index); + closeIndex(index); + ensureGreen(index); + } - logger.debug("--> re-opening restored index [{}]", index); - openIndex(index); + logger.debug("--> adding more replicas to test peer-recovery"); + updateIndexSettings(index, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 2)); ensureGreen(index); - assertDocCount(client(), index, numDocs); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + assertThat( + indexBlocks(index), + isIndexClosed(index) ? 
contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK) + ); - logger.debug("--> deleting index [{}]", index); deleteIndex(index); } } /** - * Similar to {@link #testIndexUpgrade()} but with a read_only block. + * Creates an index on N-2, closes it on N-1 (without marking it as read-only), then upgrades to N. */ - public void testIndexUpgradeReadOnlyBlock() throws Exception { + public void testClosedIndexUpgrade() throws Exception { final String index = suffix("index"); - final int numDocs = 2531; + final int numDocs = 2437; if (isFullyUpgradedTo(VERSION_MINUS_2)) { - logger.debug("--> creating index [{}]", index); createIndex( client(), index, Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) .build() ); - - logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); indexDocs(index, numDocs); return; } - if (isFullyUpgradedTo(VERSION_MINUS_1)) { - ensureGreen(index); + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + ensureGreen(index); - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + if (isIndexClosed(index) == false) { assertDocCount(client(), index, numDocs); + } - addIndexBlock(index, IndexMetadata.APIBlock.READ_ONLY); + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + logger.debug("--> [{}] closing index before upgrade without adding a read_only/write block", index); + closeIndex(index); + + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertThat(indexBlocks(index), not(contains(INDEX_WRITE_BLOCK))); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(false)); return; } if (isFullyUpgradedTo(VERSION_CURRENT)) { + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(false)); + + logger.debug("--> re-opening index [{}] will add a write block", index); + openIndex(index); ensureGreen(index); - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + assertThat(indexBlocks(index), contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(false)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); assertDocCount(client(), index, numDocs); - assertThatIndexBlock(index, IndexMetadata.APIBlock.READ_ONLY); + logger.debug("--> closing index [{}]", index); + closeIndex(index); + ensureGreen(index); + + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + + deleteIndex(index); } } @@ -162,11 +249,7 @@ public void testRestoreIndex() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -196,7 +279,8 @@ public void testRestoreIndex() throws Exception { restoreIndex(repository, snapshot, index, restoredIndex); ensureGreen(restoredIndex); - assertThatIndexBlock(restoredIndex, 
IndexMetadata.APIBlock.WRITE); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + assertThat(indexBlocks(restoredIndex), contains(INDEX_WRITE_BLOCK)); assertThat(indexVersion(restoredIndex), equalTo(VERSION_MINUS_2)); assertDocCount(client(), restoredIndex, numDocs); @@ -243,11 +327,7 @@ public void testRestoreIndexOverClosedIndex() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -277,7 +357,8 @@ public void testRestoreIndexOverClosedIndex() throws Exception { if (isFullyUpgradedTo(VERSION_CURRENT)) { assertThat(isIndexClosed(index), equalTo(true)); - assertThatIndexBlock(index, IndexMetadata.APIBlock.WRITE); + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); logger.debug("--> restoring index [{}] over existing closed index", index); restoreIndex(repository, snapshot, index, index); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSearchableSnapshotIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSearchableSnapshotIndexCompatibilityIT.java index 477f2099477cc..8566c568e9f47 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSearchableSnapshotIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSearchableSnapshotIndexCompatibilityIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -46,11 +45,7 @@ public void testSearchableSnapshot() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -125,11 +120,7 @@ public void testSearchableSnapshotUpgrade() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSystemIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSystemIndexCompatibilityIT.java new file mode 100644 index 0000000000000..985a073bd6034 --- 
/dev/null +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSystemIndexCompatibilityIT.java @@ -0,0 +1,154 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.lucene; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.util.Version; +import org.elasticsearch.test.rest.ObjectPath; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class FullClusterRestartSystemIndexCompatibilityIT extends FullClusterRestartIndexCompatibilityTestCase { + + static { + clusterConfig = config -> config.setting("xpack.license.self_generated.type", "trial"); + } + + public FullClusterRestartSystemIndexCompatibilityIT(Version version) { + super(version); + } + + // we need a place to store async_search ids across cluster restarts + private static Map<String, String> async_search_ids = new HashMap<>(3); + + /** + * 1. Creates an index on N-2 and performs an async_search on it whose results are kept in a system index + * 2. After the upgrade to N-1 (latest), performs a system index migration step and adds a write block to the index + * 3.
On N, checks that async search results are still retrievable and that we can write to the system index + */ + public void testAsyncSearchIndexMigration() throws Exception { + final String index = suffix("index"); + final String asyncSearchIndex = ".async-search"; + final int numDocs = 2431; + + final Request asyncSearchRequest = new Request("POST", "/" + index + "/_async_search?size=100&keep_on_completion=true"); + + if (isFullyUpgradedTo(VERSION_MINUS_2)) { + createIndex( + client(), + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) + .build() + ); + indexDocs(index, numDocs); + ensureGreen(index); + + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n-2_id"); + ensureGreen(asyncSearchIndex); + + assertAsyncSearchHitCount(asyncId, numDocs); + assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 1)); + assertThat(indexVersion(asyncSearchIndex, true), equalTo(VERSION_MINUS_2)); + return; + } + + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + // check the .async-search index is readable + assertThat(indexVersion(asyncSearchIndex, true), equalTo(VERSION_MINUS_2)); + assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs); + + // migrate system indices + Request migrateRequest = new Request("POST", "/_migration/system_features"); + assertThat( + ObjectPath.createFromResponse(client().performRequest(migrateRequest)).evaluate("features.0.feature_name"), + equalTo("async_search") + ); + assertBusy(() -> { + Request checkMigrateProgress = new Request("GET", "/_migration/system_features"); + try { + assertFalse( + ObjectPath.createFromResponse(client().performRequest(checkMigrateProgress)) + .evaluate("migration_status") + .equals("IN_PROGRESS") + ); + } catch (IOException e) { + throw new AssertionError("System feature migration failed", e); + } + }); + + // check search results from the n-2 search are still readable + assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs); + + // perform a new async search and check it's readable + String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n-1_id"); + assertAsyncSearchHitCount(asyncId, numDocs); + assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 2)); + + // in order to move to the current version we need a write block on the n-2 index + addIndexBlock(index, IndexMetadata.APIBlock.WRITE); + } + + if (isFullyUpgradedTo(VERSION_CURRENT)) { + assertThat(indexVersion(index, true), equalTo(VERSION_MINUS_2)); + assertAsyncSearchHitCount(async_search_ids.get("n-2_id"), numDocs); + assertAsyncSearchHitCount(async_search_ids.get("n-1_id"), numDocs); + + // check the system index is still writeable + String asyncId = searchAsyncAndStoreId(asyncSearchRequest, "n_id"); + assertAsyncSearchHitCount(asyncId, numDocs); + assertBusy(() -> assertDocCountNoWarnings(client(), asyncSearchIndex, 3)); + } + } + + private static String searchAsyncAndStoreId(Request asyncSearchRequest, String asyncIdName) throws IOException { + ObjectPath resp = ObjectPath.createFromResponse(client().performRequest(asyncSearchRequest)); + String asyncId = resp.evaluate("id"); + assertNotNull(asyncId); + async_search_ids.put(asyncIdName, asyncId); + return asyncId; + } + + private static void assertAsyncSearchHitCount(String asyncId, int numDocs) throws IOException { + var asyncGet = new Request("GET", "/_async_search/" + asyncId); + ObjectPath resp =
ObjectPath.createFromResponse(client().performRequest(asyncGet)); + assertEquals(Integer.valueOf(numDocs), resp.evaluate("response.hits.total.value")); + } + + /** + * Assert that the index in question has the given number of documents present + */ + private static void assertDocCountNoWarnings(RestClient client, String indexName, long docCount) throws IOException { + Request countReq = new Request("GET", "/" + indexName + "/_count"); + RequestOptions.Builder options = countReq.getOptions().toBuilder(); + options.setWarningsHandler(WarningsHandler.PERMISSIVE); + countReq.setOptions(options); + ObjectPath resp = ObjectPath.createFromResponse(client.performRequest(countReq)); + assertEquals( + "expected " + docCount + " documents but it was a different number", + docCount, + Long.parseLong(resp.evaluate("count").toString()) + ); + } +} diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java index b145b1e08c71d..7b9e2d64bbae4 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java @@ -13,13 +13,24 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; import java.util.List; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_READ_ONLY_BLOCK; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_WRITE_BLOCK; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.INDEX_CLOSED_BLOCK; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; public class RollingUpgradeLuceneIndexCompatibilityTestCase extends RollingUpgradeIndexCompatibilityTestCase { @@ -39,36 +50,113 @@ public void testIndexUpgrade() throws Exception { final int numDocs = 2543; if (isFullyUpgradedTo(VERSION_MINUS_2)) { - logger.debug("--> creating index [{}]", index); createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); - - logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); indexDocs(index, numDocs); return; } + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); ensureGreen(index); - if (isFullyUpgradedTo(VERSION_MINUS_1)) { - assertThat(indexVersion(index), 
equalTo(VERSION_MINUS_2)); + if (isIndexClosed(index) == false) { assertDocCount(client(), index, numDocs); + } - addIndexBlock(index, IndexMetadata.APIBlock.WRITE); + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + final var maybeClose = randomBoolean(); + if (maybeClose) { + logger.debug("--> closing index [{}] before upgrade", index); + closeIndex(index); + } + + final var randomBlocks = randomFrom( + List.of(IndexMetadata.APIBlock.WRITE, IndexMetadata.APIBlock.READ_ONLY), + List.of(IndexMetadata.APIBlock.READ_ONLY), + List.of(IndexMetadata.APIBlock.WRITE) + ); + for (var randomBlock : randomBlocks) { + addIndexBlock(index, randomBlock); + assertThat(indexBlocks(index), hasItem(randomBlock.getBlock())); + } + + assertThat(indexBlocks(index), maybeClose ? hasItem(INDEX_CLOSED_BLOCK) : not(hasItem(INDEX_CLOSED_BLOCK))); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(maybeClose)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); return; } if (nodesVersions().values().stream().anyMatch(v -> v.onOrAfter(VERSION_CURRENT))) { - assertThatIndexBlock(index, IndexMetadata.APIBlock.WRITE); + final var isClosed = isIndexClosed(index); + logger.debug("--> upgraded index [{}] is now in [{}] state", index, isClosed ? "closed" : "open"); + assertThat( + indexBlocks(index), + allOf( + either(hasItem(INDEX_READ_ONLY_BLOCK)).or(hasItem(INDEX_WRITE_BLOCK)), + isClosed ? hasItem(INDEX_CLOSED_BLOCK) : not(hasItem(INDEX_CLOSED_BLOCK)) + ) + ); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + + var blocks = indexBlocks(index).stream().filter(c -> c.equals(INDEX_WRITE_BLOCK) || c.equals(INDEX_READ_ONLY_BLOCK)).toList(); + if (blocks.size() == 2) { + switch (randomInt(2)) { + case 0: + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.WRITE.settingName()) + .put(IndexMetadata.APIBlock.READ_ONLY.settingName(), true) + ); + assertThat( + indexBlocks(index), + isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_READ_ONLY_BLOCK) : contains(INDEX_READ_ONLY_BLOCK) + ); + break; + case 1: + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + .put(IndexMetadata.APIBlock.WRITE.settingName(), true) + ); + assertThat( + indexBlocks(index), + isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK) + ); + break; + case 2: + updateIndexSettings(index, Settings.builder().put(IndexMetadata.APIBlock.READ_ONLY.settingName(), false)); + assertThat( + indexBlocks(index), + isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK) + ); + break; + default: + throw new AssertionError(); + } + } + + blocks = indexBlocks(index).stream().filter(c -> c.equals(INDEX_WRITE_BLOCK) || c.equals(INDEX_READ_ONLY_BLOCK)).toList(); + if (blocks.contains(INDEX_READ_ONLY_BLOCK)) { + logger.debug("--> read_only API block can be replaced by a write block (required for the remaining tests)"); + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + .put(IndexMetadata.APIBlock.WRITE.settingName(), true) + ); + } - if (isIndexClosed(index)) { + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertThat(indexBlocks(index), isClosed ? 
contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK)); + + if (isClosed) { logger.debug("--> re-opening index [{}] after upgrade", index); openIndex(index); ensureGreen(index); @@ -89,44 +177,67 @@ public void testIndexUpgrade() throws Exception { } /** - * Similar to {@link #testIndexUpgrade()} but with a read_only block. + * Creates an index on N-2, closes it on N-1 and then upgrades the cluster. */ - public void testIndexUpgradeReadOnlyBlock() throws Exception { - final String index = suffix("index-"); - final int numDocs = 2573; + public void testClosedIndexUpgrade() throws Exception { + final String index = suffix("closed-rolling-upgraded"); + final int numDocs = 1543; if (isFullyUpgradedTo(VERSION_MINUS_2)) { - logger.debug("--> creating index [{}]", index); createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); - - logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); indexDocs(index, numDocs); return; } + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); ensureGreen(index); - if (isFullyUpgradedTo(VERSION_MINUS_1)) { - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + if (isIndexClosed(index) == false) { assertDocCount(client(), index, numDocs); + } + + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + logger.debug("--> closing index [{}]", index); + closeIndex(index); - addIndexBlock(index, IndexMetadata.APIBlock.READ_ONLY); + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(false)); return; } if (nodesVersions().values().stream().anyMatch(v -> v.onOrAfter(VERSION_CURRENT))) { - assertThatIndexBlock(index, IndexMetadata.APIBlock.READ_ONLY); + long upgradedNodes = nodesVersions().values().stream().filter(v -> v.onOrAfter(VERSION_CURRENT)).count(); + if (upgradedNodes == 1) { + // Mixed cluster with 1 of the 3 nodes upgraded: the index hasn't been reopened yet + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(false)); + + } else { + // Index has been reopened at least once, it should have an additional write block and the verified-read-only setting + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + } - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + openIndex(index); + ensureGreen(index); + + assertThat(indexBlocks(index), contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(false)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); assertDocCount(client(), index, numDocs); + + updateRandomIndexSettings(index); + updateRandomMappings(index); + + closeIndex(index); + ensureGreen(index); } } @@ -147,11 +258,7 @@ public void testRestoreIndex() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - 
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -174,16 +281,24 @@ public void testRestoreIndex() throws Exception { deleteIndex(index); return; } + if (nodesVersions().values().stream().anyMatch(v -> v.onOrAfter(VERSION_CURRENT))) { var restoredIndex = suffix("index-restored-rolling"); boolean success = false; try { - logger.debug("--> restoring index [{}] as [{}]", index, restoredIndex); restoreIndex(repository, snapshot, index, restoredIndex); ensureGreen(restoredIndex); - assertThatIndexBlock(restoredIndex, IndexMetadata.APIBlock.WRITE); + assertThat(indexBlocks(restoredIndex), contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + + var ex = expectUpdateIndexSettingsThrows( + restoredIndex, + Settings.builder().putNull(IndexMetadata.APIBlock.WRITE.settingName()) + ); + assertThat(ex.getMessage(), containsStringCannotRemoveBlockOnReadOnlyIndex(restoredIndex)); + assertThat(indexVersion(restoredIndex), equalTo(VERSION_MINUS_2)); assertDocCount(client(), restoredIndex, numDocs); @@ -194,10 +309,29 @@ public void testRestoreIndex() throws Exception { closeIndex(restoredIndex); ensureGreen(restoredIndex); + assertThat(indexBlocks(restoredIndex), contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)); + assertIndexSetting(restoredIndex, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + + logger.debug("--> write API block can be removed on a closed index: INDEX_CLOSED_BLOCK already blocks writes"); + updateIndexSettings(restoredIndex, Settings.builder().putNull(IndexMetadata.APIBlock.WRITE.settingName())); + + assertThat(indexBlocks(restoredIndex), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(restoredIndex, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + + if (randomBoolean()) { + addIndexBlock(restoredIndex, IndexMetadata.APIBlock.WRITE); + } + logger.debug("--> re-opening restored index [{}]", restoredIndex); openIndex(restoredIndex); ensureGreen(restoredIndex); + assertThat(indexBlocks(restoredIndex), contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(restoredIndex, VERIFIED_BEFORE_CLOSE_SETTING, is(false)); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + assertDocCount(client(), restoredIndex, numDocs); logger.debug("--> deleting restored index [{}]", restoredIndex); @@ -214,5 +348,20 @@ public void testRestoreIndex() throws Exception { } } } + + if (isFullyUpgradedTo(VERSION_CURRENT)) { + var exception = expectThrows( + ResponseException.class, + () -> restoreIndex( + repository, + snapshot, + index, + suffix("unrestorable"), + Settings.builder().put(IndexMetadata.APIBlock.WRITE.settingName(), false).build() + ) + ); + assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(500)); + assertThat(exception.getMessage(), containsString("must be marked as read-only using the setting")); + } } } diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeSearchableSnapshotIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeSearchableSnapshotIndexCompatibilityIT.java index 1117d36024bf0..f1b27d5c34c1b 100644 --- 
a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeSearchableSnapshotIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeSearchableSnapshotIndexCompatibilityIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -51,11 +50,7 @@ public void testMountSearchableSnapshot() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -122,11 +117,7 @@ public void testSearchableSnapshotUpgrade() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 168493eb52f60..5c26a744b2fbf 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -80,8 +80,8 @@ public void testNoControllerSpawn() throws IOException { Environment environment = TestEnvironment.newEnvironment(settings); // This plugin will NOT have a controller daemon - Path plugin = environment.modulesFile().resolve("a_plugin"); - Files.createDirectories(environment.modulesFile()); + Path plugin = environment.modulesDir().resolve("a_plugin"); + Files.createDirectories(environment.modulesDir()); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -111,8 +111,8 @@ public void testNoControllerSpawn() throws IOException { * Two plugins - one with a controller daemon and one without. 
*/ public void testControllerSpawn() throws Exception { - assertControllerSpawns(Environment::pluginsFile, false); - assertControllerSpawns(Environment::modulesFile, true); + assertControllerSpawns(Environment::pluginsDir, false); + assertControllerSpawns(Environment::modulesDir, true); } private void assertControllerSpawns(final Function pluginsDirFinder, boolean expectSpawn) throws Exception { @@ -131,8 +131,8 @@ private void assertControllerSpawns(final Function pluginsDir // this plugin will have a controller daemon Path plugin = pluginsDirFinder.apply(environment).resolve("test_plugin"); - Files.createDirectories(environment.modulesFile()); - Files.createDirectories(environment.pluginsFile()); + Files.createDirectories(environment.modulesDir()); + Files.createDirectories(environment.pluginsDir()); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -217,7 +217,7 @@ public void testControllerSpawnWithIncorrectDescriptor() throws IOException { Environment environment = TestEnvironment.newEnvironment(settings); - Path plugin = environment.modulesFile().resolve("test_plugin"); + Path plugin = environment.modulesDir().resolve("test_plugin"); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -250,10 +250,10 @@ public void testSpawnerHandlingOfDesktopServicesStoreFiles() throws IOException final Environment environment = TestEnvironment.newEnvironment(settings); - Files.createDirectories(environment.modulesFile()); - Files.createDirectories(environment.pluginsFile()); + Files.createDirectories(environment.modulesDir()); + Files.createDirectories(environment.pluginsDir()); - final Path desktopServicesStore = environment.modulesFile().resolve(".DS_Store"); + final Path desktopServicesStore = environment.modulesDir().resolve(".DS_Store"); Files.createFile(desktopServicesStore); final Spawner spawner = new Spawner(); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 30367bf55d8cc..c0f12f95269ef 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -82,8 +82,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), randomDoubleProcessorCount(), ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - null + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ) ) .toList(); @@ -94,8 +93,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)), ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - null + ByteSizeValue.ofGb(randomIntBetween(128, 256)) ); }).toList(); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsUsageRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsUsageRollingUpgradeIT.java new file mode 100644 index 0000000000000..ab9855b7398fe --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsUsageRollingUpgradeIT.java @@ -0,0 +1,71 @@ 
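The new LogsUsageRollingUpgradeIT that follows exercises the logsdb opt-out path: an 8.x cluster that already writes to logs-* data streams records the persistent logsdb.prior_logs_usage flag, so after the upgrade a rolled-over backing index is expected to stay in standard mode rather than defaulting to logsdb. Its settings check boils down to the cluster settings API with flat keys; a minimal sketch using the low-level REST client, as the test itself does:

Request request = new Request("GET", "/_cluster/settings");
request.addParameter("flat_settings", "true");    // dotted keys such as "logsdb.prior_logs_usage" instead of nested objects
request.addParameter("include_defaults", "true"); // adds a "defaults" section next to "persistent" and "transient"
Response response = client().performRequest(request);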
+/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; + +import java.io.IOException; +import java.time.Instant; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.upgrades.LogsdbIndexingRollingUpgradeIT.bulkIndex; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +public class LogsUsageRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + public LogsUsageRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testUsage() throws Exception { + assumeTrue("logsdb.prior_logs_usage only gets set in 8.x", getOldClusterTestVersion().before("9.0.0")); + String dataStreamName = "logs-mysql-error"; + if (isOldCluster()) { + bulkIndex(dataStreamName, 4, 256, Instant.now()); + ensureGreen(dataStreamName); + assertBusy(() -> { + var getClusterSettingsResponse = getClusterSettings(); + Map persistentSettings = (Map) getClusterSettingsResponse.get("persistent"); + assertThat(persistentSettings, hasEntry("logsdb.prior_logs_usage", "true")); + }, 2, TimeUnit.MINUTES); + } else { + String newIndex = rolloverDataStream(dataStreamName); + bulkIndex(dataStreamName, 4, 256, Instant.now()); + Map indexResponse = (Map) getIndexSettings(newIndex, true).get(newIndex); + Map settings = (Map) indexResponse.get("settings"); + Map defaults = (Map) indexResponse.get("defaults"); + assertThat(settings, not(hasKey("index.mode"))); + assertThat(defaults, hasEntry("index.mode", "standard")); + } + } + + static Map getClusterSettings() throws IOException { + var request = new Request("GET", "/_cluster/settings"); + request.addParameter("flat_settings", "true"); + request.addParameter("include_defaults", "true"); + var response = client().performRequest(request); + assertOK(response); + return entityAsMap(response); + } + + static String rolloverDataStream(String dataStreamName) throws IOException { + var request = new Request("POST", "/" + dataStreamName + "/_rollover"); + var response = client().performRequest(request); + assertOK(response); + var responseBody = entityAsMap(response); + return (String) responseBody.get("new_index"); + } + +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java index 9cb91438e09c0..6b2a889d3c1ac 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java @@ -22,13 +22,18 @@ import java.io.IOException; import java.io.InputStream; import java.time.Instant; +import java.util.List; import java.util.Locale; import java.util.Map; import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.enableLogsdbByDefault; 
import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.getWriteBackingIndex; import static org.elasticsearch.upgrades.TsdbIT.formatInstant; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; public class LogsdbIndexingRollingUpgradeIT extends AbstractRollingUpgradeTestCase { @@ -122,7 +127,8 @@ static void createTemplate(String dataStreamName, String id, String template) th assertOK(client().performRequest(putIndexTemplateRequest)); } - static void bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception { + static String bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception { + String firstIndex = null; for (int i = 0; i < numRequest; i++) { var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); StringBuilder requestBody = new StringBuilder(); @@ -155,7 +161,11 @@ static void bulkIndex(String dataStreamName, int numRequest, int numDocs, Instan assertOK(response); var responseBody = entityAsMap(response); assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + if (firstIndex == null) { + firstIndex = (String) ((Map) ((Map) ((List) responseBody.get("items")).get(0)).get("create")).get("_index"); + } } + return firstIndex; } void search(String dataStreamName) throws Exception { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NoLogsUsageRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NoLogsUsageRollingUpgradeIT.java new file mode 100644 index 0000000000000..57e5655fda3b6 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NoLogsUsageRollingUpgradeIT.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import java.time.Instant; +import java.util.Map; + +import static org.elasticsearch.upgrades.LogsUsageRollingUpgradeIT.getClusterSettings; +import static org.elasticsearch.upgrades.LogsdbIndexingRollingUpgradeIT.bulkIndex; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +public class NoLogsUsageRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + public NoLogsUsageRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testUsage() throws Exception { + String dataStreamName = "logs-mysql-error"; + if (isOldCluster()) { + dataStreamName = dataStreamName.replace("logs-", "log-"); + bulkIndex(dataStreamName, 4, 256, Instant.now()); + ensureGreen(dataStreamName); + } else if (isUpgradedCluster()) { + String newIndex = bulkIndex(dataStreamName, 4, 256, Instant.now()); + ensureGreen(dataStreamName); + Map indexResponse = (Map) getIndexSettings(newIndex, true).get(newIndex); + Map settings = (Map) indexResponse.get("settings"); + assertThat(settings, hasEntry("index.mode", "logsdb")); + var getClusterSettingsResponse = getClusterSettings(); + Map defaults = (Map) getClusterSettingsResponse.get("defaults"); + Map persistentSettings = (Map) getClusterSettingsResponse.get("persistent"); + assertThat(persistentSettings, not(hasKey("logsdb.prior_logs_usage"))); + assertThat(defaults, hasEntry("cluster.logsdb.enabled", "true")); + } + } + +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java index 98572de6618ee..55a03da7c9e90 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java @@ -18,7 +18,6 @@ import java.util.List; import java.util.Map; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class SourceModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { @@ -83,20 +82,16 @@ public void testConfigureStoredSourceWhenIndexIsCreatedLegacy() throws IOExcepti private void assertDeprecationWarningForTemplate(String templateName) throws IOException { var request = new Request("GET", "/_migration/deprecations"); var response = entityAsMap(client().performRequest(request)); - if (response.containsKey("templates")) { - // Check the newer version of the deprecation API that contains the templates section - Map issuesByTemplate = (Map) response.get("templates"); - assertThat(issuesByTemplate.containsKey(templateName), equalTo(true)); - var templateIssues = (List) issuesByTemplate.get(templateName); - assertThat(((Map) templateIssues.getFirst()).get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat(response.containsKey("templates"), equalTo(true)); + Map issuesByTemplate = (Map) response.get("templates"); + assertThat(issuesByTemplate.containsKey(templateName), equalTo(true)); + var templateIssue = (Map) ((List) issuesByTemplate.get(templateName)).getFirst(); + // Bwc compatible logic until backports are complete. 
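+ // Illustrative only: the branch below expects either the post-backport shape, where each
+ // template issue carries both "message" (the short title) and "details" (the full warning),
+ // i.e. "templates": { "<template-name>": [ { "message": "...", "details": "..." } ] },
+ // or the pre-backport shape, where the full warning text is in "message" alone.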
+ if (templateIssue.containsKey("details")) { + assertThat(templateIssue.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING_TITLE)); + assertThat(templateIssue.get("details"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); } else { - // Bwc version with 8.18 until https://github.com/elastic/elasticsearch/pull/120505/ gets backported, clean up after backport - var nodeSettings = (Map) ((List) response.get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") - ); + assertThat(templateIssue.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); } } } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java index df46ed0379980..5308fe85c1cab 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -33,11 +33,14 @@ public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { private static final String SCRIPT_BYTE_INDEX_NAME = "script_byte_vector_index"; private static final String BYTE_INDEX_NAME = "byte_vector_index"; private static final String QUANTIZED_INDEX_NAME = "quantized_vector_index"; + private static final String BBQ_INDEX_NAME = "bbq_vector_index"; private static final String FLAT_QUANTIZED_INDEX_NAME = "flat_quantized_vector_index"; + private static final String FLAT_BBQ_INDEX_NAME = "flat_bbq_vector_index"; private static final String FLOAT_VECTOR_SEARCH_VERSION = "8.4.0"; private static final String BYTE_VECTOR_SEARCH_VERSION = "8.6.0"; private static final String QUANTIZED_VECTOR_SEARCH_VERSION = "8.12.1"; private static final String FLAT_QUANTIZED_VECTOR_SEARCH_VERSION = "8.13.0"; + private static final String BBQ_VECTOR_SEARCH_VERSION = "8.18.0"; public void testScriptByteVectorSearch() throws Exception { assumeTrue("byte vector search is not supported on this version", getOldClusterTestVersion().onOrAfter(BYTE_VECTOR_SEARCH_VERSION)); @@ -429,6 +432,182 @@ public void testFlatQuantizedVectorSearch() throws Exception { assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.005)); } + public void testBBQVectorSearch() throws Exception { + assumeTrue( + "Quantized vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(BBQ_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 64, + "index": true, + "similarity": "cosine", + "index_options": { + "type": "bbq_hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(BBQ_INDEX_NAME, Settings.EMPTY, mapping); + index64DimVectors(BBQ_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + BBQ_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + Request searchRequest = new Request("POST", "/" + BBQ_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": 
[4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + BBQ_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.005)); + } + + public void testFlatBBQVectorSearch() throws Exception { + assumeTrue( + "Quantized vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(BBQ_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 64, + "index": true, + "similarity": "cosine", + "index_options": { + "type": "bbq_flat" + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(FLAT_BBQ_INDEX_NAME, Settings.EMPTY, mapping); + index64DimVectors(FLAT_BBQ_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + FLAT_BBQ_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + Request searchRequest = new Request("POST", "/" + FLAT_BBQ_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + FLAT_BBQ_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.005)); + } + + private void 
index64DimVectors(String indexName) throws Exception { + String[] vectors = new String[] { + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]}", + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]}", + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, " + + "2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, " + + "3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{\"vector\":[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{\"vector\":[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{}" }; + for (int i = 0; i < vectors.length; i++) { + Request indexRequest = new Request("PUT", "/" + indexName + "/_doc/" + i); + indexRequest.setJsonEntity(vectors[i]); + assertOK(client().performRequest(indexRequest)); + } + // always refresh to ensure the data is visible + refresh(indexName); + } + private void indexVectors(String indexName) throws Exception { String[] vectors = new String[] { "{\"vector\":[1, 1, 1]}", diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SearchErrorTraceIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SearchErrorTraceIT.java index 6f9ab8ccdfdec..99e89f0e31cc5 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SearchErrorTraceIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SearchErrorTraceIT.java @@ -11,43 +11,26 @@ import org.apache.http.entity.ContentType; import org.apache.http.nio.entity.NByteArrayEntity; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Request; +import org.elasticsearch.search.ErrorTraceHelper; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.transport.TransportMessageListener; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentType; import org.junit.Before; import java.io.IOException; import java.nio.charset.Charset; -import java.util.Optional; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.BooleanSupplier; import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; public class SearchErrorTraceIT extends HttpSmokeTestCase { - private AtomicBoolean hasStackTrace; + private BooleanSupplier hasStackTrace; @Before - private void setupMessageListener() { - 
internalCluster().getDataNodeInstances(TransportService.class).forEach(ts -> { - ts.addMessageListener(new TransportMessageListener() { - @Override - public void onResponseSent(long requestId, String action, Exception error) { - TransportMessageListener.super.onResponseSent(requestId, action, error); - if (action.startsWith("indices:data/read/search")) { - Optional throwable = ExceptionsHelper.unwrapCausesAndSuppressed( - error, - t -> t.getStackTrace().length > 0 - ); - hasStackTrace.set(throwable.isPresent()); - } - } - }); - }); + public void setupMessageListener() { + hasStackTrace = ErrorTraceHelper.setupErrorTraceListener(internalCluster()); } private void setupIndexWithDocs() { @@ -61,7 +44,6 @@ private void setupIndexWithDocs() { } public void testSearchFailingQueryErrorTraceDefault() throws IOException { - hasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); Request searchRequest = new Request("POST", "/_search"); @@ -76,11 +58,10 @@ public void testSearchFailingQueryErrorTraceDefault() throws IOException { } """); getRestClient().performRequest(searchRequest); - assertFalse(hasStackTrace.get()); + assertFalse(hasStackTrace.getAsBoolean()); } public void testSearchFailingQueryErrorTraceTrue() throws IOException { - hasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); Request searchRequest = new Request("POST", "/_search"); @@ -96,11 +77,10 @@ public void testSearchFailingQueryErrorTraceTrue() throws IOException { """); searchRequest.addParameter("error_trace", "true"); getRestClient().performRequest(searchRequest); - assertTrue(hasStackTrace.get()); + assertTrue(hasStackTrace.getAsBoolean()); } public void testSearchFailingQueryErrorTraceFalse() throws IOException { - hasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); Request searchRequest = new Request("POST", "/_search"); @@ -116,11 +96,10 @@ public void testSearchFailingQueryErrorTraceFalse() throws IOException { """); searchRequest.addParameter("error_trace", "false"); getRestClient().performRequest(searchRequest); - assertFalse(hasStackTrace.get()); + assertFalse(hasStackTrace.getAsBoolean()); } public void testMultiSearchFailingQueryErrorTraceDefault() throws IOException { - hasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); XContentType contentType = XContentType.JSON; @@ -133,11 +112,10 @@ public void testMultiSearchFailingQueryErrorTraceDefault() throws IOException { new NByteArrayEntity(requestBody, ContentType.create(contentType.mediaTypeWithoutParameters(), (Charset) null)) ); getRestClient().performRequest(searchRequest); - assertFalse(hasStackTrace.get()); + assertFalse(hasStackTrace.getAsBoolean()); } public void testMultiSearchFailingQueryErrorTraceTrue() throws IOException { - hasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); XContentType contentType = XContentType.JSON; @@ -151,11 +129,10 @@ public void testMultiSearchFailingQueryErrorTraceTrue() throws IOException { ); searchRequest.addParameter("error_trace", "true"); getRestClient().performRequest(searchRequest); - assertTrue(hasStackTrace.get()); + assertTrue(hasStackTrace.getAsBoolean()); } public void testMultiSearchFailingQueryErrorTraceFalse() throws IOException { - hasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); XContentType contentType = XContentType.JSON; @@ -170,6 +147,6 @@ public void testMultiSearchFailingQueryErrorTraceFalse() throws IOException { searchRequest.addParameter("error_trace", "false"); getRestClient().performRequest(searchRequest); - assertFalse(hasStackTrace.get()); + 
assertFalse(hasStackTrace.getAsBoolean()); } } diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java index b1e28de1a5264..683990d51d4a8 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java @@ -10,7 +10,6 @@ package org.elasticsearch.http.snapshots; import org.apache.http.client.methods.HttpGet; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; @@ -37,7 +36,6 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -516,10 +514,9 @@ private static GetSnapshotsResponse sortedWithLimit( true, (args) -> new GetSnapshotsResponse( (List) args[0], - (Map) args[1], - (String) args[2], - args[3] == null ? UNKNOWN_COUNT : (int) args[3], - args[4] == null ? UNKNOWN_COUNT : (int) args[4] + (String) args[1], + args[2] == null ? UNKNOWN_COUNT : (int) args[2], + args[3] == null ? UNKNOWN_COUNT : (int) args[3] ) ); @@ -529,11 +526,6 @@ private static GetSnapshotsResponse sortedWithLimit( (p, c) -> SnapshotInfoUtils.snapshotInfoFromXContent(p), new ParseField("snapshots") ); - GET_SNAPSHOT_PARSER.declareObject( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> p.map(HashMap::new, ElasticsearchException::fromXContent), - new ParseField("failures") - ); GET_SNAPSHOT_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("next")); GET_SNAPSHOT_PARSER.declareIntOrNull(ConstructingObjectParser.optionalConstructorArg(), UNKNOWN_COUNT, new ParseField("total")); GET_SNAPSHOT_PARSER.declareIntOrNull(ConstructingObjectParser.optionalConstructorArg(), UNKNOWN_COUNT, new ParseField("remaining")); diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/GeoLite2-City.mmdb b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/GeoLite2-City.mmdb index 0809201619b59..67dcd4a8e5a17 100644 Binary files a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/GeoLite2-City.mmdb and b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/GeoLite2-City.mmdb differ diff --git a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java index e53c0564be297..5943d4a86ab92 100644 --- a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java +++ b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java @@ -36,6 +36,7 @@ public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTe .node(0, n -> n.setting("node.roles", "[master,data,ml,remote_cluster_client,transform]")) .feature(FeatureFlag.TIME_SERIES_MODE) .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) + .feature(FeatureFlag.INDEX_RECOVERY_USE_SYNTHETIC_SOURCE) .build(); public 
SmokeTestMultiNodeClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/renovate.json b/renovate.json index 71c6301f8e0c2..53919e027dc7a 100644 --- a/renovate.json +++ b/renovate.json @@ -7,8 +7,21 @@ "schedule": [ "after 1pm on tuesday" ], - "labels": [">non-issue", ":Delivery/Packaging", "Team:Delivery", "auto-merge-without-approval"], - "baseBranches": ["main", "8.x", "8.17", "8.16"], + "labels": [ + ">non-issue", + ":Delivery/Packaging", + "Team:Delivery", + "auto-merge-without-approval" + ], + "baseBranches": [ + "main", + "8.16", + "9.0", + "8.18", + "8.17", + "8.x", + "7.17" + ], "packageRules": [ { "groupName": "wolfi (versioned)", @@ -17,8 +30,8 @@ "matchDatasources": [ "docker" ], - "matchPackagePatterns": [ - "^docker.elastic.co/wolfi/chainguard-base$" + "matchPackageNames": [ + "/^docker.elastic.co/wolfi/chainguard-base$/" ] } ], diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 68da320923898..205b02a8936bb 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -57,36 +57,12 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.collector.0.name", "TopScoreDocCollector", "dfs knn vector profiling") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.collector.0.name", "TopScoreDocCollector", "dfs knn vector profiling with vector_operations_count") task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") - task.skipTest("logsdb/10_settings/logsdb with default ignore dynamic beyond limit and default sorting", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/logsdb with default ignore dynamic beyond limit and too low limit", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/logsdb with default ignore dynamic beyond limit and subobjects false", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/override sort missing settings", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/override sort order settings", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/override sort mode settings", "skip until pr/118968 gets backported") - task.skipTest("search.vectors/41_knn_search_bbq_hnsw/Test knn search", "Scoring has changed in latest versions") - task.skipTest("search.vectors/42_knn_search_bbq_flat/Test knn search", "Scoring has changed in latest versions") - task.skipTest("search.vectors/180_update_dense_vector_type/Test create and update dense vector mapping with bulk indexing", "waiting for #118774 backport") - task.skipTest("search.vectors/160_knn_query_missing_params/kNN query in a bool clause - missing num_candidates", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/Simple knn query", "waiting for #118774 backport") - 
task.skipTest("search.vectors/160_knn_query_missing_params/kNN search used in nested field - missing num_candidates", "waiting for #118774 backport") - task.skipTest("search.vectors/180_update_dense_vector_type/Test create and update dense vector mapping to int4 with per-doc indexing and flush", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/PRE_FILTER: knn query with internal filter as pre-filter", "waiting for #118774 backport") - task.skipTest("search.vectors/180_update_dense_vector_type/Index, update and merge", "waiting for #118774 backport") - task.skipTest("search.vectors/160_knn_query_missing_params/kNN query with missing num_candidates param - size provided", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/POST_FILTER: knn query with filter from a parent bool query as post-filter", "waiting for #118774 backport") - task.skipTest("search.vectors/120_knn_query_multiple_shards/Aggregations with collected number of docs depends on num_candidates", "waiting for #118774 backport") - task.skipTest("search.vectors/180_update_dense_vector_type/Test create and update dense vector mapping with per-doc indexing and flush", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/PRE_FILTER: knn query with alias filter as pre-filter", "waiting for #118774 backport") - task.skipTest("search.vectors/140_knn_query_with_other_queries/Function score query with knn query", "waiting for #118774 backport") - task.skipTest("search.vectors/130_knn_query_nested_search/nested kNN search inner_hits size > 1", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/PRE_FILTER: pre-filter across multiple aliases", "waiting for #118774 backport") - task.skipTest("search.vectors/160_knn_query_missing_params/kNN search in a dis_max query - missing num_candidates", "waiting for #118774 backport") task.skipTest("search.highlight/30_max_analyzed_offset/Plain highlighter with max_analyzed_offset < 0 should FAIL", "semantics of test has changed") - task.skipTest("indices.create/10_basic/Create lookup index", "default auto_expand_replicas was removed") - task.skipTest("indices.create/10_basic/Create lookup index with one shard", "default auto_expand_replicas was removed") task.skipTest("range/20_synthetic_source/Double range", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("range/20_synthetic_source/Float range", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("range/20_synthetic_source/Integer range", "_source.mode mapping attribute is no-op since 9.0.0") @@ -97,5 +73,14 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("index/92_metrics_auto_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") - task.skipTest("logsdb/10_settings/routing path allowed in logs mode with routing on sort fields", "Unknown feature routing.logsb_route_on_sort_fields") + task.skipTest("indices.create/20_synthetic_source/synthetic_source with copy_to inside nested object", "temporary until backported") + task.skipTest( + "cluster.desired_nodes/10_basic/Test delete 
desired nodes with node_version generates a warning", + "node_version warning is removed in 9.0" + ) + task.skipTest( + "cluster.desired_nodes/10_basic/Test update desired nodes with node_version generates a warning", + "node_version warning is removed in 9.0" + ) + task.skipTest("tsdb/20_mapping/nested fields", "nested field support in tsdb indices is now supported") }) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json index aed7397877393..e735a75f67ee9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json @@ -30,7 +30,7 @@ "params": { "master_timeout":{ "type":"time", - "description":"Timeout for processing on master node" + "description":"Timeout for waiting for new cluster state in case it is blocked" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.stats.json index afd314a0dc804..835fa3f6ffef2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.stats.json @@ -20,7 +20,7 @@ "params": { "master_timeout":{ "type":"time", - "description":"Timeout for processing on master node" + "description":"Timeout for waiting for new cluster state in case it is blocked" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json index 4b5f53c275d79..31cff458e357a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json @@ -1,7 +1,7 @@ { "indices.cancel_migrate_reindex":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-cancel-api.html", "description":"This API returns the status of a migration reindex attempt for a data stream or index" }, "stability":"experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json index 05d64598dd1ee..435430b7a2673 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json @@ -1,7 +1,7 @@ { "indices.create_from":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-create-index-from-source.html", "description":"This API creates a destination from a source index. It copies the mappings and settings from the source index while allowing request settings and mappings to override the source values." 
}, "stability":"experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json index 12151321ac827..d166f3e99197b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json @@ -1,7 +1,7 @@ { "indices.get_migrate_reindex_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-status-api.html", "description":"This API returns the status of a migration reindex attempt for a data stream or index" }, "stability":"experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json index 66a73c6438142..28cef97c7360a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json @@ -1,7 +1,7 @@ { "indices.migrate_reindex":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-api.html", "description":"This API reindexes all legacy backing indices for a data stream. It does this in a persistent task. The persistent task id is returned immediately, and the reindexing work is completed in that task" }, "stability":"experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json index 8af2dde4f8032..c41233664de0e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json @@ -1,55 +1,56 @@ { - "indices.resolve_cluster":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-cluster-api.html", - "description":"Resolves the specified index expressions to return information about each cluster, including the local cluster, if included." + "indices.resolve_cluster": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-cluster-api.html", + "description": "Resolves the specified index expressions to return information about each cluster. If no index expression is provided, this endpoint will return information about all the remote clusters that are configured on the local cluster." 
}, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] + "stability": "stable", + "visibility": "public", + "headers": { + "accept": ["application/json"] }, - "url":{ - "paths":[ + "url": { + "paths": [ { - "path":"/_resolve/cluster/{name}", - "methods":[ - "GET" - ], - "parts":{ - "name":{ - "type":"list", - "description":"A comma-separated list of cluster:index names or wildcard expressions" + "path": "/_resolve/cluster", + "methods": ["GET"] + }, + { + "path": "/_resolve/cluster/{name}", + "methods": ["GET"], + "parts": { + "name": { + "type": "list", + "description": "A comma-separated list of cluster:index names or wildcard expressions" } } } ] }, - "params":{ - "ignore_unavailable":{ - "type":"boolean", - "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)" + "params": { + "ignore_unavailable": { + "type": "boolean", + "description": "Whether specified concrete indices should be ignored when unavailable (missing or closed). Only allowed when providing an index expression." + }, + "ignore_throttled": { + "type": "boolean", + "description": "Whether specified concrete, expanded or aliased indices should be ignored when throttled. Only allowed when providing an index expression." }, - "ignore_throttled":{ - "type":"boolean", - "description":"Whether specified concrete, expanded or aliased indices should be ignored when throttled" + "allow_no_indices": { + "type": "boolean", + "description": "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified). Only allowed when providing an index expression." }, - "allow_no_indices":{ - "type":"boolean", - "description":"Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + "expand_wildcards": { + "type": "enum", + "options": ["open", "closed", "hidden", "none", "all"], + "default": "open", + "description": "Whether wildcard expressions should get expanded to open or closed indices (default: open). Only allowed when providing an index expression." 
}, - "expand_wildcards":{ - "type":"enum", - "options":[ - "open", - "closed", - "hidden", - "none", - "all" - ], - "default":"open", - "description":"Whether wildcard expressions should get expanded to open or closed indices (default: open)" + "timeout": { + "type": "time", + "description": "The maximum time to wait for remote clusters to respond" } } } } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json index 6c458ce080aa7..133354e3ec5be 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json @@ -14,7 +14,7 @@ "paths": [ { "path": "/_inference/{inference_id}/_update", - "methods": ["POST"], + "methods": ["PUT"], "parts": { "inference_id": { "type": "string", @@ -24,7 +24,7 @@ }, { "path": "/_inference/{task_type}/{inference_id}/_update", - "methods": ["POST"], + "methods": ["PUT"], "parts": { "task_type": { "type": "string", diff --git a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java index 675092bffe8d5..e59d8a452b6c3 100644 --- a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java +++ b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java @@ -36,6 +36,7 @@ public class ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { .module("data-streams") .feature(FeatureFlag.TIME_SERIES_MODE) .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) + .feature(FeatureFlag.INDEX_RECOVERY_USE_SYNTHETIC_SOURCE) .build(); public ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml index 1d1aa524ffb21..a45146a4e147a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/10_basic.yml @@ -59,61 +59,6 @@ teardown: - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb" } } - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb" } } --- -"Test update desired nodes with node_version generates a warning": - - skip: - reason: "contains is a newly added assertion" - features: ["contains", "allowed_warnings"] - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." 
- - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - match: - $body: - history_id: "test" - version: 1 - nodes: - - { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 2 - body: - nodes: - - { settings: { "node.name": "instance-000187" }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } - - { settings: { "node.name": "instance-000188" }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." - - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - - match: { history_id: "test" } - - match: { version: 2 } - - length: { nodes: 2 } - - contains: { nodes: { settings: { node: { name: "instance-000187" } }, processors: 8.5, memory: "64gb", storage: "128gb", node_version: $es_version } } - - contains: { nodes: { settings: { node: { name: "instance-000188" } }, processors: 16.0, memory: "128gb", storage: "1tb", node_version: $es_version } } ---- "Test update move to a new history id": - skip: reason: "contains is a newly added assertion" @@ -199,46 +144,6 @@ teardown: _internal.get_desired_nodes: {} - match: { status: 404 } --- -"Test delete desired nodes with node_version generates a warning": - - skip: - features: allowed_warnings - - do: - cluster.state: {} - - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.version: es_version } - - - do: - _internal.update_desired_nodes: - history_id: "test" - version: 1 - body: - nodes: - - { settings: { "node.external_id": "instance-000187" }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - allowed_warnings: - - "[version removal] Specifying node_version in desired nodes requests is deprecated." 
- - match: { replaced_existing_history_id: false } - - - do: - _internal.get_desired_nodes: {} - - match: - $body: - history_id: "test" - version: 1 - nodes: - - { settings: { node: { external_id: "instance-000187" } }, processors: 8.0, memory: "64gb", storage: "128gb", node_version: $es_version } - - - do: - _internal.delete_desired_nodes: {} - - - do: - catch: missing - _internal.get_desired_nodes: {} - - match: { status: 404 } ---- "Test update desired nodes is idempotent": - skip: reason: "contains is a newly added assertion" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml index 096ccbce9a58b..dc476147c9601 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml @@ -1602,6 +1602,74 @@ synthetic_source with copy_to pointing inside object: hits.hits.2.fields: c.copy: [ "100", "hello", "zap" ] +--- +synthetic_source with copy_to inside nested object: + - do: + indices.create: + index: test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + name: + type: keyword + my_values: + type: nested + properties: + k: + type: keyword + copy_to: my_values.copy + second_level: + type: nested + properties: + k2: + type: keyword + copy_to: my_values.copy + copy: + type: keyword + dummy: + type: keyword + + - do: + index: + index: test + id: 1 + refresh: true + body: + name: "A" + my_values: + k: "hello" + + - do: + index: + index: test + id: 2 + refresh: true + body: + name: "B" + my_values: + second_level: + k2: "hello" + + - do: + search: + index: test + sort: name + + - match: + hits.hits.0._source: + name: "A" + my_values: + k: "hello" + - match: + hits.hits.1._source: + name: "B" + my_values: + second_level: + k2: "hello" + --- synthetic_source with copy_to pointing to ambiguous field: - do: @@ -1940,3 +2008,143 @@ create index with use_synthetic_source: flush: false - gt: { test.store_size_in_bytes: 0 } - is_false: test.fields._recovery_source +--- +"Nested synthetic source with indexed dense vectors": + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ synthetic_nested_dense_vector_bug_fix ] + reason: "Requires synthetic source bugfix for dense vectors in nested objects" + - do: + indices.create: + index: nested_dense_vector_synthetic_test + body: + mappings: + properties: + parent: + type: nested + properties: + vector: + type: dense_vector + index: true + similarity: l2_norm + text: + type: text + settings: + index: + mapping: + source: + mode: synthetic + - do: + index: + index: nested_dense_vector_synthetic_test + id: 0 + refresh: true + body: { "parent": [ { "vector": [ 1, 2 ],"text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + - do: + index: + index: nested_dense_vector_synthetic_test + id: 1 + refresh: true + body: { "parent": [ { "text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + - do: + index: + index: nested_dense_vector_synthetic_test + id: 2 + refresh: true + body: { "parent": [ { "vector": [ 1, 2 ] }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + + - do: + search: + index: nested_dense_vector_synthetic_test + body: + query: + match_all: {} + + - match: { hits.hits.0._source.parent.0.vector: [ 1.0, 2.0 ] } + - match: { 
hits.hits.0._source.parent.0.text: "foo" } + - match: { hits.hits.0._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.0._source.parent.1.text: "bar" } + - is_false: hits.hits.1._source.parent.0.vector + - match: { hits.hits.1._source.parent.0.text: "foo" } + - match: { hits.hits.1._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.1._source.parent.1.text: "bar" } + - match: {hits.hits.2._source.parent.0.vector: [ 1.0, 2.0 ] } + - is_false: hits.hits.2._source.parent.0.text + - match: { hits.hits.2._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.2._source.parent.1.text: "bar" } +--- +"Nested synthetic source with un-indexed dense vectors": + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ synthetic_nested_dense_vector_bug_fix ] + reason: "Requires synthetic source bugfix for dense vectors in nested objects" + - do: + indices.create: + index: nested_dense_vector_synthetic_test + body: + mappings: + properties: + parent: + type: nested + properties: + vector: + type: dense_vector + index: false + text: + type: text + settings: + index: + mapping: + source: + mode: synthetic + - do: + index: + index: nested_dense_vector_synthetic_test + id: 0 + refresh: true + body: { "parent": [ { "vector": [ 1, 2 ],"text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + - do: + index: + index: nested_dense_vector_synthetic_test + id: 1 + refresh: true + body: { "parent": [ { "text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + - do: + index: + index: nested_dense_vector_synthetic_test + id: 2 + refresh: true + body: { "parent": [ { "vector": [ 1, 2 ] }, { "vector": [ 2, 2 ], "text": "bar" } ] } + + + - do: + search: + index: nested_dense_vector_synthetic_test + body: + query: + match_all: {} + + - match: { hits.hits.0._source.parent.0.vector: [ 1.0, 2.0 ] } + - match: { hits.hits.0._source.parent.0.text: "foo" } + - match: { hits.hits.0._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.0._source.parent.1.text: "bar" } + - is_false: hits.hits.1._source.parent.0.vector + - match: { hits.hits.1._source.parent.0.text: "foo" } + - match: { hits.hits.1._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.1._source.parent.1.text: "bar" } + - match: {hits.hits.2._source.parent.0.vector: [ 1.0, 2.0 ] } + - is_false: hits.hits.2._source.parent.0.text + - match: { hits.hits.2._source.parent.1.vector: [ 2.0, 2.0 ] } + - match: { hits.hits.2._source.parent.1.text: "bar" } + diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.resolve_cluster/10_basic_resolve_cluster.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.resolve_cluster/10_basic_resolve_cluster.yml index 46bd0b8099e4a..0d1d93513aa68 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.resolve_cluster/10_basic_resolve_cluster.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.resolve_cluster/10_basic_resolve_cluster.yml @@ -39,7 +39,7 @@ setup: - do: indices.resolve_cluster: name: '*' - expand_wildcards: [closed] + expand_wildcards: closed - match: {(local).connected: true} - match: {(local).skip_unavailable: false} @@ -65,7 +65,7 @@ setup: - do: indices.resolve_cluster: name: 'index2*' - expand_wildcards: [open,closed] + expand_wildcards: open,closed - match: {(local).connected: true} - match: {(local).skip_unavailable: false} @@ -75,7 +75,7 @@ setup: - do: indices.resolve_cluster: name: 'index2*' - 
expand_wildcards: [closed] + expand_wildcards: closed - match: {(local).connected: true} - match: {(local).skip_unavailable: false} @@ -115,7 +115,7 @@ setup: - do: indices.resolve_cluster: name: 'my_alias2,doesnotexist*' - expand_wildcards: [all] + expand_wildcards: all - match: {(local).connected: true} - match: {(local).skip_unavailable: false} @@ -141,10 +141,10 @@ setup: - do: indices.resolve_cluster: name: '*' - expand_wildcards: [all] - ignore_unavailable: [true] - ignore_throttled: [true] - allow_no_indices: [true] + expand_wildcards: all + ignore_unavailable: true + ignore_throttled: true + allow_no_indices: true allowed_warnings: - "[ignore_throttled] parameter is deprecated because frozen indices have been deprecated. Consider cold or frozen tiers in place of frozen indices." @@ -157,10 +157,10 @@ setup: - do: indices.resolve_cluster: name: '*' - expand_wildcards: [open] - ignore_unavailable: [false] - ignore_throttled: [false] - allow_no_indices: [false] + expand_wildcards: open + ignore_unavailable: false + ignore_throttled: false + allow_no_indices: false allowed_warnings: - "[ignore_throttled] parameter is deprecated because frozen indices have been deprecated. Consider cold or frozen tiers in place of frozen indices." @@ -170,3 +170,14 @@ setup: - is_false: (local).error # should not be present - exists: (local).version.number +--- +"Resolve cluster with no index expression": + - requires: + cluster_features: ["gte_v8.18.0"] + reason: "resolve cluster with no index expression introduced in 8.18" + + - do: + indices.resolve_cluster: + timeout: 400s + + - is_false: (local).error # should not be present - body should be empty since no remotes configured diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index 2a31b3bd387c4..13107b39d6e1f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -201,6 +201,47 @@ non-default sort settings: - match: { test-sort.settings.index.sort.mode.0: "max" } - match: { test-sort.settings.index.sort.mode.1: "max" } +--- +non-default sort settings with presence of nested: + - requires: + cluster_features: [ "mapper.nested.sorting_fields_check_fix" ] + reason: "Fixed behavior" + + - do: + indices.create: + index: test-sort + body: + settings: + index: + mode: logsdb + number_of_shards: 2 + number_of_replicas: 0 + sort: + field: [ "agent_id", "@timestamp" ] + order: [ "asc", "desc" ] + mappings: + properties: + "@timestamp": + type: date + agent_id: + type: keyword + agent: + type: nested + properties: + id: + type: keyword + + - do: + indices.get_settings: + index: test-sort + + - is_true: test-sort + - match: { test-sort.settings.index.mode: "logsdb" } + - match: { test-sort.settings.index.sort.field.0: "agent_id" } + - match: { test-sort.settings.index.sort.field.1: "@timestamp" } + - match: { test-sort.settings.index.sort.order.0: "asc" } + - match: { test-sort.settings.index.sort.order.1: "desc" } + --- override sort order settings: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml new file mode 100644 index 0000000000000..58f09ec71ad61 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml @@ 
-0,0 +1,41 @@ +--- +copy_to from object with dynamic strict to dynamic field: + - requires: + cluster_features: ["mapper.copy_to.dynamic_handling"] + reason: requires a fix + + - do: + indices.create: + index: test + body: + mappings: + properties: + one: + dynamic: strict + properties: + k: + type: keyword + copy_to: two.k + + - do: + index: + index: test + id: 1 + refresh: true + body: + one: + k: "hey" + + - do: + search: + index: test + body: + docvalue_fields: [ "two.k.keyword" ] + + - match: + hits.hits.0._source: + one: + k: "hey" + - match: + hits.hits.0.fields: + two.k.keyword: [ "hey" ] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/10_basic.yml index fbfb06dcda9a1..c28b947b112a8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/10_basic.yml @@ -3,6 +3,7 @@ setup: - do: indices.create: index: testidx + wait_for_active_shards: all body: mappings: properties: @@ -80,6 +81,7 @@ setup: - do: indices.create: index: testidx2 + wait_for_active_shards: all - do: indices.put_alias: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml index f9f8d56e1d9c9..0a2da6e14a6ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml @@ -57,8 +57,15 @@ setup: another_vector: [-0.5, 11.0, 0, 12] - do: - indices.refresh: {} + indices.flush: { } + # For added test reliability, pending the resolution of https://github.com/elastic/elasticsearch/issues/109416. 
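+ # (Assumed rationale, continuing the note above: quantization parameters are
+ # computed per segment, so merging to a single segment should keep scores
+ # deterministic for the assertions that follow.)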
+ - do: + indices.forcemerge: + max_num_segments: 1 + index: int4_flat + - do: + indices.refresh: {} --- "kNN search only": - do: @@ -192,13 +199,14 @@ setup: num_candidates: 3 k: 3 field: vector - similarity: 10.3 + # Set high allowed similarity, reduce once we can update underlying quantization algo + similarity: 110 query_vector: [-0.5, 90.0, -10, 14.8] - - length: {hits.hits: 1} + - is_true: hits.hits.0 - - match: {hits.hits.0._id: "2"} - - match: {hits.hits.0.fields.name.0: "moose.jpg"} + #- match: {hits.hits.0._id: "2"} + #- match: {hits.hits.0.fields.name.0: "moose.jpg"} --- "Vector similarity with filter only": - do: @@ -210,7 +218,8 @@ setup: num_candidates: 3 k: 3 field: vector - similarity: 11 + # Set high allowed similarity, reduce once we can update underlying quantization algo + similarity: 110 query_vector: [-0.5, 90.0, -10, 14.8] filter: {"term": {"name": "moose.jpg"}} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 81ca84a06f815..c1fdb8adc8ee9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -230,7 +230,6 @@ dfs knn vector profiling: - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight: 0 } - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight_count: 0 } - gt: { profile.shards.0.dfs.knn.0.rewrite_time: 0 } - - match: { profile.shards.0.dfs.knn.0.collector.0.name: "SimpleTopScoreDocCollector" } - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } @@ -275,6 +274,47 @@ dfs knn vector profiling description: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } +--- +dfs knn vector profiling collector name: + - requires: + cluster_features: "lucene_10_1_upgrade" + reason: collector name changed with lucene 10.1 + + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, -20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { profile.shards.0.dfs.knn.0.collector.0.name: "TopScoreDocCollector" } + --- dfs knn vector profiling with vector_operations_count: - requires: @@ -335,7 +375,6 @@ dfs knn vector profiling with vector_operations_count: - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight: 0 } - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight_count: 0 } - gt: { profile.shards.0.dfs.knn.0.rewrite_time: 0 } - - match: { profile.shards.0.dfs.knn.0.collector.0.name: "SimpleTopScoreDocCollector" } - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml index f88726469f51c..71b4ec9c128d8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml 
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml @@ -395,3 +395,75 @@ setup: field: suggest_multi_contexts contexts: location: [] + +--- +"Duplicate suggestions in different contexts": + - requires: + cluster_features: [ "search.completion_field.duplicate.support" ] + reason: "Support for duplicate suggestions in different contexts" + + - do: + index: + refresh: true + index: test + id: "1" + body: + suggest_context: + - + input: "foox" + weight: 2 + contexts: + color: ["red", "yellow"] + - + input: "foox" + weight: 3 + contexts: + color: ["blue", "green", "yellow"] + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_context + contexts: + color: "red" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + - match: { suggest.result.0.options.0._score: 2 } + + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_context + contexts: + color: "yellow" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + # the highest weight wins + - match: { suggest.result.0.options.0._score: 3 } + + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_context + contexts: + color: "blue" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + - match: { suggest.result.0.options.0._score: 3 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml index 8bbda56db7e53..37a937bd59b5a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml @@ -268,3 +268,80 @@ - length: { suggest.result: 1 } - length: { suggest.result.0.options: 1 } + +--- +"Duplicate suggestions in different contexts in sub-fields": + - requires: + cluster_features: [ "search.completion_field.duplicate.support" ] + reason: "Support for duplicate suggestions in different contexts" + + - do: + indices.create: + index: completion_with_context + body: + mappings: + "properties": + "suggest_1": + "type": "completion" + "contexts": + - + "name": "color" + "type": "category" + "fields": + "suggest_2": + "type": "completion" + "contexts": + - + "name": "color" + "type": "category" + + + - do: + index: + refresh: true + index: completion_with_context + id: "1" + body: + suggest_1: + - + input: "foox" + weight: 2 + contexts: + color: ["red"] + - + input: "foox" + weight: 3 + contexts: + color: ["blue", "green"] + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_1.suggest_2 + contexts: + color: "red" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + - match: { suggest.result.0.options.0._score: 2 } + + + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_1.suggest_2 + contexts: + color: "blue" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + - match: { 
suggest.result.0.options.0._score: 3 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml
index 9543783f0d6a3..02db799e52e51 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml
@@ -14,6 +14,12 @@ setup:
- synonyms: "bye => goodbye"
id: "synonym-rule-2"
+ # This is to ensure that all index shards (write and read) are available. In serverless this can take some time.
+ - do:
+ cluster.health:
+ index: .synonyms
+ wait_for_status: green
+
# Create synonyms synonyms_set2
- do:
synonyms.put_synonym:
@@ -25,12 +31,6 @@ setup:
- synonyms: "bye => goodbye"
id: "synonym-rule-2"
- # This is to ensure that all index shards (write and read) are available. In serverless this can take some time.
- - do:
- cluster.health:
- index: .synonyms
- wait_for_status: green
-
# Create my_index1 with synonym_filter that uses synonyms_set1
- do:
indices.create:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/160_nested_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/160_nested_fields.yml
new file mode 100644
index 0000000000000..f4aca5ab264e8
--- /dev/null
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/160_nested_fields.yml
@@ -0,0 +1,233 @@
+setup:
+ - requires:
+ cluster_features: ["mapper.tsdb_nested_field_support"]
+ reason: "tsdb index with nested field support enabled"
+
+---
+"Create TSDB index with field of nested type":
+ - do:
+ indices.create:
+ index: test
+ body:
+ settings:
+ index:
+ mode: time_series
+ number_of_replicas: 1
+ number_of_shards: 1
+ routing_path: [department]
+ time_series:
+ start_time: 2021-04-28T00:00:00Z
+ end_time: 2021-04-29T00:00:00Z
+ mappings:
+ properties:
+ "@timestamp":
+ type: date
+ department:
+ type: keyword
+ time_series_dimension: true
+ staff:
+ type: integer
+ courses:
+ type: nested
+ properties:
+ name:
+ type: keyword
+ credits:
+ type: integer
+
+ - do:
+ index:
+ index: test
+ body: { "@timestamp": "2021-04-28T01:00:00Z", "department": "compsci", "staff": 12, "courses": [ { "name": "Object Oriented Programming", "credits": 3 }, { "name": "Theory of Computation", "credits": 4 } ] }
+
+ - do:
+ index:
+ index: test
+ body: { "@timestamp": "2021-04-28T02:00:00Z", "department": "math", "staff": 20, "courses": [ { "name": "Precalculus", "credits": 1 }, { "name": "Linear Algebra", "credits": 3 } ] }
+
+ - do:
+ indices.refresh:
+ index: [ test ]
+
+ - do:
+ search:
+ index: test
+ body:
+ size: 0
+ query:
+ nested:
+ path: "courses"
+ query:
+ bool:
+ must:
+ - term:
+ courses.name: Precalculus
+ - term:
+ courses.credits: 3
+
+ - match: { hits.total.value: 0 }
+
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ nested:
+ path: "courses"
+ query:
+ bool:
+ must:
+ - term:
+ courses.name: "Object Oriented Programming"
+ - term:
+ courses.credits: 3
+
+ - match: { hits.total.value: 1 }
+ - match: { "hits.hits.0._source.@timestamp": "2021-04-28T01:00:00.000Z" }
+ - match: { hits.hits.0._source.department: "compsci" }
+ - match: { hits.hits.0._source.courses: [ { "name": "Object Oriented Programming", "credits": 3 }, { "name": "Theory of Computation", "credits": 4 } ] }
+
+---
+
+"TSDB index with multi-level nested fields":
+ - do:
+
indices.create: + index: test + body: + settings: + index: + mode: time_series + number_of_replicas: 1 + number_of_shards: 1 + routing_path: [department] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + department: + type: keyword + time_series_dimension: true + staff: + type: integer + courses: + type: nested + properties: + name: + type: keyword + credits: + type: integer + students: + type: nested + properties: + name: + type: text + major: + type: keyword + + - do: + index: + index: test + body: + "@timestamp": "2021-04-28T01:00:00Z" + department: "compsci" + staff: 12 + courses: + - name: "Object Oriented Programming" + credits: 3 + students: + - name: "Kimora Tanner" + major: "Computer Science" + - name: "Bruno Garrett" + major: "Software Engineering" + - name: "Theory of Computation" + credits: 4 + students: + - name: "Elliott Booker" + major: "Computer Engineering" + - name: "Kimora Tanner" + major: "Software Engineering" + + - do: + index: + index: test + body: + "@timestamp": "2021-04-28T02:00:00Z" + department: "math" + staff: 20 + courses: + - name: "Precalculus" + credits: 4 + students: + - name: "Elliott Ayers" + major: "Software Engineering" + - name: "Sylvie Howe" + major: "Computer Engineering" + - name: "Linear Algebra" + credits: 3 + students: + - name: "Kimora Tanner" + major: "Computer Science" + - name: "Bruno Garett" + major: "Software Engineering" + - name: "Amelia Booker" + major: "Psychology" + + - do: + index: + index: test + body: + "@timestamp": "2021-04-28T03:00:00Z" + department: "compsci" + staff: 12 + courses: + - name: "Object Oriented Programming" + credits: 3 + students: + - name: "Kimora Tanner" + major: "Computer Science" + - name: "Bruno Garrett" + major: "Software Engineering" + - name: "Elliott Booker" + major: "Computer Engineering" + - name: "Theory of Computation" + credits: 4 + students: + - name: "Kimora Tanner" + major: "Software Engineering" + - name: "Elliott Ayers" + major: "Software Engineering" + - name: "Apollo Pittman" + major: "Computer Engineering" + + - do: + indices.refresh: + index: [ test ] + + - do: + search: + index: test + body: + query: + nested: + path: "courses" + query: + bool: + must: + - nested: + path: "courses.students" + query: + bool: + must: + - match: + courses.students.name: "Elliott" + - term: + courses.students.major: "Computer Engineering" + - term: + courses.name: "Theory of Computation" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._source.department: "compsci" } + - match: { "hits.hits.0._source.@timestamp": "2021-04-28T01:00:00.000Z" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index f25601fc2e228..5963ddb46e0b3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -344,37 +344,6 @@ nested dimensions: type: keyword time_series_dimension: true ---- -nested fields: - - requires: - cluster_features: ["gte_v8.2.0"] - reason: message changed in 8.2.0 - - - do: - catch: /cannot have nested fields when index is in \[index.mode=time_series\]/ - indices.create: - index: test - body: - settings: - index: - mode: time_series - routing_path: [dim] - time_series: - start_time: 2021-04-28T00:00:00Z - end_time: 2021-04-29T00:00:00Z - mappings: - properties: - 
"@timestamp": - type: date - dim: - type: keyword - time_series_dimension: true - nested: - type: nested - properties: - foo: - type: keyword - --- "Unable to define a metric type for a runtime field": - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/100_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/100_synthetic_source.yml index f4894692b6cad..219bc52c4e28c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/100_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/100_synthetic_source.yml @@ -6,8 +6,8 @@ setup: --- keyword: - requires: - cluster_features: ["gte_v8.4.0"] - reason: introduced in 8.4.0 + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source - do: indices.create: @@ -60,13 +60,14 @@ keyword: index: test run_expensive_tasks: true - is_false: test.fields._source - - is_true: test.fields._recovery_source + # When synthetic source is used there is no _recovery_source field + - match: { test.fields._recovery_source: null } --- stored text: - requires: - cluster_features: ["gte_v8.5.0"] - reason: introduced in 8.5.0 + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source - do: indices.create: @@ -121,4 +122,5 @@ stored text: index: test run_expensive_tasks: true - is_false: test.fields._source - - is_true: test.fields._recovery_source + # When synthetic source is used there is no _recovery_source field + - match: { test.fields._recovery_source: null } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index a5aa39f5feb1e..83e79ff7f45a8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -85,7 +85,7 @@ public void testMissingKeystoreFile() throws Exception { final Environment environment = internalCluster().getInstance(Environment.class); final AtomicReference reloadSettingsError = new AtomicReference<>(); // keystore file should be missing for this test case - Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configFile())); + Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configDir())); final int initialReloadCount = mockReloadablePlugin.getReloadCount(); final CountDownLatch latch = new CountDownLatch(1); executeReloadSecureSettings(Strings.EMPTY_ARRAY, emptyPassword(), new ActionListener<>() { @@ -129,10 +129,10 @@ public void testInvalidKeystoreFile() throws Exception { final int initialReloadCount = mockReloadablePlugin.getReloadCount(); // invalid "keystore" file should be present in the config dir try (InputStream keystore = ReloadSecureSettingsIT.class.getResourceAsStream("invalid.txt.keystore")) { - if (Files.exists(environment.configFile()) == false) { - Files.createDirectory(environment.configFile()); + if (Files.exists(environment.configDir()) == false) { + Files.createDirectory(environment.configDir()); } - Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configFile()), StandardCopyOption.REPLACE_EXISTING); + Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configDir()), StandardCopyOption.REPLACE_EXISTING); } final CountDownLatch latch = new CountDownLatch(1); 
executeReloadSecureSettings(Strings.EMPTY_ARRAY, emptyPassword(), new ActionListener<>() { @@ -363,7 +363,7 @@ public void testInvalidKeyInSettings() throws Exception { try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { keyStoreWrapper.setString(VALID_SECURE_SETTING_NAME, new char[0]); - keyStoreWrapper.save(environment.configFile(), new char[0], false); + keyStoreWrapper.save(environment.configDir(), new char[0], false); } PlainActionFuture actionFuture = new PlainActionFuture<>(); @@ -374,7 +374,7 @@ public void testInvalidKeyInSettings() throws Exception { try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { assertThat(keyStoreWrapper, notNullValue()); keyStoreWrapper.setString("some.setting.that.does.not.exist", new char[0]); - keyStoreWrapper.save(environment.configFile(), new char[0], false); + keyStoreWrapper.save(environment.configDir(), new char[0], false); } actionFuture = new PlainActionFuture<>(); @@ -432,7 +432,7 @@ public void onFailure(Exception e) { private SecureSettings writeEmptyKeystore(Environment environment, char[] password) throws Exception { final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); - keyStoreWrapper.save(environment.configFile(), password, false); + keyStoreWrapper.save(environment.configDir(), password, false); return keyStoreWrapper; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java new file mode 100644 index 0000000000000..0a29b99ca6fdc --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.action.admin.indices.mapping.put; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.MockLog; +import org.elasticsearch.test.junit.annotations.TestLogging; + +import static org.hamcrest.Matchers.equalTo; + +public class PutMappingIT extends ESSingleNodeTestCase { + + @TestLogging( + reason = "testing DEBUG logging", + value = "org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction:DEBUG" + ) + public void testFailureLogging() { + final var indexName = randomIdentifier(); + createIndex(indexName); + final var fieldName = randomIdentifier(); + safeGet(client().execute(TransportPutMappingAction.TYPE, new PutMappingRequest(indexName).source(fieldName, "type=keyword"))); + MockLog.assertThatLogger( + () -> assertThat( + asInstanceOf( + IllegalArgumentException.class, + safeAwaitFailure( + AcknowledgedResponse.class, + l -> client().execute( + TransportPutMappingAction.TYPE, + new PutMappingRequest(indexName).source(fieldName, "type=long"), + l + ) + ) + ).getMessage(), + equalTo("mapper [" + fieldName + "] cannot be changed from type [keyword] to [long]") + ), + TransportPutMappingAction.class, + new MockLog.SeenEventExpectation( + "failure message", + TransportPutMappingAction.class.getCanonicalName(), + Level.DEBUG, + "failed to put mappings on indices [[" + indexName + ) + ); + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java index a130a5b869adc..3338675160268 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -220,7 +220,7 @@ public void testUpdatePriority() { public void testIndexDirIsDeletedWhenShardRemoved() throws Exception { Environment env = getInstanceFromNode(Environment.class); - Path idxPath = env.sharedDataFile().resolve(randomAlphaOfLength(10)); + Path idxPath = env.sharedDataDir().resolve(randomAlphaOfLength(10)); logger.info("--> idxPath: [{}]", idxPath); Settings idxSettings = Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, idxPath).build(); createIndex("test", idxSettings); @@ -254,7 +254,7 @@ public void testExpectedShardSizeIsPresent() throws InterruptedException { public void testIndexCanChangeCustomDataPath() throws Exception { final String index = "test-custom-data-path"; - final Path sharedDataPath = getInstanceFromNode(Environment.class).sharedDataFile().resolve(randomAsciiLettersOfLength(10)); + final Path sharedDataPath = getInstanceFromNode(Environment.class).sharedDataDir().resolve(randomAsciiLettersOfLength(10)); final Path indexDataPath = sharedDataPath.resolve("start-" + randomAsciiLettersOfLength(10)); logger.info("--> creating index [{}] with data_path [{}]", index, indexDataPath); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java index 4e9e4b4d641d2..b9513dfb95187 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -32,7 +32,6 @@ 
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -531,8 +530,7 @@ public void testResolvePath() throws Exception { nodeNameToNodeId.put(cursor.getValue().getName(), cursor.getKey()); } - final GroupShardsIterator<ShardIterator> shardIterators = state.getRoutingTable() - .activePrimaryShardsGrouped(new String[] { indexName }, false); + final List<ShardIterator> shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[] { indexName }, false); final List<ShardIterator> iterators = iterableAsArrayList(shardIterators); final ShardRouting shardRouting = iterators.iterator().next().nextOrNull(); assertThat(shardRouting, notNullValue()); @@ -562,7 +560,7 @@ public void testResolvePath() throws Exception { command.findAndProcessShardPath( options, environmentByNodeName.get(nodeName), - environmentByNodeName.get(nodeName).dataFiles(), + environmentByNodeName.get(nodeName).dataDirs(), state, shardPath -> assertThat(shardPath.resolveIndex(), equalTo(indexPath)) ); @@ -571,8 +569,7 @@ private Path getPathToShardData(String indexName, String dirSuffix) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator<ShardIterator> shardIterators = state.getRoutingTable() - .activePrimaryShardsGrouped(new String[] { indexName }, false); + List<ShardIterator> shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[] { indexName }, false); List<ShardIterator> iterators = iterableAsArrayList(shardIterators); ShardIterator shardIterator = RandomPicks.randomFrom(random(), iterators); ShardRouting shardRouting = shardIterator.nextOrNull(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java index 1594514d2f415..141f24e283b0c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ -311,8 +310,7 @@ public void testCorruptPrimaryNoReplica() throws ExecutionException, Interrupted } assertThat(response.getStatus(), is(ClusterHealthStatus.RED)); ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator<ShardIterator> shardIterators = state.getRoutingTable() - .activePrimaryShardsGrouped(new String[] { "test" }, false); + List<ShardIterator> shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[] { "test" }, false); for (ShardIterator iterator : shardIterators) { ShardRouting routing; while ((routing = iterator.nextOrNull()) != null) { @@ -667,7 +665,7 @@ public void testReplicaCorruption() throws Exception { private int numShards(String...
index) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator<ShardIterator> shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(index, false); + List<ShardIterator> shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(index, false); return shardIterators.size(); } @@ -695,8 +693,7 @@ private ShardRouting corruptRandomPrimaryFile() throws IOException { private ShardRouting corruptRandomPrimaryFile(final boolean includePerCommitFiles) throws IOException { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); Index test = state.metadata().index("test").getIndex(); - GroupShardsIterator<ShardIterator> shardIterators = state.getRoutingTable() - .activePrimaryShardsGrouped(new String[] { "test" }, false); + List<ShardIterator> shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[] { "test" }, false); List<ShardIterator> iterators = iterableAsArrayList(shardIterators); ShardIterator shardIterator = RandomPicks.randomFrom(random(), iterators); ShardRouting shardRouting = shardIterator.nextOrNull(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java index 5cb468da70996..9256065f0d0cd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.index.search.stats.SearchStats; @@ -24,6 +23,7 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.HashSet; +import java.util.List; import java.util.Set; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -146,7 +146,7 @@ private SearchRequestBuilder addSuggestions(SearchRequestBuilder request, int i) private Set<String> nodeIdsWithIndex(String... indices) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator<ShardIterator> allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); + List<ShardIterator> allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); Set<String> nodes = new HashSet<>(); for (ShardIterator shardIterator : allAssignedShardsGrouped) { for (ShardRouting routing : shardIterator) { @@ -161,7 +161,7 @@ private Set<String> nodeIdsWithIndex(String... indices) { protected int numAssignedShards(String...
indices) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator<ShardIterator> allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); + List<ShardIterator> allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); return allAssignedShardsGrouped.size(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ResolveClusterTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ResolveClusterTimeoutIT.java new file mode 100644 index 0000000000000..7236702af12e2 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ResolveClusterTimeoutIT.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.indices.cluster; + +import org.elasticsearch.action.admin.indices.resolve.ResolveClusterActionRequest; +import org.elasticsearch.action.admin.indices.resolve.ResolveClusterActionResponse; +import org.elasticsearch.action.admin.indices.resolve.ResolveClusterInfo; +import org.elasticsearch.action.admin.indices.resolve.TransportResolveClusterAction; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.transport.TransportService; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class ResolveClusterTimeoutIT extends AbstractMultiClustersTestCase { + private static final String REMOTE_CLUSTER_1 = "cluster-a"; + + @Override + protected List<String> remoteClusterAlias() { + return List.of(REMOTE_CLUSTER_1); + } + + public void testTimeoutParameter() { + long maxTimeoutInMillis = 500; + + // First part: we query _resolve/cluster without stalling a remote. + ResolveClusterActionRequest resolveClusterActionRequest; + if (randomBoolean()) { + resolveClusterActionRequest = new ResolveClusterActionRequest(new String[0], IndicesOptions.DEFAULT, true, true); + } else { + resolveClusterActionRequest = new ResolveClusterActionRequest(new String[] { "*:*" }); + } + + // We set a timeout, but since we don't stall any cluster, we should always get back a response just fine before the timeout. + resolveClusterActionRequest.setTimeout(TimeValue.timeValueSeconds(10)); + ResolveClusterActionResponse clusterActionResponse = safeGet( + client().execute(TransportResolveClusterAction.TYPE, resolveClusterActionRequest) + ); + Map<String, ResolveClusterInfo> clusterInfo = clusterActionResponse.getResolveClusterInfo(); + + // Remote is connected and error message is null. + assertThat(clusterInfo.get(REMOTE_CLUSTER_1).isConnected(), equalTo(true)); + assertThat(clusterInfo.get(REMOTE_CLUSTER_1).getError(), is(nullValue())); + + // Second part: now we stall the remote and utilise the timeout feature.
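+ // The stalled handler below parks on this latch, so the next request is guaranteed to hit its timeout before the remote ever responds; the latch is only released once the timed-out response has been received.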
+ CountDownLatch latch = new CountDownLatch(1); + + // Add an override so that the remote cluster receives the TransportResolveClusterAction request but stalls. + for (var nodes : cluster(REMOTE_CLUSTER_1).getNodeNames()) { + ((MockTransportService) cluster(REMOTE_CLUSTER_1).getInstance(TransportService.class, nodes)).addRequestHandlingBehavior( + TransportResolveClusterAction.REMOTE_TYPE.name(), + (requestHandler, transportRequest, transportChannel, transportTask) -> { + // Wait until the TransportResolveRequestAction times out following which the latch is released. + latch.await(); + requestHandler.messageReceived(transportRequest, transportChannel, transportTask); + } + ); + } + + long randomlyChosenTimeout = randomLongBetween(100, maxTimeoutInMillis); + // We now randomly choose a timeout which is guaranteed to hit since the remote is stalled. + resolveClusterActionRequest.setTimeout(TimeValue.timeValueMillis(randomlyChosenTimeout)); + + clusterActionResponse = safeGet(client().execute(TransportResolveClusterAction.TYPE, resolveClusterActionRequest)); + latch.countDown(); + + clusterInfo = clusterActionResponse.getResolveClusterInfo(); + + // Ensure that the request timed out and that the remote is marked as not connected. + assertThat(clusterInfo.get(REMOTE_CLUSTER_1).isConnected(), equalTo(false)); + assertThat( + clusterInfo.get(REMOTE_CLUSTER_1).getError(), + equalTo("Request timed out before receiving a response from the remote cluster") + ); + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 9a7a77bf77a87..dc4dfc88b2c12 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; @@ -23,6 +24,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.indices.IndicesService; @@ -720,4 +722,35 @@ private void assertEqualsAndStringsInterned(List queryFieldsSetting, Set } } + public void testMultipleSettingsUpdateWithMetadataWriteBlock() { + final var indexName = randomIdentifier(); + createIndex(indexName, Settings.builder().put(IndexMetadata.APIBlock.READ_ONLY.settingName(), true).build()); + + // Metadata writes are blocked by the READ_ONLY block + expectThrows( + ClusterBlockException.class, + () -> updateIndexSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "12s"), indexName) + ); + + var randomSetting = randomFrom(IndexMetadata.APIBlock.READ_ONLY, IndexMetadata.APIBlock.READ_ONLY_ALLOW_DELETE).settingName(); + updateIndexSettings( + Settings.builder() + .put(randomSetting, true) // still has the metadata write block... 
+ .put(IndexMetadata.APIBlock.WRITE.settingName(), true) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "12s"), // should not be allowed + indexName + ); + + assertThat( + indicesAdmin().prepareGetSettings(indexName) + .get() + .getIndexToSettings() + .get(indexName) + .get(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), + equalTo("12s") + ); + + // Updating the setting alone should always work + updateIndexSettings(Settings.builder().put(IndexMetadata.APIBlock.READ_ONLY.settingName(), false)); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskCreationFailureIT.java b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskCreationFailureIT.java index 6452968f2467f..d6da940b01881 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskCreationFailureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/persistent/PersistentTaskCreationFailureIT.java @@ -77,7 +77,9 @@ public ClusterState execute(ClusterState currentState) { .pendingTasks() .stream() .filter( - pendingClusterTask -> pendingClusterTask.getSource().string().equals("finish persistent task (failed)") + pendingClusterTask -> pendingClusterTask.getSource() + .string() + .matches("finish persistent task \\[.*] \\(failed\\)") ) .count(); assertThat(completePersistentTaskPendingTasksCount, lessThanOrEqualTo(1L)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java index d3d46a87b9204..564d18ef4a611 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.rest; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -20,6 +21,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -30,15 +32,21 @@ import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.function.Supplier; +import static org.elasticsearch.test.rest.ESRestTestCase.responseAsParser; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -58,6 +66,49 @@ public void testHeadersEmittedWithChunkedResponses() throws IOException { assertEquals(ChunkedResponseWithHeadersPlugin.HEADER_VALUE, 
response.getHeader(ChunkedResponseWithHeadersPlugin.HEADER_NAME)); } + public void testHeadersAreCollapsed() throws IOException { + final var client = getRestClient(); + final var request = new Request("GET", TestEchoHeadersPlugin.ROUTE); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("X-Foo", "1").addHeader("X-Foo", "2").build()); + final var response = client.performRequest(request); + var responseMap = responseAsParser(response).map(HashMap::new, XContentParser::list); + assertThat(responseMap, hasEntry(equalTo("X-Foo"), containsInAnyOrder("1", "2"))); + } + + public void testHeadersTreatedCaseInsensitive() throws IOException { + final var client = getRestClient(); + final var request = new Request("GET", TestEchoHeadersPlugin.ROUTE); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("X-Foo", "1").addHeader("x-foo", "2").build()); + final var response = client.performRequest(request); + var responseMap = responseAsParser(response).map(HashMap::new, XContentParser::list); + assertThat(responseMap, hasEntry(equalTo("x-foo"), containsInAnyOrder("1", "2"))); + assertThat(responseMap, hasEntry(equalTo("X-Foo"), containsInAnyOrder("1", "2"))); + } + + public void testThreadContextPopulationFromMultipleHeadersFailsWithCorrectError() { + final var client = getRestClient(); + final var sameCaseRequest = new Request("GET", TestEchoHeadersPlugin.ROUTE); + sameCaseRequest.setOptions( + RequestOptions.DEFAULT.toBuilder() + .addHeader("x-elastic-product-origin", "elastic") + .addHeader("x-elastic-product-origin", "other") + ); + var exception1 = expectThrows(ResponseException.class, () -> client.performRequest(sameCaseRequest)); + assertThat(exception1.getMessage(), containsString("multiple values for single-valued header [X-elastic-product-origin]")); + } + + public void testMultipleProductOriginHeadersWithDifferentCaseFailsWithCorrectError() { + final var client = getRestClient(); + final var differentCaseRequest = new Request("GET", TestEchoHeadersPlugin.ROUTE); + differentCaseRequest.setOptions( + RequestOptions.DEFAULT.toBuilder() + .addHeader("X-elastic-product-origin", "elastic") + .addHeader("x-elastic-product-origin", "other") + ); + var exception2 = expectThrows(ResponseException.class, () -> client.performRequest(differentCaseRequest)); + assertThat(exception2.getMessage(), containsString("multiple values for single-valued header [X-elastic-product-origin]")); + } + public void testMetricsEmittedOnSuccess() throws Exception { final var client = getRestClient(); final var request = new Request("GET", TestEchoStatusCodePlugin.ROUTE); @@ -125,7 +176,12 @@ private void assertMeasurement(Consumer measurementConsumer) throws @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return List.of(ChunkedResponseWithHeadersPlugin.class, TestEchoStatusCodePlugin.class, TestTelemetryPlugin.class); + return List.of( + ChunkedResponseWithHeadersPlugin.class, + TestEchoStatusCodePlugin.class, + TestEchoHeadersPlugin.class, + TestTelemetryPlugin.class + ); } public static class TestEchoStatusCodePlugin extends Plugin implements ActionPlugin { @@ -181,6 +237,62 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } } + public static class TestEchoHeadersPlugin extends Plugin implements ActionPlugin { + static final String ROUTE = "/_test/echo_headers"; + static final String NAME = "test_echo_headers"; + + private static final Logger logger = LogManager.getLogger(TestEchoHeadersPlugin.class); + + @Override + public Collection<RestHandler> getRestHandlers(
Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier<DiscoveryNodes> nodesInCluster, + Predicate<NodeFeature> clusterSupportsFeature + ) { + return List.of(new BaseRestHandler() { + @Override + public String getName() { + return NAME; + } + + @Override + public List<Route> routes() { + return List.of(new Route(RestRequest.Method.GET, ROUTE), new Route(RestRequest.Method.POST, ROUTE)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + var headers = request.getHeaders(); + logger.info("received header echo request for [{}]", String.join(",", headers.keySet())); + + return channel -> { + final var response = RestResponse.chunked( + RestStatus.OK, + ChunkedRestResponseBodyPart.fromXContent( + params -> Iterators.concat( + ChunkedToXContentHelper.startObject(), + Iterators.map(headers.entrySet().iterator(), e -> (b, p) -> b.field(e.getKey(), e.getValue())), + ChunkedToXContentHelper.endObject() + ), + request, + channel + ), + null + ); + channel.sendResponse(response); + logger.info("sent response"); + }; + } + }); + } + } + + public static class ChunkedResponseWithHeadersPlugin extends Plugin implements ActionPlugin { static final String ROUTE = "/_test/chunked_response_with_headers"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 5f8bc57dcbe09..0cc1c89b36d19 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -238,7 +238,6 @@ public void testCancelMultiSearch() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99929") public void testCancelFailedSearchWhenPartialResultDisallowed() throws Exception { // Have at least two nodes so that we have parallel execution of two requests guaranteed even if max concurrent requests per node // are limited to 1 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java index f63f09764621b..f79321ef8d0de 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java @@ -9,36 +9,70 @@ package org.elasticsearch.search; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.search.Scorable; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.Weight; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CharsRefBuilder; import
org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.MockScriptPlugin; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptType; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.rescore.RescoreContext; +import org.elasticsearch.search.rescore.Rescorer; +import org.elasticsearch.search.rescore.RescorerBuilder; +import org.elasticsearch.search.suggest.SortBy; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.Suggester; +import org.elasticsearch.search.suggest.SuggestionSearchContext; +import org.elasticsearch.search.suggest.term.TermSuggestion; +import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import org.elasticsearch.xcontent.XContentBuilder; +import java.io.IOException; import java.util.Collection; import java.util.Collections; -import java.util.Map; +import java.util.List; import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; -import static org.elasticsearch.search.SearchTimeoutIT.ScriptedTimeoutPlugin.SCRIPT_NAME; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) +@ESIntegTestCase.SuiteScopeTestCase public class SearchTimeoutIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(ScriptedTimeoutPlugin.class); + return Collections.singleton(SearchTimeoutPlugin.class); } @Override @@ -46,75 +80,476 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).build(); } - private void indexDocs() { - for (int i = 0; i < 32; i++) { - prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").get(); - } - refresh("test"); - } - - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98369") - public void testTopHitsTimeout() { - indexDocs(); - SearchResponse searchResponse = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS)) - .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap()))) - .get(); - assertThat(searchResponse.isTimedOut(), equalTo(true)); - assertEquals(0, 
searchResponse.getShardFailures().length); - assertEquals(0, searchResponse.getFailedShards()); - assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); - assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); - assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); - } - - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98053") - public void testAggsTimeout() { - indexDocs(); - SearchResponse searchResponse = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS)) - .setSize(0) - .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap()))) - .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")) - .get(); - assertThat(searchResponse.isTimedOut(), equalTo(true)); - assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(0, searchResponse.getFailedShards()); - assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); - assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); - assertEquals(searchResponse.getHits().getHits().length, 0); - StringTerms terms = searchResponse.getAggregations().get("terms"); - assertEquals(1, terms.getBuckets().size()); - StringTerms.Bucket bucket = terms.getBuckets().get(0); - assertEquals("value", bucket.getKeyAsString()); - assertThat(bucket.getDocCount(), greaterThan(0L)); - } - - public void testPartialResultsIntolerantTimeout() throws Exception { - prepareIndex("test").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + @Override + protected void setupSuiteScopeCluster() throws Exception { + super.setupSuiteScopeCluster(); + indexRandom(true, "test", randomIntBetween(20, 50)); + } + + /** + * Test the scenario where the query times out before starting to collect documents, verify that partial hits are not returned + */ + public void testTopHitsTimeoutBeforeCollecting() { + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(false)); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + // timeout happened before we could collect any doc, total hits is 0 and no hits are returned + assertEquals(0, searchResponse.getHits().getTotalHits().value()); + assertEquals(0, searchResponse.getHits().getHits().length); + }); + } + + /** + * Test the scenario where the query times out while collecting documents, verify that partial hits results are returned + */ + public void testTopHitsTimeoutWhileCollecting() { + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(true)); + 
ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); + assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); + }); + } + + /** + * Test the scenario where the query times out before starting to collect documents, verify that partial aggs results are not returned + */ + public void testAggsTimeoutBeforeCollecting() { + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setSize(0) + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + .setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(false)) + .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertEquals(0, searchResponse.getHits().getTotalHits().value()); + assertEquals(0, searchResponse.getHits().getHits().length); + StringTerms terms = searchResponse.getAggregations().get("terms"); + // timeout happened before we could collect any doc, total hits is 0 and no buckets are returned + assertEquals(0, terms.getBuckets().size()); + }); + } + + /** + * Test the scenario where the query times out while collecting documents, verify that partial aggs results are returned + */ + public void testAggsTimeoutWhileCollecting() { + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setSize(0) + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + .setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(true)) + .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); + assertEquals(0, searchResponse.getHits().getHits().length); + StringTerms terms = searchResponse.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + StringTerms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("value", bucket.getKeyAsString()); + assertThat(bucket.getDocCount(), greaterThan(0L)); + }); + } + + /** + * Test the scenario where the suggest phase (part of the query phase) times out, yet there are results + * available coming from executing the query and aggs on each shard. 
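+ * The timeout is thrown by the suggester itself, which runs after hits and aggregations have already been collected, so both are expected to survive as partial results.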
+ */ + public void testSuggestTimeoutWithPartialResults() { + SuggestBuilder suggestBuilder = new SuggestBuilder(); + suggestBuilder.setGlobalText("text"); + TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder(); + suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder); + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").suggest(suggestBuilder) + .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); + assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); + StringTerms terms = searchResponse.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + StringTerms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("value", bucket.getKeyAsString()); + assertThat(bucket.getDocCount(), greaterThan(0L)); + }); + } + + /** + * Test the scenario where the rescore phase (part of the query phase) times out, yet there are results + * available coming from executing the query and aggs on each shard. + */ + public void testRescoreTimeoutWithPartialResults() { + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setRescorer(new TimeoutRescorerBuilder()) + .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword")); + ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> { + assertThat(searchResponse.isTimedOut(), equalTo(true)); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); + assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); + assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); + StringTerms terms = searchResponse.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + StringTerms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("value", bucket.getKeyAsString()); + assertThat(bucket.getDocCount(), greaterThan(0L)); + }); + } + + public void testPartialResultsIntolerantTimeoutBeforeCollecting() { + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + prepareSearch("test") + // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + .setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(false)) + .setAllowPartialSearchResults(false) // this line causes timeouts to report failures + ); + assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); + } + public void testPartialResultsIntolerantTimeoutWhileCollecting() { ElasticsearchException ex = expectThrows( ElasticsearchException.class, - prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS)) - .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap()))) + prepareSearch("test") + // 
setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set + .setTimeout(new TimeValue(10, TimeUnit.SECONDS)) + .setQuery(new BulkScorerTimeoutQuery(true)) .setAllowPartialSearchResults(false) // this line causes timeouts to report failures ); assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); } - public static class ScriptedTimeoutPlugin extends MockScriptPlugin { - static final String SCRIPT_NAME = "search_timeout"; + public void testPartialResultsIntolerantTimeoutWhileSuggestingOnly() { + SuggestBuilder suggestBuilder = new SuggestBuilder(); + suggestBuilder.setGlobalText("text"); + TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder(); + suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder); + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + prepareSearch("test").suggest(suggestBuilder).setAllowPartialSearchResults(false) // this line causes timeouts to report + // failures + ); + assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); + } + + public void testPartialResultsIntolerantTimeoutWhileSuggesting() { + SuggestBuilder suggestBuilder = new SuggestBuilder(); + suggestBuilder.setGlobalText("text"); + TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder(); + suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder); + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + prepareSearch("test").setQuery(new TermQueryBuilder("field", "value")) + .suggest(suggestBuilder) + .setAllowPartialSearchResults(false) // this line causes timeouts to report failures + ); + assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); + } + + public void testPartialResultsIntolerantTimeoutWhileRescoring() { + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + prepareSearch("test").setQuery(new TermQueryBuilder("field", "value")) + .setRescorer(new TimeoutRescorerBuilder()) + .setAllowPartialSearchResults(false) // this line causes timeouts to report failures + ); + assertTrue(ex.toString().contains("Time exceeded")); + assertEquals(429, ex.status().getStatus()); + } + + public static final class SearchTimeoutPlugin extends Plugin implements SearchPlugin { + @Override + public List> getQueries() { + return Collections.singletonList(new QuerySpec("timeout", BulkScorerTimeoutQuery::new, parser -> { + throw new UnsupportedOperationException(); + })); + } @Override - public Map, Object>> pluginScripts() { - return Collections.singletonMap(SCRIPT_NAME, params -> { - try { - Thread.sleep(500); - } catch (InterruptedException e) { - throw new RuntimeException(e); + public List> getSuggesters() { + return Collections.singletonList(new SuggesterSpec<>("timeout", TimeoutSuggestionBuilder::new, parser -> { + throw new UnsupportedOperationException(); + }, TermSuggestion::new)); + } + + @Override + public List> getRescorers() { + return Collections.singletonList(new RescorerSpec<>("timeout", TimeoutRescorerBuilder::new, parser -> { + throw new UnsupportedOperationException(); + })); + } + } + + /** + * Query builder that produces a Lucene Query which throws a + * {@link org.elasticsearch.search.internal.ContextIndexSearcher.TimeExceededException} before or while scoring documents. 
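+ * The boolean constructor argument selects the failure point: {@code false} throws before any document is collected, while {@code true} first collects one document per segment and then throws.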
+ * This helps make this test not time dependent, otherwise it would be unpredictable when exactly the timeout happens, which is + * rather important if we want to test that we are able to return partial results on timeout. + */ + public static final class BulkScorerTimeoutQuery extends AbstractQueryBuilder { + + private final boolean partialResults; + + BulkScorerTimeoutQuery(boolean partialResults) { + this.partialResults = partialResults; + } + + BulkScorerTimeoutQuery(StreamInput in) throws IOException { + super(in); + this.partialResults = in.readBoolean(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeBoolean(partialResults); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) {} + + @Override + protected Query doToQuery(SearchExecutionContext context) { + return new Query() { + @Override + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { + return new ConstantScoreWeight(this, boost) { + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return false; + } + + @Override + public ScorerSupplier scorerSupplier(LeafReaderContext context) { + return new ScorerSupplier() { + @Override + public BulkScorer bulkScorer() { + if (partialResults == false) { + ((ContextIndexSearcher) searcher).throwTimeExceededException(); + } + final int maxDoc = context.reader().maxDoc(); + return new BulkScorer() { + @Override + public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException { + max = Math.min(max, maxDoc); + collector.setScorer(new Scorable() { + @Override + public float score() { + return 1f; + } + }); + for (int doc = min; doc < max; ++doc) { + if (acceptDocs == null || acceptDocs.get(doc)) { + collector.collect(doc); + // collect one doc per segment, only then throw a timeout: this ensures partial + // results are returned + ((ContextIndexSearcher) searcher).throwTimeExceededException(); + } + } + // there is a slight chance that no docs are scored for a specific segment. + // other shards / slices will throw the timeout anyway, one is enough. + return max == maxDoc ? 
DocIdSetIterator.NO_MORE_DOCS : max; + } + + @Override + public long cost() { + return maxDoc; + } + }; + } + + @Override + public Scorer get(long leadCost) { + assert false; + return new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + } + + @Override + public long cost() { + assert false; + return context.reader().maxDoc(); + } + }; + } + }; + } + + @Override + public String toString(String field) { + return "timeout query"; + } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj); + } + + @Override + public int hashCode() { + return classHash(); + } + }; + } + + @Override + protected boolean doEquals(BulkScorerTimeoutQuery other) { + return false; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + public String getWriteableName() { + return "timeout"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return null; + } + } + + /** + * Suggestion builder that triggers a timeout as part of its execution + */ + private static final class TimeoutSuggestionBuilder extends TermSuggestionBuilder { + TimeoutSuggestionBuilder() { + super("field"); + } + + TimeoutSuggestionBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getWriteableName() { + return "timeout"; + } + + @Override + public SuggestionSearchContext.SuggestionContext build(SearchExecutionContext context) { + return new TimeoutSuggestionContext(new TimeoutSuggester((ContextIndexSearcher) context.searcher()), context); + } + } + + private static final class TimeoutSuggester extends Suggester { + private final ContextIndexSearcher contextIndexSearcher; + + TimeoutSuggester(ContextIndexSearcher contextIndexSearcher) { + this.contextIndexSearcher = contextIndexSearcher; + } + + @Override + protected TermSuggestion innerExecute( + String name, + TimeoutSuggestionContext suggestion, + IndexSearcher searcher, + CharsRefBuilder spare + ) { + contextIndexSearcher.throwTimeExceededException(); + assert false; + return new TermSuggestion(name, suggestion.getSize(), SortBy.SCORE); + } + + @Override + protected TermSuggestion emptySuggestion(String name, TimeoutSuggestionContext suggestion, CharsRefBuilder spare) { + return new TermSuggestion(name, suggestion.getSize(), SortBy.SCORE); + } + } + + private static final class TimeoutSuggestionContext extends SuggestionSearchContext.SuggestionContext { + TimeoutSuggestionContext(Suggester suggester, SearchExecutionContext searchExecutionContext) { + super(suggester, searchExecutionContext); + } + } + + private static final class TimeoutRescorerBuilder extends RescorerBuilder { + TimeoutRescorerBuilder() { + super(); + } + + TimeoutRescorerBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + protected void doWriteTo(StreamOutput out) {} + + @Override + protected void doXContent(XContentBuilder builder, Params params) {} + + @Override + protected RescoreContext innerBuildContext(int windowSize, SearchExecutionContext context) throws IOException { + return new RescoreContext(10, new Rescorer() { + @Override + public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext rescoreContext) { + ((ContextIndexSearcher) context.searcher()).throwTimeExceededException(); + assert false; + return null; + } + + @Override + public Explanation explain( + int topLevelDocId, + IndexSearcher searcher, + RescoreContext 
rescoreContext, + Explanation sourceExplanation + ) { + throw new UnsupportedOperationException(); } - return true; }); } + + @Override + public String getWriteableName() { + return "timeout"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return null; + } + + @Override + public RescorerBuilder rewrite(QueryRewriteContext ctx) { + return this; + } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index fe49ce57d0400..cbd22856f09a2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.client.Cancellable; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; @@ -73,6 +74,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; @@ -591,21 +593,31 @@ public void testNoActiveCopy() throws Exception { private void moveOrCloseShardsOnNodes(String nodeName) throws Exception { final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeName); + final ClusterState clusterState = clusterService().state(); for (IndexService indexService : indicesService) { for (IndexShard indexShard : indexService) { if (randomBoolean()) { closeShardNoCheck(indexShard, randomBoolean()); } else if (randomBoolean()) { final ShardId shardId = indexShard.shardId(); - + final var assignedNodes = new HashSet<>(); + clusterState.routingTable().shardRoutingTable(shardId).allShards().forEach(shr -> { + if (shr.currentNodeId() != null) { + assignedNodes.add(shr.currentNodeId()); + } + if (shr.relocatingNodeId() != null) { + assignedNodes.add(shr.relocatingNodeId()); + } + }); final var targetNodes = new ArrayList(); for (final var targetIndicesService : internalCluster().getInstances(IndicesService.class)) { final var targetNode = targetIndicesService.clusterService().localNode(); - if (targetNode.canContainData() && targetIndicesService.getShardOrNull(shardId) == null) { + if (targetNode.canContainData() + && targetIndicesService.getShardOrNull(shardId) == null + && assignedNodes.contains(targetNode.getId()) == false) { targetNodes.add(targetNode.getId()); } } - if (targetNodes.isEmpty()) { continue; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index fbdcfe26d28ee..0ba4c13c352c1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; -import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -994,22 +993,6 @@ public void testRescoreAfterCollapseRandom() throws Exception { }); } - public void testRescoreWithTimeout() throws Exception { - // no dummy docs since merges can change scores while we run queries. - int numDocs = indexRandomNumbers("whitespace", -1, false); - - String intToEnglish = English.intToEnglish(between(0, numDocs - 1)); - String query = intToEnglish.split(" ")[0]; - assertResponse( - prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) - .setSize(10) - .addRescorer(new QueryRescorerBuilder(functionScoreQuery(new TestTimedScoreFunctionBuilder())).windowSize(100)) - .setTimeout(TimeValue.timeValueMillis(10)), - r -> assertTrue(r.isTimedOut()) - ); - } - @Override protected Collection> nodePlugins() { return List.of(TestTimedQueryPlugin.class); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java index 3d5120226ebed..3d05b0e953959 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java @@ -105,7 +105,7 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio assertTrue(stats.hasField("field2")); // positions because of span query assertEquals( - Set.of(UsageContext.TERMS, UsageContext.POSTINGS, UsageContext.FREQS, UsageContext.POSITIONS), + Set.of(UsageContext.TERMS, UsageContext.POSTINGS, UsageContext.FREQS, UsageContext.POSITIONS, UsageContext.NORMS), stats.get("field2").keySet() ); assertEquals(1L * numShards, stats.get("field2").getTerms()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java index 2530dd35946fe..ee7f76e6be3f3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/SearchStatsIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.settings.Settings; @@ -165,7 +164,7 @@ public void testSimpleStats() throws Exception { private Set nodeIdsWithIndex(String... indices) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); + List allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); Set nodes = new HashSet<>(); for (ShardIterator shardIterator : allAssignedShardsGrouped) { for (ShardRouting routing : shardIterator) { @@ -248,7 +247,7 @@ public void testOpenContexts() { protected int numAssignedShards(String... 
indices) { ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); - GroupShardsIterator allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); + List allAssignedShardsGrouped = state.routingTable().allAssignedShardsGrouped(indices, true); return allAssignedShardsGrouped.size(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java index 77c7b4b762e6a..c1549c1f3d384 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MultiClusterRepoAccessIT.java @@ -195,7 +195,7 @@ public void testConcurrentWipeAndRecreateFromOtherCluster() throws IOException { ); assertAcked(clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName)); - IOUtils.rm(internalCluster().getCurrentMasterNodeInstance(Environment.class).resolveRepoFile(repoPath.toString())); + IOUtils.rm(internalCluster().getCurrentMasterNodeInstance(Environment.class).resolveRepoDir(repoPath.toString())); createRepository(repoName, "fs", repoPath); createFullSnapshot(repoName, "snap-1"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java index 6922b21be37f7..2f4014bf4d350 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java @@ -316,7 +316,6 @@ public void testGetSnapshotsNoRepos() { .get(); assertTrue(getSnapshotsResponse.getSnapshots().isEmpty()); - assertTrue(getSnapshotsResponse.getFailures().isEmpty()); } public void testGetSnapshotsMultipleRepos() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java index b9e47740e2945..b86cae1c2fb60 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java @@ -17,10 +17,12 @@ import org.elasticsearch.cluster.SnapshotDeletionsInProgress; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.mockstore.MockRepository; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.MockLog; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.List; import java.util.concurrent.TimeUnit; @@ -223,4 +225,30 @@ public void testRerouteWhenShardSnapshotsCompleted() throws Exception { safeAwait(shardMovedListener); ensureGreen(indexName); } + + @TestLogging(reason = "testing task description, logged at DEBUG", value = "org.elasticsearch.cluster.service.MasterService:DEBUG") + public void testCreateSnapshotTaskDescription() { + createIndexWithRandomDocs(randomIdentifier(), randomIntBetween(1, 5)); + final var repositoryName = randomIdentifier(); + createRepository(repositoryName, "mock"); + + final var snapshotName = 
randomIdentifier(); + MockLog.assertThatLogger( + () -> createFullSnapshot(repositoryName, snapshotName), + MasterService.class, + new MockLog.SeenEventExpectation( + "executing cluster state update debug message", + MasterService.class.getCanonicalName(), + Level.DEBUG, + "executing cluster state update for [create_snapshot [" + + snapshotName + + "][CreateSnapshotTask{repository=" + + repositoryName + + ", snapshot=*" + + snapshotName + + "*}]]" + ) + ); + } + } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 73e9a2058b2b4..01acabca2bc00 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -461,7 +461,8 @@ with org.elasticsearch.index.codec.Elasticsearch814Codec, org.elasticsearch.index.codec.Elasticsearch816Codec, - org.elasticsearch.index.codec.Elasticsearch900Codec; + org.elasticsearch.index.codec.Elasticsearch900Codec, + org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec; provides org.apache.logging.log4j.core.util.ContextDataProvider with org.elasticsearch.common.logging.DynamicContextDataProvider; diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchTimeoutException.java b/server/src/main/java/org/elasticsearch/ElasticsearchTimeoutException.java index eddce19c77888..06ae43144476e 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchTimeoutException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchTimeoutException.java @@ -10,6 +10,7 @@ package org.elasticsearch; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -34,4 +35,10 @@ public ElasticsearchTimeoutException(String message, Object... args) { public ElasticsearchTimeoutException(String message, Throwable cause, Object... args) { super(message, cause, args); } + + @Override + public RestStatus status() { + // closest thing to "your request took longer than you asked for" + return RestStatus.TOO_MANY_REQUESTS; + } } diff --git a/server/src/main/java/org/elasticsearch/ReleaseVersions.java b/server/src/main/java/org/elasticsearch/ReleaseVersions.java index 22cd18c7b4ac3..5e6986a5bf924 100644 --- a/server/src/main/java/org/elasticsearch/ReleaseVersions.java +++ b/server/src/main/java/org/elasticsearch/ReleaseVersions.java @@ -78,10 +78,10 @@ public static IntFunction generateVersionsLookup(Class versionContain // replace all version lists with the smallest & greatest versions versions.replaceAll((k, v) -> { if (v.size() == 1) { - return List.of(v.get(0)); + return List.of(v.getFirst()); } else { v.sort(Comparator.naturalOrder()); - return List.of(v.get(0), v.get(v.size() - 1)); + return List.of(v.getFirst(), v.getLast()); } }); @@ -100,14 +100,14 @@ private static IntFunction lookupFunction(NavigableMap lookupFunction(NavigableMap lookupFunction(NavigableMap T lastItem(List list) { - return list.get(list.size() - 1); - } - private static Version nextVersion(Version version) { return new Version(version.id + 100); // +1 to revision } diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index 64d1c0535a561..47cbe605b98e3 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -118,6 +118,14 @@ public static List getAllVersions() { return VersionsHolder.ALL_VERSIONS; } + /** + * @return whether this is a known {@link TransportVersion}, i.e. 
one declared in {@link TransportVersions}. Other versions may exist + * in the wild (they're sent over the wire by numeric ID) but we don't know how to communicate using such versions. + */ + public boolean isKnown() { + return VersionsHolder.ALL_VERSIONS_MAP.containsKey(id); + } + public static TransportVersion fromString(String str) { return TransportVersion.fromId(Integer.parseInt(str)); } @@ -130,20 +138,20 @@ public static TransportVersion fromString(String str) { * When a patch version of an existing transport version is created, {@code transportVersion.isPatchFrom(patchVersion)} * will match any transport version at or above {@code patchVersion} that is also of the same base version. *

-     * For example, {@code version.isPatchFrom(8_800_00_4)} will return the following for the given {@code version}:
+     * For example, {@code version.isPatchFrom(8_800_0_04)} will return the following for the given {@code version}:
      * <ul>
-     *     <li>{@code 8_799_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_799_00_9.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_3.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_4.isPatchFrom(8_800_00_4)}: {@code true}</li>
-     *     <li>{@code 8_800_00_9.isPatchFrom(8_800_00_4)}: {@code true}</li>
-     *     <li>{@code 8_800_01_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_801_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
+     *     <li>{@code 8_799_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_799_0_09.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_03.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_04.isPatchFrom(8_800_0_04)}: {@code true}</li>
+     *     <li>{@code 8_800_0_49.isPatchFrom(8_800_0_04)}: {@code true}</li>
+     *     <li>{@code 8_800_1_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_801_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
      * </ul>
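      * <p>
      * A worked illustration of the arithmetic (added in review, not part of the patch; {@code patch} and
      * {@code upperBound} are local names): with the patch component occupying the last two decimal digits of the
      * new {@code M_NNN_S_PP} layout, the check in the method below is a plain range test over raw ids.
      * <pre>{@code
      * int patch = 8_800_0_04;
      * int upperBound = patch + 100 - (patch % 100); // 8_800_1_00, the first id past the patch's base version
      * // version.isPatchFrom(patch) is then equivalent to: patch <= version.id && version.id < upperBound
      * // e.g. 8_800_0_49 falls inside the range (true), while 8_800_1_00 equals upperBound (false)
      * }</pre>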
*/ public boolean isPatchFrom(TransportVersion version) { - return onOrAfter(version) && id < version.id + 10 - (version.id % 10); + return onOrAfter(version) && id < version.id + 100 - (version.id % 100); } /** diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 05c2071ad8d5f..e4c83dc50fb41 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -90,84 +90,105 @@ static TransportVersion def(int id) { */ public static final TransportVersion V_8_9_X = def(8_500_020); public static final TransportVersion V_8_10_X = def(8_500_061); - public static final TransportVersion V_8_11_X = def(8_512_00_1); - public static final TransportVersion V_8_12_0 = def(8_560_00_0); - public static final TransportVersion V_8_12_1 = def(8_560_00_1); - public static final TransportVersion V_8_13_0 = def(8_595_00_0); - public static final TransportVersion V_8_13_4 = def(8_595_00_1); - public static final TransportVersion V_8_14_0 = def(8_636_00_1); - public static final TransportVersion V_8_15_0 = def(8_702_00_2); - public static final TransportVersion V_8_15_2 = def(8_702_00_3); - public static final TransportVersion V_8_16_0 = def(8_772_00_1); - public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_00_2); - public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_00_3); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_00_4); - public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); - public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); - public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); - public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); - public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0); - public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_00_0); - public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_00_0); - public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_00_0); - public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_00_0); - public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_00_0); - public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_00_0); - public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_00_0); - public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_00_0); - public static final TransportVersion KQL_QUERY_ADDED = def(8_786_00_0); - public static final TransportVersion ROLE_MONITOR_STATS = def(8_787_00_0); - public static final TransportVersion DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK = def(8_788_00_0); - public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO = def(8_789_00_0); - public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_00_0); - public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_00_0); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_00_0); - public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); - public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = 
def(8_794_00_0); - public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); - public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); - public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0); - public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_00_1); - public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_00_2); - public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0); - public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0); - public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_00_0); - public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_00_0); - public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_00_0); - public static final TransportVersion NEW_REFRESH_CLUSTER_BLOCK = def(8_803_00_0); - public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS = def(8_804_00_0); - public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_00_0); - public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_00_0); - public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_00_0); - public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_00_0); - public static final TransportVersion EQL_ALLOW_PARTIAL_SEARCH_RESULTS = def(8_809_00_0); - public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_00_0); - public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_00_0); - public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_00_0); - public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_00_0); - public static final TransportVersion TRANSFORMS_UPGRADE_MODE = def(8_814_00_0); - public static final TransportVersion NODE_SHUTDOWN_EPHEMERAL_ID_ADDED = def(8_815_00_0); - public static final TransportVersion ESQL_CCS_TELEMETRY_STATS = def(8_816_00_0); - public static final TransportVersion TEXT_EMBEDDING_QUERY_VECTOR_BUILDER_INFER_MODEL_ID = def(8_817_00_0); - public static final TransportVersion ESQL_ENABLE_NODE_LEVEL_REDUCTION = def(8_818_00_0); - public static final TransportVersion JINA_AI_INTEGRATION_ADDED = def(8_819_00_0); - public static final TransportVersion TRACK_INDEX_FAILED_DUE_TO_VERSION_CONFLICT_METRIC = def(8_820_00_0); - public static final TransportVersion REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX = def(8_821_00_0); - public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_00_0); - public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_00_0); - public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_00_0); - public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_825_00_0); - public static final TransportVersion REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_826_00_0); - public static final TransportVersion ESQL_SKIP_ES_INDEX_SERIALIZATION = def(8_827_00_0); - public static final TransportVersion ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_00_0); - public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_00_0); - public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_00_0); - public static final TransportVersion 
ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_00_0); - public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_00_0); - public static final TransportVersion RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_00_0); - public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_00_0); - public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_00_0); - public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_00_0); + public static final TransportVersion V_8_11_X = def(8_512_0_01); + public static final TransportVersion V_8_12_0 = def(8_560_0_00); + public static final TransportVersion V_8_12_1 = def(8_560_0_01); + public static final TransportVersion V_8_13_0 = def(8_595_0_00); + public static final TransportVersion V_8_13_4 = def(8_595_0_01); + public static final TransportVersion V_8_14_0 = def(8_636_0_01); + public static final TransportVersion V_8_15_0 = def(8_702_0_02); + public static final TransportVersion V_8_15_2 = def(8_702_0_03); + public static final TransportVersion V_8_16_0 = def(8_772_0_01); + public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_0_02); + public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_0_03); + public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_0_04); + public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_0_00); + public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_0_00); + public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_0_00); + public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_0_00); + public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_0_00); + public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_0_00); + public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_0_00); + public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_0_00); + public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_0_00); + public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_0_00); + public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_0_00); + public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_0_00); + public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_0_00); + public static final TransportVersion KQL_QUERY_ADDED = def(8_786_0_00); + public static final TransportVersion ROLE_MONITOR_STATS = def(8_787_0_00); + public static final TransportVersion DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK = def(8_788_0_00); + public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO = def(8_789_0_00); + public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_0_00); + public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_0_00); + public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_0_00); + public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_0_00); + public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_0_00); + public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_0_00); + public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_0_00); + public static final 
TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_0_00); + public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_0_01); + public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_0_02); + public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_0_00); + public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_0_00); + public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_0_00); + public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_0_00); + public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_0_00); + public static final TransportVersion NEW_REFRESH_CLUSTER_BLOCK = def(8_803_0_00); + public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS = def(8_804_0_00); + public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_0_00); + public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_0_00); + public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_0_00); + public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_0_00); + public static final TransportVersion EQL_ALLOW_PARTIAL_SEARCH_RESULTS = def(8_809_0_00); + public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_0_00); + public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_0_00); + public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_0_00); + public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_0_00); + public static final TransportVersion TRANSFORMS_UPGRADE_MODE = def(8_814_0_00); + public static final TransportVersion NODE_SHUTDOWN_EPHEMERAL_ID_ADDED = def(8_815_0_00); + public static final TransportVersion ESQL_CCS_TELEMETRY_STATS = def(8_816_0_00); + public static final TransportVersion TEXT_EMBEDDING_QUERY_VECTOR_BUILDER_INFER_MODEL_ID = def(8_817_0_00); + public static final TransportVersion ESQL_ENABLE_NODE_LEVEL_REDUCTION = def(8_818_0_00); + public static final TransportVersion JINA_AI_INTEGRATION_ADDED = def(8_819_0_00); + public static final TransportVersion TRACK_INDEX_FAILED_DUE_TO_VERSION_CONFLICT_METRIC = def(8_820_0_00); + public static final TransportVersion REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX = def(8_821_0_00); + public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_0_00); + public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_0_00); + public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_0_00); + public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_825_0_00); + public static final TransportVersion REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_826_0_00); + public static final TransportVersion ESQL_SKIP_ES_INDEX_SERIALIZATION = def(8_827_0_00); + public static final TransportVersion ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_0_00); + public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_0_00); + public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_0_00); + public static final TransportVersion ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_0_00); + public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_0_00); + public static final TransportVersion 
RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_0_00); + public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_0_00); + public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_0_00); + public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_0_00); + public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_0_00); + public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_0_00); + public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_0_00); + public static final TransportVersion ML_INFERENCE_IBM_WATSONX_RERANK_ADDED = def(8_840_0_00); + public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED_BACKPORT_8_X = def(8_840_0_01); + public static final TransportVersion REMOVE_ALL_APPLICABLE_SELECTOR_BACKPORT_8_X = def(8_840_0_02); + public static final TransportVersion ELASTICSEARCH_9_0 = def(9_000_0_00); + public static final TransportVersion REMOVE_SNAPSHOT_FAILURES_90 = def(9_000_0_01); + public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED_90 = def(9_000_0_02); + public static final TransportVersion REMOVE_DESIRED_NODE_VERSION_90 = def(9_000_0_03); + public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION_90 = def(9_000_0_04); + public static final TransportVersion REMOVE_ALL_APPLICABLE_SELECTOR_9_0 = def(9_000_0_05); + public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED = def(9_001_0_00); + public static final TransportVersion REMOVE_SNAPSHOT_FAILURES = def(9_002_0_00); + public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED = def(9_003_0_00); + public static final TransportVersion REMOVE_DESIRED_NODE_VERSION = def(9_004_0_00); + public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION = def(9_005_0_00); + public static final TransportVersion ESQL_RETRY_ON_SHARD_LEVEL_FAILURE = def(9_006_0_00); + public static final TransportVersion ESQL_PROFILE_ASYNC_NANOS = def(9_007_00_0); + public static final TransportVersion ESQL_LOOKUP_JOIN_SOURCE_TEXT = def(9_008_0_00); + public static final TransportVersion REMOVE_ALL_APPLICABLE_SELECTOR = def(9_009_0_00); /* * STOP! READ THIS FIRST! No, really, @@ -180,21 +201,23 @@ static TransportVersion def(int id) { * A new transport version should be added EVERY TIME a change is made to the serialization protocol of one or more classes. Each * transport version should only be used in a single merged commit (apart from the BwC versions copied from o.e.Version, ≤V_8_8_1). * + * More information about versions and backporting at docs/internal/Versioning.md + * * ADDING A TRANSPORT VERSION * To add a new transport version, add a new constant at the bottom of the list, above this comment. Don't add other lines, * comments, etc. The version id has the following layout: * - * M_NNN_SS_P + * M_NNN_S_PP * * M - The major version of Elasticsearch * NNN - The server version part - * SS - The serverless version part. It should always be 00 here, it is used by serverless only. - * P - The patch version part + * S - The subsidiary version part. It should always be 0 here, it is only used in subsidiary repositories. 
+ * PP - The patch version part * * To determine the id of the next TransportVersion constant, do the following: * - Use the same major version, unless bumping majors * - Bump the server version part by 1, unless creating a patch version - * - Leave the serverless part as 00 + * - Leave the subsidiary part as 0 * - Bump the patch part if creating a patch version * * If a patch version is created, it should be placed sorted among the other existing constants. @@ -226,15 +249,13 @@ static TransportVersion def(int id) { * Reference to the earliest compatible transport version to this version of the codebase. * This should be the transport version used by the highest minor version of the previous major. */ - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - // This needs to be bumped to the 8.last - public static final TransportVersion MINIMUM_COMPATIBLE = V_7_17_0; + public static final TransportVersion MINIMUM_COMPATIBLE = BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; /** * Reference to the minimum transport version that can be used with CCS. * This should be the transport version used by the previous minor release. */ - public static final TransportVersion MINIMUM_CCS_VERSION = V_8_15_0; + public static final TransportVersion MINIMUM_CCS_VERSION = BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; /** * Sorted list of all versions defined in this class diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 1249f36745835..75be3a1db906f 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -193,12 +193,16 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_16_2 = new Version(8_16_02_99); public static final Version V_8_16_3 = new Version(8_16_03_99); public static final Version V_8_16_4 = new Version(8_16_04_99); + public static final Version V_8_16_5 = new Version(8_16_05_99); public static final Version V_8_17_0 = new Version(8_17_00_99); public static final Version V_8_17_1 = new Version(8_17_01_99); public static final Version V_8_17_2 = new Version(8_17_02_99); + public static final Version V_8_17_3 = new Version(8_17_03_99); public static final Version V_8_18_0 = new Version(8_18_00_99); + public static final Version V_8_19_0 = new Version(8_19_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); - public static final Version CURRENT = V_9_0_0; + public static final Version V_9_1_0 = new Version(9_01_00_99); + public static final Version CURRENT = V_9_1_0; private static final NavigableMap VERSION_IDS; private static final Map VERSION_STRINGS; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java index 86b476f84ecff..da9626d14ca60 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java @@ -22,14 +22,12 @@ import org.elasticsearch.cluster.desirednodes.VersionConflictException; import org.elasticsearch.cluster.metadata.DesiredNodes; import org.elasticsearch.cluster.metadata.DesiredNodesMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.RerouteService; import 
org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -42,7 +40,6 @@ public class TransportUpdateDesiredNodesAction extends TransportMasterNodeAction { private static final Logger logger = LogManager.getLogger(TransportUpdateDesiredNodesAction.class); - private final FeatureService featureService; private final MasterServiceTaskQueue taskQueue; @Inject @@ -50,10 +47,8 @@ public TransportUpdateDesiredNodesAction( TransportService transportService, ClusterService clusterService, RerouteService rerouteService, - FeatureService featureService, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, AllocationService allocationService ) { super( @@ -67,7 +62,6 @@ public TransportUpdateDesiredNodesAction( UpdateDesiredNodesResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.featureService = featureService; this.taskQueue = clusterService.createTaskQueue( "update-desired-nodes", Priority.URGENT, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java index c84df0ddfe644..3b773ae686845 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -108,7 +108,7 @@ protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation( Task task ) { // We default to using an empty string as the keystore password so that we mimic pre 7.3 API behavior - try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configFile())) { + try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configDir())) { // reread keystore from config file if (keystore == null) { return new NodesReloadSecureSettingsResponse.NodeResponse( diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index dd21b11ad18cc..b13d0b031c5d8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -34,6 +33,7 @@ import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; @@ -101,7 +101,7 @@ protected 
void masterOperation( } Set nodeIds = new HashSet<>(); - GroupShardsIterator groupShardsIterator = clusterService.operationRouting() + List groupShardsIterator = clusterService.operationRouting() .searchShards(clusterState, concreteIndices, routingMap, request.preference()); ShardRouting shard; ClusterSearchShardsGroup[] groupResponses = new ClusterSearchShardsGroup[groupShardsIterator.size()]; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java index dc261177567cc..797d95529ee23 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java @@ -9,7 +9,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; @@ -17,12 +17,10 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -35,9 +33,6 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo private final List snapshots; - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // always empty, can be dropped - private final Map failures; - @Nullable private final String next; @@ -45,15 +40,8 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo private final int remaining; - public GetSnapshotsResponse( - List snapshots, - Map failures, - @Nullable String next, - final int total, - final int remaining - ) { + public GetSnapshotsResponse(List snapshots, @Nullable String next, final int total, final int remaining) { this.snapshots = List.copyOf(snapshots); - this.failures = failures == null ? Map.of() : Map.copyOf(failures); this.next = next; this.total = total; this.remaining = remaining; @@ -61,7 +49,11 @@ public GetSnapshotsResponse( public GetSnapshotsResponse(StreamInput in) throws IOException { this.snapshots = in.readCollectionAsImmutableList(SnapshotInfo::readFrom); - this.failures = Collections.unmodifiableMap(in.readMap(StreamInput::readException)); + if (in.getTransportVersion().before(TransportVersions.REMOVE_SNAPSHOT_FAILURES) + && in.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_SNAPSHOT_FAILURES_90) == false) { + // Deprecated `failures` field + in.readMap(StreamInput::readException); + } this.next = in.readOptionalString(); this.total = in.readVInt(); this.remaining = in.readVInt(); @@ -76,25 +68,11 @@ public List getSnapshots() { return snapshots; } - /** - * Returns a map of repository name to {@link ElasticsearchException} for each unsuccessful response. - */ - public Map getFailures() { - return failures; - } - @Nullable public String next() { return next; } - /** - * Returns true if there is at least one failed response. 
- */ - public boolean isFailed() { - return failures.isEmpty() == false; - } - public int totalCount() { return total; } @@ -106,7 +84,11 @@ public int remaining() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(snapshots); - out.writeMap(failures, StreamOutput::writeException); + if (out.getTransportVersion().before(TransportVersions.REMOVE_SNAPSHOT_FAILURES) + && out.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_SNAPSHOT_FAILURES_90) == false) { + // Deprecated `failures` field + out.writeMap(Map.of(), StreamOutput::writeException); + } out.writeOptionalString(next); out.writeVInt(total); out.writeVInt(remaining); @@ -120,18 +102,6 @@ public Iterator toXContentChunked(ToXContent.Params params) { return b; }), Iterators.map(getSnapshots().iterator(), snapshotInfo -> snapshotInfo::toXContentExternal), Iterators.single((b, p) -> { b.endArray(); - if (failures.isEmpty() == false) { - b.startObject("failures"); - for (Map.Entry error : failures.entrySet()) { - b.field(error.getKey(), (bb, pa) -> { - bb.startObject(); - error.getValue().toXContent(bb, pa); - bb.endObject(); - return bb; - }); - } - b.endObject(); - } if (next != null) { b.field("next", next); } @@ -151,12 +121,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetSnapshotsResponse that = (GetSnapshotsResponse) o; - return Objects.equals(snapshots, that.snapshots) && Objects.equals(failures, that.failures) && Objects.equals(next, that.next); + return Objects.equals(snapshots, that.snapshots) && Objects.equals(next, that.next); } @Override public int hashCode() { - return Objects.hash(snapshots, failures, next); + return Objects.hash(snapshots, next); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 896b336d54d7b..ec4a578ef25cd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -543,7 +543,6 @@ private GetSnapshotsResponse buildResponse() { } return new GetSnapshotsResponse( snapshotInfos, - null, remaining > 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, totalCount.get(), remaining diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java index 69ab9f57d2be7..071e9b42752c0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java @@ -77,6 +77,17 @@ public boolean hasErrors() { return errors; } + /** + * Get a list of all errors from the response. If there are no errors, an empty list is returned. + */ + public List getErrors() { + if (errors == false) { + return List.of(); + } else { + return actionResults.stream().filter(a -> a.getError() != null).map(AliasActionResult::getError).toList(); + } + } + /** * Build a response from a list of action results. Sets the errors boolean based * on whether any of the individual results contain an error. 
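A note on the GetSnapshotsResponse hunks above: they follow the usual idiom for deleting a field from the wire format without breaking mixed-version clusters. Peers whose transport version predates the removal still send and expect the `failures` map, so new code reads and discards it on input and writes an empty placeholder on output; the extra isPatchFrom clause covers the 9.0 backport, since a 9.0 node at or past REMOVE_SNAPSHOT_FAILURES_90 already has the removal even though it is before REMOVE_SNAPSHOT_FAILURES. A minimal sketch of the idiom under assumed names (ExampleResponse, REMOVE_FOO and REMOVE_FOO_90 are placeholders, not real API):

    // Sketch only: version-gated read/write of a wire field removed in REMOVE_FOO and backported as REMOVE_FOO_90.
    static boolean peerPredatesRemoval(TransportVersion v) {
        // before the mainline removal, and not a 9.0 patch release carrying the backport
        return v.before(TransportVersions.REMOVE_FOO) && v.isPatchFrom(TransportVersions.REMOVE_FOO_90) == false;
    }

    ExampleResponse(StreamInput in) throws IOException {
        if (peerPredatesRemoval(in.getTransportVersion())) {
            in.readMap(StreamInput::readException); // old peer still sends the field: consume and drop it
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        if (peerPredatesRemoval(out.getTransportVersion())) {
            out.writeMap(Map.of(), StreamOutput::writeException); // old peer still expects the field: send it empty
        }
    }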
@@ -165,6 +176,13 @@ public static AliasActionResult buildSuccess(List indices, AliasActions return new AliasActionResult(indices, action, null); } + /** + * The error result if the action failed, null if the action succeeded. + */ + public ElasticsearchException getError() { + return error; + } + private int getStatus() { return error == null ? 200 : error.status().getStatus(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java new file mode 100644 index 0000000000000..0574e05001f12 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeCapabilities.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.admin.indices.analyze; + +import java.util.Set; + +public final class AnalyzeCapabilities { + private AnalyzeCapabilities() {} + + private static final String WRONG_CUSTOM_ANALYZER_RETURNS_400_CAPABILITY = "wrong_custom_analyzer_returns_400"; + + public static final Set CAPABILITIES = Set.of(WRONG_CUSTOM_ANALYZER_RETURNS_400_CAPABILITY); +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index fb672b49c2f5a..5f5d27bda2708 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -144,6 +144,8 @@ public static AnalyzeAction.Response analyze( if (analyzer != null) { return analyze(request, analyzer, maxTokenCount); } + } catch (IllegalStateException e) { + throw new IllegalArgumentException("Can not build a custom analyzer", e); } // Otherwise we use a built-in analyzer, which should not be closed diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index e668624440351..a9eb8d4f8ed66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -13,6 +13,7 @@ import org.apache.lucene.backward_codecs.lucene50.Lucene50PostingsFormat; import org.apache.lucene.backward_codecs.lucene84.Lucene84PostingsFormat; import org.apache.lucene.backward_codecs.lucene90.Lucene90PostingsFormat; +import org.apache.lucene.backward_codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.backward_codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; @@ -21,7 +22,7 @@ import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.TermVectorsReader; -import 
org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; +import org.apache.lucene.codecs.lucene101.Lucene101PostingsFormat; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; @@ -306,6 +307,9 @@ private static void readProximity(Terms terms, PostingsEnum postings) throws IOE private static BlockTermState getBlockTermState(TermsEnum termsEnum, BytesRef term) throws IOException { if (term != null && termsEnum.seekExact(term)) { final TermState termState = termsEnum.termState(); + if (termState instanceof final Lucene101PostingsFormat.IntBlockTermState blockTermState) { + return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); + } if (termState instanceof final Lucene912PostingsFormat.IntBlockTermState blockTermState) { return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java index 6c0eb79714093..4a3f398656c14 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -214,13 +213,8 @@ protected AnalyzeIndexDiskUsageResponse newResponse( } @Override - protected GroupShardsIterator shards( - ClusterState clusterState, - AnalyzeIndexDiskUsageRequest request, - String[] concreteIndices - ) { - final GroupShardsIterator groups = clusterService.operationRouting() - .searchShards(clusterState, concreteIndices, null, null); + protected List shards(ClusterState clusterState, AnalyzeIndexDiskUsageRequest request, String[] concreteIndices) { + final List groups = clusterService.operationRouting().searchShards(clusterState, concreteIndices, null, null); for (ShardIterator group : groups) { // fails fast if any non-active groups if (group.size() == 0) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java index da08b78d711cf..7a866792d167a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -92,12 +93,16 
@@ protected void shardOperation( ActionListener listener ) { assert (task instanceof CancellableTask) == false; // TODO: add cancellation handling here once the task supports it - threadPool.executor(ThreadPool.Names.FORCE_MERGE).execute(ActionRunnable.supply(listener, () -> { + SubscribableListener.newForked(l -> { IndexShard indexShard = indicesService.indexServiceSafe(shardRouting.shardId().getIndex()) .getShard(shardRouting.shardId().id()); - indexShard.forceMerge(request); - return EmptyResult.INSTANCE; - })); + indexShard.ensureMutable(l.map(unused -> indexShard)); + }).andThen((l, indexShard) -> { + threadPool.executor(ThreadPool.Names.FORCE_MERGE).execute(ActionRunnable.supply(l, () -> { + indexShard.forceMerge(request); + return EmptyResult.INSTANCE; + })); + }).addListener(listener); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index 2870a6538f8bb..1c99d84900866 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -36,7 +36,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -126,7 +125,7 @@ protected void masterOperation( performMappingUpdate(concreteIndices, request, listener, metadataMappingService, false); } catch (IndexNotFoundException ex) { - logger.debug(() -> "failed to put mappings on indices [" + Arrays.asList(request.indices() + "]"), ex); + logger.debug(() -> "failed to put mappings on indices " + Arrays.toString(request.indices()), ex); throw ex; } } @@ -162,25 +161,21 @@ static void performMappingUpdate( MetadataMappingService metadataMappingService, boolean autoUpdate ) { - final ActionListener wrappedListener = listener.delegateResponse((l, e) -> { - logger.debug(() -> "failed to put mappings on indices [" + Arrays.asList(concreteIndices) + "]", e); + ActionListener.run(listener.delegateResponse((l, e) -> { + logger.debug(() -> "failed to put mappings on indices " + Arrays.toString(concreteIndices), e); l.onFailure(e); - }); - final PutMappingClusterStateUpdateRequest updateRequest; - try { - updateRequest = new PutMappingClusterStateUpdateRequest( - request.masterNodeTimeout(), - request.ackTimeout(), - request.source(), - autoUpdate, - concreteIndices - ); - } catch (IOException e) { - wrappedListener.onFailure(e); - return; - } - - metadataMappingService.putMapping(updateRequest, wrappedListener); + }), + wrappedListener -> metadataMappingService.putMapping( + new PutMappingClusterStateUpdateRequest( + request.masterNodeTimeout(), + request.ackTimeout(), + request.source(), + autoUpdate, + concreteIndices + ), + wrappedListener + ) + ); } static String checkForFailureStoreViolations(ClusterState clusterState, Index[] concreteIndices, PutMappingRequest request) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index 6b77a39c32ffe..2286f64648185 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
index 6b77a39c32ffe..2286f64648185 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.injection.guice.Inject;
@@ -125,17 +126,16 @@ public void onPrimaryOperationComplete(
             IndexShardRoutingTable indexShardRoutingTable,
             ActionListener listener
         ) {
-            assert replicaRequest.primaryRefreshResult.refreshed() : "primary has not refreshed";
-            UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest(
-                indexShardRoutingTable,
-                replicaRequest.primaryRefreshResult.primaryTerm(),
-                replicaRequest.primaryRefreshResult.generation(),
-                false
-            );
+            var primaryTerm = replicaRequest.primaryRefreshResult.primaryTerm();
+            assert Engine.UNKNOWN_PRIMARY_TERM < primaryTerm : primaryTerm;
+
+            var generation = replicaRequest.primaryRefreshResult.generation();
+            assert Engine.RefreshResult.UNKNOWN_GENERATION < generation : generation;
+
             transportService.sendRequest(
                 transportService.getLocalNode(),
                 TransportUnpromotableShardRefreshAction.NAME,
-                unpromotableReplicaRequest,
+                new UnpromotableShardRefreshRequest(indexShardRoutingTable, primaryTerm, generation, false),
                 new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor)
             );
         }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java
index 0fcc8e1115209..3f9e0cbf299e6 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java
@@ -18,6 +18,7 @@
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
@@ -51,6 +52,7 @@ public class ResolveClusterActionRequest extends ActionRequest implements Indice
      */
     private boolean localIndicesRequested = false;
     private IndicesOptions indicesOptions;
+    private TimeValue timeout;

     // true if the user did not provide any index expression - they only want cluster level info, not index matching
     private final boolean clusterInfoOnly;
@@ -89,6 +91,9 @@ public ResolveClusterActionRequest(StreamInput in) throws IOException {
             this.clusterInfoOnly = false;
             this.isQueryingCluster = false;
         }
+        if (in.getTransportVersion().onOrAfter(TransportVersions.TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER)) {
+            this.timeout = in.readOptionalTimeValue();
+        }
     }

     @Override
@@ -103,6 +108,9 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeBoolean(clusterInfoOnly);
             out.writeBoolean(isQueryingCluster);
         }
+        if (out.getTransportVersion().onOrAfter(TransportVersions.TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER)) {
+            out.writeOptionalTimeValue(timeout);
+        }
     }

     static String createVersionErrorMessage(TransportVersion versionFound) {
@@ -124,12 +132,14 @@ public boolean equals(Object o) {
         if (this == o) return true;
         if (o == null || getClass() != o.getClass()) return false;
         ResolveClusterActionRequest request = (ResolveClusterActionRequest) o;
-        return Arrays.equals(names, request.names) && indicesOptions.equals(request.indicesOptions());
+        return Arrays.equals(names, request.names)
+            && indicesOptions.equals(request.indicesOptions())
+            && Objects.equals(timeout, request.timeout);
     }

     @Override
     public int hashCode() {
-        int result = Objects.hash(indicesOptions);
+        int result = Objects.hash(indicesOptions, timeout);
         result = 31 * result + Arrays.hashCode(names);
         return result;
     }
@@ -139,6 +149,10 @@ public String[] indices() {
         return names;
     }

+    public TimeValue getTimeout() {
+        return timeout;
+    }
+
     public boolean clusterInfoOnly() {
         return clusterInfoOnly;
     }
@@ -202,6 +216,10 @@ boolean localIndicesPresent(String[] indices) {
         return false;
     }

+    public void setTimeout(TimeValue timeout) {
+        this.timeout = timeout;
+    }
+
     @Override
     public String toString() {
         return "ResolveClusterActionRequest{"
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
index 0f1b77af0242e..d61901b246d05 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
@@ -646,10 +646,6 @@ private static void enrichIndexAbstraction(
                     : switch (resolvedExpression.selector()) {
                         case DATA -> dataStream.getDataComponent().getIndices().stream();
                         case FAILURES -> dataStream.getFailureIndices().stream();
-                        case ALL_APPLICABLE -> Stream.concat(
-                            dataStream.getIndices().stream(),
-                            dataStream.getFailureIndices().stream()
-                        );
                     };
                 String[] backingIndices = dataStreamIndices.map(Index::getName).toArray(String[]::new);
                 dataStreams.add(new ResolvedDataStream(dataStream.getName(), backingIndices, DataStream.TIMESTAMP_FIELD_NAME));
@@ -670,13 +666,6 @@ private static Stream getAliasIndexStream(ResolvedExpression resolvedExpr
                 assert ia.isDataStreamRelated() : "Illegal selector [failures] used on non data stream alias";
                 yield ia.getFailureIndices(metadata).stream();
             }
-            case ALL_APPLICABLE -> {
-                if (ia.isDataStreamRelated()) {
-                    yield Stream.concat(ia.getIndices().stream(), ia.getFailureIndices(metadata).stream());
-                } else {
-                    yield ia.getIndices().stream();
-                }
-            }
         };
     }
     return aliasIndices;
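The ResolveClusterActionRequest hunks above add a timeout field that only goes on the wire when the negotiated transport version is recent enough, the standard way to evolve a serialized request without breaking older nodes. Here is a small self-contained sketch of the same gating idea, with an invented version constant and plain `DataOutputStream`/`DataInputStream` standing in for Elasticsearch's stream types.

```java
import java.io.*;

// Sketch of version-gated optional-field serialization: both sides must agree
// on the wire version before the new field is written or read. The constant
// and encoding here are illustrative, not the real Elasticsearch values.
public class VersionGatedField {
    static final int TIMEOUT_ADDED_VERSION = 9_010_000; // hypothetical version id

    static void write(DataOutputStream out, int wireVersion, Long timeoutMillis) throws IOException {
        out.writeUTF("existing-fields");              // fields every version understands
        if (wireVersion >= TIMEOUT_ADDED_VERSION) {   // gate the new field on the version
            out.writeBoolean(timeoutMillis != null);  // "optional" encoding: presence flag
            if (timeoutMillis != null) out.writeLong(timeoutMillis);
        }
    }

    static Long read(DataInputStream in, int wireVersion) throws IOException {
        in.readUTF();
        if (wireVersion >= TIMEOUT_ADDED_VERSION) {
            return in.readBoolean() ? in.readLong() : null;
        }
        return null; // an older peer never sent the field
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        write(new DataOutputStream(buf), TIMEOUT_ADDED_VERSION, 30_000L);
        System.out.println(read(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())), TIMEOUT_ADDED_VERSION));
    }
}
```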
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterAction.java
index 9d82b1edff0a9..32ab8bf1220ff 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterAction.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.action.support.ListenerTimeouts;
 import org.elasticsearch.action.support.RefCountingRunnable;
 import org.elasticsearch.client.internal.RemoteClusterClient;
 import org.elasticsearch.cluster.ClusterState;
@@ -29,12 +30,14 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.core.Strings;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.injection.guice.Inject;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.ConnectTransportException;
 import org.elasticsearch.transport.RemoteClusterAware;
 import org.elasticsearch.transport.RemoteClusterService;
 import org.elasticsearch.transport.TransportService;
@@ -60,12 +63,14 @@ public class TransportResolveClusterAction extends HandledTransportAction resultsListener;
+                    TimeValue timeout = request.getTimeout();
+                    // Wrap the listener with a timeout since a timeout was specified.
+                    if (timeout != null) {
+                        var releaserListener = ActionListener.releaseAfter(remoteListener, refs.acquire());
+                        resultsListener = ListenerTimeouts.wrapWithTimeout(
+                            threadPool,
+                            timeout,
+                            searchCoordinationExecutor,
+                            releaserListener,
+                            ignored -> releaserListener.onFailure(new ConnectTransportException(null, REMOTE_CONNECTION_TIMEOUT_ERROR))
+                        );
+                    } else {
+                        resultsListener = ActionListener.releaseAfter(remoteListener, refs.acquire());
+                    }
+
+                    remoteClusterClient.execute(TransportResolveClusterAction.REMOTE_TYPE, remoteRequest, resultsListener);
                 }
             }
         }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java
index 608d32d50a856..cb46d039c5b3b 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java
@@ -13,14 +13,13 @@
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.support.ActiveShardCount;
-import org.elasticsearch.action.support.IndexComponentSelector;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SelectorResolver;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.indices.InvalidIndexNameException;
 import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
@@ -126,14 +125,12 @@ public ActionRequestValidationException validate() {
             );
         }

+        // Ensure we have a valid selector in the request
         if (rolloverTarget != null) {
-            ResolvedExpression resolvedExpression = SelectorResolver.parseExpression(rolloverTarget, indicesOptions);
-            IndexComponentSelector selector = resolvedExpression.selector();
-            if (IndexComponentSelector.ALL_APPLICABLE.equals(selector)) {
-                validationException = addValidationError(
-                    "rollover cannot be applied to both regular and failure indices at the same time",
-                    validationException
-                );
+            try {
+                SelectorResolver.parseExpression(rolloverTarget, indicesOptions);
+            } catch (InvalidIndexNameException exception) {
+                validationException = addValidationError(exception.getMessage(), validationException);
             }
         }
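TransportResolveClusterAction above wraps the remote-cluster listener with `ListenerTimeouts.wrapWithTimeout` so a slow remote produces a timeout failure instead of hanging the request. The sketch below shows the essential contract — exactly one of response, failure, or timeout wins — using only JDK scheduling; it is an analogue, not the Elasticsearch implementation.

```java
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;

// Wrap a callback so the delegate is invoked exactly once: either the real
// outcome arrives in time, or the scheduled timeout task fires first.
public class TimeoutWrappedCallback {
    interface Listener<T> { void onResponse(T value); void onFailure(Exception e); }

    static <T> Listener<T> wrapWithTimeout(ScheduledExecutorService scheduler, long timeoutMillis, Listener<T> delegate) {
        AtomicBoolean done = new AtomicBoolean();
        ScheduledFuture<?> timeout = scheduler.schedule(() -> {
            if (done.compareAndSet(false, true)) {
                delegate.onFailure(new TimeoutException("no response within " + timeoutMillis + "ms"));
            }
        }, timeoutMillis, TimeUnit.MILLISECONDS);
        return new Listener<T>() {
            public void onResponse(T value) {
                if (done.compareAndSet(false, true)) { timeout.cancel(false); delegate.onResponse(value); }
            }
            public void onFailure(Exception e) {
                if (done.compareAndSet(false, true)) { timeout.cancel(false); delegate.onFailure(e); }
            }
        };
    }

    public static void main(String[] args) throws Exception {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        Listener<String> printing = new Listener<>() {
            public void onResponse(String v) { System.out.println("response: " + v); }
            public void onFailure(Exception e) { System.out.println("failure: " + e.getMessage()); }
        };
        wrapWithTimeout(scheduler, 100, printing); // never completed -> the timeout failure fires
        Thread.sleep(200);
        scheduler.shutdown();
    }
}
```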
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
index e01f364712676..10d755c75b1c4 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
@@ -145,7 +144,7 @@ protected ShardValidateQueryResponse readShardResponse(StreamInput in) throws IO
     }

     @Override
-    protected GroupShardsIterator shards(ClusterState clusterState, ValidateQueryRequest request, String[] concreteIndices) {
+    protected List shards(ClusterState clusterState, ValidateQueryRequest request, String[] concreteIndices) {
         final String routing;
         if (request.allShards()) {
             routing = null;
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java
index 282e4d33fb83b..d168a5c0040b6 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java
@@ -179,7 +179,7 @@ private void assertResponsesAreCorrect(BulkItemResponse[] bulkResponses, BulkIte
      * @param slot the slot in the bulk request to mark as failed.
      * @param e the failure encountered.
      */
-    synchronized void markItemAsFailed(int slot, Exception e) {
+    synchronized void markItemAsFailed(int slot, Exception e, IndexDocFailureStoreStatus failureStoreStatus) {
         final DocWriteRequest docWriteRequest = bulkRequest.requests().get(slot);
         final String id = Objects.requireNonNullElse(docWriteRequest.id(), DROPPED_OR_FAILED_ITEM_WITH_AUTO_GENERATED_ID);
         // We hit a error during preprocessing a request, so we:
@@ -187,7 +187,7 @@ synchronized void markItemAsFailed(int slot, Exception e) {
         // 2) Add a bulk item failure for this request
         // 3) Continue with the next request in the bulk.
         failedSlots.set(slot);
-        BulkItemResponse.Failure failure = new BulkItemResponse.Failure(docWriteRequest.index(), id, e);
+        BulkItemResponse.Failure failure = new BulkItemResponse.Failure(docWriteRequest.index(), id, e, failureStoreStatus);
         itemResponses.add(BulkItemResponse.failure(slot, docWriteRequest.opType(), failure));
     }

@@ -223,7 +223,7 @@ public void markItemForFailureStore(int slot, String targetIndexName, Exception
             assert false : "Attempting to route a failed write request type to a failure store but the failure store is not enabled! "
                 + "This should be guarded against in TransportBulkAction#shouldStoreFailure()";
-            markItemAsFailed(slot, e);
+            markItemAsFailed(slot, e, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN);
         } else {
             // We get the index write request to find the source of the failed document
             IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(bulkRequest.requests().get(slot));
@@ -238,7 +238,7 @@ public void markItemForFailureStore(int slot, String targetIndexName, Exception
                 + "], index: ["
                 + targetIndexName
                 + "]";
-            markItemAsFailed(slot, e);
+            markItemAsFailed(slot, e, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN);
             logger.debug(
                 () -> "Attempted to redirect an invalid write operation after ingest failure - type: ["
                     + bulkRequest.requests().get(slot).getClass().getName()
@@ -267,7 +267,7 @@ public void markItemForFailureStore(int slot, String targetIndexName, Exception
                     + "]",
                 ioException
             );
-            markItemAsFailed(slot, e);
+            markItemAsFailed(slot, e, IndexDocFailureStoreStatus.FAILED);
         }
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
index 33c73898c0394..294ac41c8a16b 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
@@ -145,6 +145,15 @@ protected BulkShardResponse newResponseInstance(StreamInput in) throws IOExcepti
         return new BulkShardResponse(in);
     }

+    @Override
+    protected void shardOperationOnPrimary(
+        BulkShardRequest request,
+        IndexShard primary,
+        ActionListener> listener
+    ) {
+        primary.ensureMutable(listener.delegateFailure((l, ignored) -> super.shardOperationOnPrimary(request, primary, l)));
+    }
+
     @Override
     protected void dispatchedShardOperationOnPrimary(
         BulkShardRequest request,
diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java
index fce925d868532..15577632176f5 100644
--- a/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java
+++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
@@ -93,7 +92,7 @@ final class RequestDispatcher {
         this.onComplete = new RunOnce(onComplete);
         this.indexSelectors = ConcurrentCollections.newConcurrentMap();
         for (String index : indices) {
-            final GroupShardsIterator shardIts;
+            final List shardIts;
             try {
                 shardIts = clusterService.operationRouting().searchShards(clusterState, new String[] { index }, null, null);
             } catch (Exception e) {
@@ -250,7 +249,7 @@ private static class IndexSelector {
         private final Set unmatchedShardIds = new HashSet<>();
         private final Map failures = new HashMap<>();

-        IndexSelector(GroupShardsIterator shardIts) {
+        IndexSelector(List shardIts) {
             for (ShardIterator shardIt : shardIts) {
                 for (ShardRouting shard : shardIt) {
                     nodeToShards.computeIfAbsent(shard.currentNodeId(), node -> new ArrayList<>()).add(shard);
diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
index 29b926598ac32..96317069b40f1 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
@@ -11,6 +11,7 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
@@ -28,7 +29,6 @@
 import org.elasticsearch.cluster.ClusterStateObserver;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.PlainShardIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -108,7 +108,7 @@ protected ShardIterator shards(ClusterState state, InternalRequest request) {
         if (iterator == null) {
             return null;
         }
-        return PlainShardIterator.allSearchableShards(iterator);
+        return ShardIterator.allSearchableShards(iterator);
     }

     @Override
@@ -231,7 +231,10 @@ private void getFromTranslog(
         final var retryingListener = listener.delegateResponse((l, e) -> {
             final var cause = ExceptionsHelper.unwrapCause(e);
             logger.debug("get_from_translog failed", cause);
-            if (cause instanceof ShardNotFoundException || cause instanceof IndexNotFoundException) {
+            if (cause instanceof ShardNotFoundException
+                || cause instanceof IndexNotFoundException
+                || cause instanceof AlreadyClosedException) {
+                // TODO AlreadyClosedException the engine reset should be fixed by ES-10826
                 logger.debug("retrying get_from_translog");
                 observer.waitForNextChange(new ClusterStateObserver.Listener() {
                     @Override
@@ -246,7 +249,13 @@ public void onClusterServiceClose() {

                     @Override
                     public void onTimeout(TimeValue timeout) {
-                        l.onFailure(new ElasticsearchException("Timed out retrying get_from_translog", cause));
+                        // TODO AlreadyClosedException the engine reset should be fixed by ES-10826
+                        if (cause instanceof AlreadyClosedException) {
+                            // Do an additional retry just in case AlreadyClosedException didn't generate a cluster update
+                            tryGetFromTranslog(request, indexShard, node, l);
+                        } else {
+                            l.onFailure(new ElasticsearchException("Timed out retrying get_from_translog", cause));
+                        }
                     }
                 });
             } else {
diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java
index 3cbd7497dcf39..6fc1ff5300101 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java
@@ -81,7 +81,7 @@ protected void doExecute(Task task, Request request, ActionListener li
                 if (engine == null) {
                     throw new AlreadyClosedException("engine closed");
                 }
-                segmentGeneration = ((InternalEngine) engine).getLastUnsafeSegmentGenerationForGets();
+                segmentGeneration = engine.getLastUnsafeSegmentGenerationForGets();
             }
             return new Response(result, indexShard.getOperationPrimaryTerm(), segmentGeneration);
         });
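The get-from-translog path above now treats AlreadyClosedException as retryable and, on timeout, attempts one extra retry in case the engine reset never produced a cluster state update. The sketch below reduces that to its core — a listener whose onFailure re-runs the operation for a retryable cause — with stand-in types and an immediate bounded retry instead of waiting on cluster state, which the real code does.

```java
import java.util.function.Consumer;

// Sketch of a retrying listener: retryable failures re-run the operation a
// bounded number of times, everything else propagates to the delegate.
// AlreadyClosedException here is a local stand-in, not the Lucene class.
public class RetryingListenerSketch {
    interface Listener<T> { void onResponse(T v); void onFailure(Exception e); }

    static class AlreadyClosedException extends RuntimeException {}

    static <T> Listener<T> retrying(Consumer<Listener<T>> operation, Listener<T> delegate, int maxRetries) {
        return new Listener<T>() {
            public void onResponse(T v) { delegate.onResponse(v); }
            public void onFailure(Exception e) {
                if (e instanceof AlreadyClosedException && maxRetries > 0) {
                    operation.accept(retrying(operation, delegate, maxRetries - 1)); // retry with one fewer attempt
                } else {
                    delegate.onFailure(e);
                }
            }
        };
    }

    public static void main(String[] args) {
        int[] attempts = {0};
        Consumer<Listener<String>> op = l -> {
            if (attempts[0]++ < 1) l.onFailure(new AlreadyClosedException());
            else l.onResponse("ok");
        };
        Listener<String> result = new Listener<>() {
            public void onResponse(String v) { System.out.println("got " + v); }
            public void onFailure(Exception e) { System.out.println("failed: " + e); }
        };
        op.accept(retrying(op, result, 2)); // first attempt fails, the retry succeeds
    }
}
```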
diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
index d9a04acc0466e..3a66db14decdb 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
@@ -11,6 +11,7 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
@@ -28,7 +29,6 @@
 import org.elasticsearch.cluster.ClusterStateObserver;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.PlainShardIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -112,7 +112,7 @@ protected ShardIterator shards(ClusterState state, InternalRequest request) {
         if (iterator == null) {
             return null;
         }
-        return PlainShardIterator.allSearchableShards(iterator);
+        return ShardIterator.allSearchableShards(iterator);
     }

     @Override
@@ -212,7 +212,10 @@ private void shardMultiGetFromTranslog(
         final var retryingListener = listener.delegateResponse((l, e) -> {
             final var cause = ExceptionsHelper.unwrapCause(e);
             logger.debug("mget_from_translog[shard] failed", cause);
-            if (cause instanceof ShardNotFoundException || cause instanceof IndexNotFoundException) {
+            if (cause instanceof ShardNotFoundException
+                || cause instanceof IndexNotFoundException
+                || cause instanceof AlreadyClosedException) {
+                // TODO AlreadyClosedException the engine reset should be fixed by ES-10826
                 logger.debug("retrying mget_from_translog[shard]");
                 observer.waitForNextChange(new ClusterStateObserver.Listener() {
                     @Override
@@ -227,7 +230,13 @@ public void onClusterServiceClose() {

                     @Override
                     public void onTimeout(TimeValue timeout) {
-                        l.onFailure(new ElasticsearchException("Timed out retrying mget_from_translog[shard]", cause));
+                        // TODO AlreadyClosedException the engine reset should be fixed by ES-10826
+                        if (cause instanceof AlreadyClosedException) {
+                            // Do an additional retry just in case AlreadyClosedException didn't generate a cluster update
+                            tryShardMultiGetFromTranslog(request, indexShard, node, l);
+                        } else {
+                            l.onFailure(new ElasticsearchException("Timed out retrying mget_from_translog[shard]", cause));
+                        }
                     }
                 });
             } else {
@@ -313,7 +322,7 @@ private void getAndAddToResponse(
         MultiGetRequest.Item item = request.items.get(location);
         try {
             GetResult getResult = indexShard.getService()
-                .get(
+                .mget(
                     item.id(),
                     item.storedFields(),
                     request.realtime(),
diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetFomTranslogAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetFomTranslogAction.java
index e953ff527f637..ec0b5c6cf143f 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetFomTranslogAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetFomTranslogAction.java
@@ -23,7 +23,6 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.engine.InternalEngine;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.IndicesService;
@@ -102,7 +101,7 @@ protected void doExecute(Task task, Request request, ActionListener li
                 if (engine == null) {
                     throw new AlreadyClosedException("engine closed");
                 }
-                segmentGeneration = ((InternalEngine) engine).getLastUnsafeSegmentGenerationForGets();
+                segmentGeneration = engine.getLastUnsafeSegmentGenerationForGets();
             }
             return new Response(multiGetShardResponse, indexShard.getOperationPrimaryTerm(), segmentGeneration);
         });
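The two *FromTranslog hunks above drop the `((InternalEngine) engine)` casts, because `getLastUnsafeSegmentGenerationForGets` now appears to be exposed on the engine base type. Below is the usual shape of that refactor, with illustrative stand-in types rather than the real Elasticsearch classes.

```java
// Sketch: hoisting a method from a concrete subclass to the abstract base type
// removes instanceof casts at every call site.
public class HoistToBaseType {
    abstract static class Engine {
        // previously only InternalEngine had this, forcing ((InternalEngine) engine) casts
        abstract long getLastUnsafeSegmentGenerationForGets();
    }

    static class InternalEngine extends Engine {
        @Override
        long getLastUnsafeSegmentGenerationForGets() { return 42L; }
    }

    static long segmentGeneration(Engine engine) {
        return engine.getLastUnsafeSegmentGenerationForGets(); // no cast needed any more
    }

    public static void main(String[] args) {
        System.out.println(segmentGeneration(new InternalEngine()));
    }
}
```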
diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
index aeea0a5d65c8a..e8693352270c2 100644
--- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.action.support.SubscribableListener;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.util.Maps;
@@ -60,9 +59,9 @@
 import static org.elasticsearch.core.Strings.format;

 /**
- * This is an abstract base class that encapsulates the logic to fan out to all shards in provided {@link GroupShardsIterator}
+ * This is an abstract base class that encapsulates the logic to fan out to all shards in the provided {@link List}
  * and collect the results. If a shard request returns a failure this class handles the advance to the next replica of the shard until
- * the shards replica iterator is exhausted. Each shard is referenced by position in the {@link GroupShardsIterator} which is later
+ * the shards replica iterator is exhausted. Each shard is referenced by position in the {@link List} which is later
  * referred to as the {@code shardIndex}.
  * The fan out and collect algorithm is traditionally used as the initial phase which can either be a query execution or collection of
  * distributed frequencies
@@ -90,15 +89,13 @@ abstract class AbstractSearchAsyncAction exten
     private final Object shardFailuresMutex = new Object();
     private final AtomicBoolean hasShardResponse = new AtomicBoolean(false);
     private final AtomicInteger successfulOps = new AtomicInteger();
-    private final AtomicInteger skippedOps = new AtomicInteger();
     private final SearchTimeProvider timeProvider;
     private final SearchResponse.Clusters clusters;

-    protected final GroupShardsIterator toSkipShardsIts;
-    protected final GroupShardsIterator shardsIts;
+    protected final List toSkipShardsIts;
+    protected final List shardsIts;
     private final SearchShardIterator[] shardIterators;
-    private final int expectedTotalOps;
-    private final AtomicInteger totalOps = new AtomicInteger();
+    private final AtomicInteger outstandingShards;
     private final int maxConcurrentRequestsPerNode;
     private final Map pendingExecutionsPerNode = new ConcurrentHashMap<>();
     private final boolean throttleConcurrentRequests;
@@ -118,7 +115,7 @@ abstract class AbstractSearchAsyncAction exten
         Executor executor,
         SearchRequest request,
         ActionListener listener,
-        GroupShardsIterator shardsIts,
+        List shardsIts,
         SearchTimeProvider timeProvider,
         ClusterState clusterState,
         SearchTask task,
@@ -137,20 +134,14 @@ abstract class AbstractSearchAsyncAction exten
                 iterators.add(iterator);
             }
         }
-        this.toSkipShardsIts = new GroupShardsIterator<>(toSkipIterators);
-        this.shardsIts = new GroupShardsIterator<>(iterators);
-
+        this.toSkipShardsIts = toSkipIterators;
+        this.shardsIts = iterators;
+        outstandingShards = new AtomicInteger(shardsIts.size());
         this.shardIterators = iterators.toArray(new SearchShardIterator[0]);
         // we later compute the shard index based on the natural order of the shards
         // that participate in the search request. This means that this number is
         // consistent between two requests that target the same shards.
         Arrays.sort(shardIterators);
-
-        // we need to add 1 for non active partition, since we count it in the total. This means for each shard in the iterator we sum up
-        // it's number of active shards but use 1 as the default if no replica of a shard is active at this point.
-        // on a per shards level we use shardIt.remaining() to increment the totalOps pointer but add 1 for the current shard result
-        // we process hence we add one for the non active partition here.
-        this.expectedTotalOps = shardsIts.totalSizeWith1ForEmpty();
         this.maxConcurrentRequestsPerNode = maxConcurrentRequestsPerNode;
         // in the case were we have less shards than maxConcurrentRequestsPerNode we don't need to throttle
         this.throttleConcurrentRequests = maxConcurrentRequestsPerNode < shardsIts.size();
@@ -179,8 +170,8 @@ protected void notifyListShards(
         SearchSourceBuilder sourceBuilder
     ) {
         progressListener.notifyListShards(
-            SearchProgressListener.buildSearchShards(this.shardsIts),
-            SearchProgressListener.buildSearchShards(toSkipShardsIts),
+            SearchProgressListener.buildSearchShardsFromIter(this.shardsIts),
+            SearchProgressListener.buildSearchShardsFromIter(toSkipShardsIts),
             clusters,
             sourceBuilder == null || sourceBuilder.size() > 0,
             timeProvider
@@ -251,35 +242,8 @@ protected final void run() {
     void skipShard(SearchShardIterator iterator) {
         successfulOps.incrementAndGet();
-        skippedOps.incrementAndGet();
         assert iterator.skip();
-        successfulShardExecution(iterator);
-    }
-
-    private static boolean assertExecuteOnStartThread() {
-        // Ensure that the current code has the following stacktrace:
-        // AbstractSearchAsyncAction#start -> AbstractSearchAsyncAction#executePhase -> AbstractSearchAsyncAction#performPhaseOnShard
-        final StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
-        assert stackTraceElements.length >= 6 : stackTraceElements;
-        int index = 0;
-        assert stackTraceElements[index++].getMethodName().equals("getStackTrace");
-        assert stackTraceElements[index++].getMethodName().equals("assertExecuteOnStartThread");
-        assert stackTraceElements[index++].getMethodName().equals("failOnUnavailable");
-        if (stackTraceElements[index].getMethodName().equals("performPhaseOnShard")) {
-            assert stackTraceElements[index].getClassName().endsWith("CanMatchPreFilterSearchPhase");
-            index++;
-        }
-        assert stackTraceElements[index].getClassName().endsWith("AbstractSearchAsyncAction");
-        assert stackTraceElements[index++].getMethodName().equals("run");
-
-        assert stackTraceElements[index].getClassName().endsWith("AbstractSearchAsyncAction");
-        assert stackTraceElements[index++].getMethodName().equals("executePhase");
-
-        assert stackTraceElements[index].getClassName().endsWith("AbstractSearchAsyncAction");
-        assert stackTraceElements[index++].getMethodName().equals("start");
-
-        assert stackTraceElements[index].getClassName().endsWith("AbstractSearchAsyncAction") == false;
-        return true;
+        successfulShardExecution();
     }

     private void performPhaseOnShard(final int shardIndex, final SearchShardIterator shardIt, final SearchShardTarget shard) {
@@ -300,7 +264,7 @@ private void doPerformPhaseOnShard(int shardIndex, SearchShardIterator shardIt,
             public void innerOnResponse(Result result) {
                 try {
                     releasable.close();
-                    onShardResult(result, shardIt);
+                    onShardResult(result);
                 } catch (Exception exc) {
                     onShardFailure(shardIndex, shard, shardIt, exc);
                 }
@@ -323,7 +287,6 @@ public void onFailure(Exception e) {
     }

     private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) {
-        assert assertExecuteOnStartThread();
         SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias());
         onShardFailure(shardIndex, unassignedShard, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
     }
@@ -380,7 +343,7 @@ protected void executeNextPhase(String currentPhase, Supplier nextP
                 "Partial shards failure (unavailable: {}, successful: {}, skipped: {}, num-shards: {}, phase: {})",
                 discrepancy,
                 successfulOps.get(),
-                skippedOps.get(),
+                toSkipShardsIts.size(),
                 getNumShards(),
                 currentPhase
             );
@@ -449,17 +412,14 @@ private void onShardFailure(final int shardIndex, SearchShardTarget shard, final
             }
             onShardGroupFailure(shardIndex, shard, e);
         }
-        final int totalOps = this.totalOps.incrementAndGet();
-        if (totalOps == expectedTotalOps) {
-            onPhaseDone();
-        } else if (totalOps > expectedTotalOps) {
-            throw new AssertionError(
-                "unexpected higher total ops [" + totalOps + "] compared to expected [" + expectedTotalOps + "]",
-                new SearchPhaseExecutionException(getName(), "Shard failures", null, buildShardFailures())
-            );
+        if (lastShard == false) {
+            performPhaseOnShard(shardIndex, shardIt, nextShard);
         } else {
-            if (lastShard == false) {
-                performPhaseOnShard(shardIndex, shardIt, nextShard);
+            // count down outstanding shards, we're done with this shard as there's no more copies to try
+            final int outstanding = outstandingShards.decrementAndGet();
+            assert outstanding >= 0 : "outstanding: " + outstanding;
+            if (outstanding == 0) {
+                onPhaseDone();
             }
         }
     }
@@ -526,19 +486,18 @@ private static boolean isTaskCancelledException(Exception e) {
     /**
      * Executed once for every successful shard level request.
      * @param result the result returned form the shard
-     * @param shardIt the shard iterator
      */
-    protected void onShardResult(Result result, SearchShardIterator shardIt) {
+    protected void onShardResult(Result result) {
         assert result.getShardIndex() != -1 : "shard index is not set";
         assert result.getSearchShardTarget() != null : "search shard target must not be null";
         hasShardResponse.set(true);
         if (logger.isTraceEnabled()) {
             logger.trace("got first-phase result from {}", result != null ? result.getSearchShardTarget() : null);
         }
-        results.consumeResult(result, () -> onShardResultConsumed(result, shardIt));
+        results.consumeResult(result, () -> onShardResultConsumed(result));
     }

-    private void onShardResultConsumed(Result result, SearchShardIterator shardIt) {
+    private void onShardResultConsumed(Result result) {
         successfulOps.incrementAndGet();
         // clean a previous error on this shard group (note, this code will be serialized on the same shardIndex value level
         // so its ok concurrency wise to miss potentially the shard failures being created because of another failure
@@ -552,28 +511,14 @@ private void onShardResultConsumed(Result result, SearchShardIterator shardIt) {
         // cause the successor to read a wrong value from successfulOps if second phase is very fast ie. count etc.
         // increment all the "future" shards to update the total ops since we some may work and some may not...
         // and when that happens, we break on total ops, so we must maintain them
-        successfulShardExecution(shardIt);
+        successfulShardExecution();
     }

-    private void successfulShardExecution(SearchShardIterator shardsIt) {
-        final int remainingOpsOnIterator;
-        if (shardsIt.skip()) {
-            // It's possible that we're skipping a shard that's unavailable
-            // but its range was available in the IndexMetadata, in that
-            // case the shardsIt.remaining() would be 0, expectedTotalOps
-            // accounts for unavailable shards too.
-            remainingOpsOnIterator = Math.max(shardsIt.remaining(), 1);
-        } else {
-            remainingOpsOnIterator = shardsIt.remaining() + 1;
-        }
-        final int xTotalOps = totalOps.addAndGet(remainingOpsOnIterator);
-        if (xTotalOps == expectedTotalOps) {
+    private void successfulShardExecution() {
+        final int outstanding = outstandingShards.decrementAndGet();
+        assert outstanding >= 0 : "outstanding: " + outstanding;
+        if (outstanding == 0) {
             onPhaseDone();
-        } else if (xTotalOps > expectedTotalOps) {
-            throw new AssertionError(
-                "unexpected higher total ops [" + xTotalOps + "] compared to expected [" + expectedTotalOps + "]",
-                new SearchPhaseExecutionException(getName(), "Shard failures", null, buildShardFailures())
-            );
         }
     }
@@ -640,7 +585,7 @@ private SearchResponse buildSearchResponse(
             scrollId,
             getNumShards(),
             numSuccess,
-            skippedOps.get(),
+            toSkipShardsIts.size(),
             buildTookInMillis(),
             failures,
             clusters,
@@ -732,7 +677,7 @@ void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connecti
     /**
      * Executed once all shard results have been received and processed
     * @see #onShardFailure(int, SearchShardTarget, Exception)
-     * @see #onShardResult(SearchPhaseResult, SearchShardIterator)
+     * @see #onShardResult(SearchPhaseResult)
     */
    private void onPhaseDone() {  // as a tribute to @kimchy aka. finishHim()
        executeNextPhase(getName(), this::getNextPhase);
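The AbstractSearchAsyncAction rewrite above retires the `expectedTotalOps`/`totalOps` arithmetic in favor of a single `outstandingShards` counter: one slot per shard group, decremented exactly once when the group succeeds, is skipped, or exhausts its replicas, with the phase finishing at zero. A pure-JDK sketch of that invariant:

```java
import java.util.concurrent.atomic.AtomicInteger;

// Countdown-based completion tracking, mirroring the decrementAndGet()/onPhaseDone()
// pattern in the hunk above. This is an illustration, not the Elasticsearch class.
public class OutstandingShardsSketch {
    private final AtomicInteger outstandingShards;
    private final Runnable onPhaseDone;

    OutstandingShardsSketch(int shardGroups, Runnable onPhaseDone) {
        this.outstandingShards = new AtomicInteger(shardGroups);
        this.onPhaseDone = onPhaseDone;
    }

    // Called exactly once per shard group, whether it succeeded, was skipped,
    // or failed on its last replica.
    void finishShard() {
        int outstanding = outstandingShards.decrementAndGet();
        assert outstanding >= 0 : "outstanding: " + outstanding;
        if (outstanding == 0) {
            onPhaseDone.run();
        }
    }

    public static void main(String[] args) {
        OutstandingShardsSketch phase = new OutstandingShardsSketch(3, () -> System.out.println("phase done"));
        phase.finishShard();
        phase.finishShard();
        phase.finishShard(); // prints "phase done"
    }
}
```

The appeal of the counter over the old per-iterator remainder sums is that the completion condition no longer depends on predicting up front how many replica attempts each group might make.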
diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java
index 7890a0f9f9738..17c272d75d014 100644
--- a/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java
@@ -136,7 +136,7 @@ public CanMatchNodeRequest(
     ) {
         this.source = getCanMatchSource(searchRequest);
         this.indicesOptions = indicesOptions;
-        this.shards = new ArrayList<>(shards);
+        this.shards = shards;
         this.searchType = searchRequest.searchType();
         this.requestCache = searchRequest.requestCache();
         // If allowPartialSearchResults is unset (ie null), the cluster-level default should have been substituted
diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java
index d45a8a6f01cd1..5e67a1068a1be 100644
--- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java
+++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java
@@ -12,7 +12,6 @@
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.common.util.concurrent.CountDown;
@@ -41,7 +40,6 @@
 import java.util.concurrent.Executor;
 import java.util.concurrent.atomic.AtomicReferenceArray;
 import java.util.function.BiFunction;
-import java.util.stream.Collectors;

 import static org.elasticsearch.core.Strings.format;
 import static org.elasticsearch.core.Types.forciblyCast;
@@ -61,8 +59,8 @@ final class CanMatchPreFilterSearchPhase {
     private final Logger logger;
     private final SearchRequest request;
-    private final GroupShardsIterator shardsIts;
-    private final ActionListener> listener;
+    private final List shardsIts;
+    private final ActionListener> listener;
     private final TransportSearchAction.SearchTimeProvider timeProvider;
     private final BiFunction nodeIdToConnection;
     private final SearchTransportService searchTransportService;
@@ -86,12 +84,12 @@ final class CanMatchPreFilterSearchPhase {
         Map concreteIndexBoosts,
         Executor executor,
         SearchRequest request,
-        GroupShardsIterator shardsIts,
+        List shardsIts,
         TransportSearchAction.SearchTimeProvider timeProvider,
         SearchTask task,
         boolean requireAtLeastOneMatch,
         CoordinatorRewriteContextProvider coordinatorRewriteContextProvider,
-        ActionListener> listener
+        ActionListener> listener
     ) {
         this.logger = logger;
         this.searchTransportService = searchTransportService;
@@ -169,10 +167,9 @@ private void runCoordinatorRewritePhase() {
         if (matchedShardLevelRequests.isEmpty()) {
             finishPhase();
         } else {
-            GroupShardsIterator matchingShards = new GroupShardsIterator<>(matchedShardLevelRequests);
             // verify missing shards only for the shards that we hit for the query
-            checkNoMissingShards(matchingShards);
-            new Round(matchingShards).run();
+            checkNoMissingShards(matchedShardLevelRequests);
+            new Round(matchedShardLevelRequests).run();
         }
     }

@@ -202,12 +199,12 @@ private synchronized void consumeResult(int shardIndex, boolean canMatch, MinAnd
         minAndMaxes[shardIndex] = minAndMax;
     }

-    private void checkNoMissingShards(GroupShardsIterator shards) {
+    private void checkNoMissingShards(List shards) {
         assert assertSearchCoordinationThread();
         SearchPhase.doCheckNoMissingShards("can_match", request, shards, SearchPhase::makeMissingShardsError);
     }

-    private Map> groupByNode(GroupShardsIterator shards) {
+    private Map> groupByNode(List shards) {
         Map> requests = new HashMap<>();
         for (int i = 0; i < shards.size(); i++) {
             final SearchShardIterator shardRoutings = shards.get(i);
@@ -230,11 +227,11 @@ private Map> groupByNode(GroupShardsIte
      * to retry on other available shard copies.
      */
    class Round extends AbstractRunnable {
-        private final GroupShardsIterator shards;
+        private final List shards;
         private final CountDown countDown;
         private final AtomicReferenceArray failedResponses;

-        Round(GroupShardsIterator shards) {
+        Round(List shards) {
             this.shards = shards;
             this.countDown = new CountDown(shards.size());
             this.failedResponses = new AtomicReferenceArray<>(shardsIts.size());
@@ -328,7 +325,7 @@ private void finishRound() {
             finishPhase();
         } else {
             // trigger another round, forcing execution
-            executor.execute(new Round(new GroupShardsIterator<>(remainingShards)) {
+            executor.execute(new Round(remainingShards) {
                 @Override
                 public boolean isForceExecution() {
                     return true;
@@ -350,10 +347,7 @@ private record SendingTarget(@Nullable String clusterAlias, @Nullable String nod
     private CanMatchNodeRequest createCanMatchRequest(Map.Entry> entry) {
         final SearchShardIterator first = entry.getValue().get(0);
-        final List shardLevelRequests = entry.getValue()
-            .stream()
-            .map(this::buildShardLevelRequest)
-            .collect(Collectors.toCollection(ArrayList::new));
+        final List shardLevelRequests = entry.getValue().stream().map(this::buildShardLevelRequest).toList();
         assert entry.getValue().stream().allMatch(Objects::nonNull);
         assert entry.getValue()
             .stream()
@@ -419,7 +413,7 @@ private void onPhaseFailure(String msg, Exception cause) {
         listener.onFailure(new SearchPhaseExecutionException("can_match", msg, cause, ShardSearchFailure.EMPTY_ARRAY));
     }

-    private synchronized GroupShardsIterator getIterator(GroupShardsIterator shardsIts) {
+    private synchronized List getIterator(List shardsIts) {
         // TODO: pick the local shard when possible
         if (requireAtLeastOneMatch && numPossibleMatches == 0) {
             // this is a special case where we have no hit but we need to get at least one search response in order
@@ -452,14 +446,10 @@ private synchronized GroupShardsIterator getIterator(GroupS
             return shardsIts;
         }
         FieldSortBuilder fieldSort = FieldSortBuilder.getPrimaryFieldSortOrNull(request.source());
-        return new GroupShardsIterator<>(sortShards(shardsIts, minAndMaxes, fieldSort.order()));
+        return sortShards(shardsIts, minAndMaxes, fieldSort.order());
     }

-    private static List sortShards(
-        GroupShardsIterator shardsIts,
-        MinAndMax[] minAndMaxes,
-        SortOrder order
-    ) {
+    private static List sortShards(List shardsIts, MinAndMax[] minAndMaxes, SortOrder order) {
         int bound = shardsIts.size();
         List toSort = new ArrayList<>(bound);
         for (int i = 0; i < bound; i++) {
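createCanMatchRequest above swaps `collect(Collectors.toCollection(ArrayList::new))` for `Stream.toList()`. The difference worth remembering is mutability: `toList()` returns an unmodifiable list, which is fine when the result is only read, while the collector pays for a mutable copy nobody mutates. A quick illustration:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// toList() vs toCollection(ArrayList::new): same elements, different mutability.
public class ToListVsCollect {
    public static void main(String[] args) {
        List<Integer> mutable = Stream.of(1, 2, 3).collect(Collectors.toCollection(ArrayList::new));
        List<Integer> unmodifiable = Stream.of(1, 2, 3).toList();

        mutable.add(4); // fine
        try {
            unmodifiable.add(4);
        } catch (UnsupportedOperationException e) {
            System.out.println("toList() results are unmodifiable"); // the trade-off to keep in mind
        }
    }
}
```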
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
index 5c5c47b5fcc44..056806fbb0b00 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
@@ -20,7 +20,6 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
@@ -56,7 +55,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
         SearchPhaseResults queryPhaseResultConsumer,
         SearchRequest request,
         ActionListener listener,
-        GroupShardsIterator shardsIts,
+        List shardsIts,
         TransportSearchAction.SearchTimeProvider timeProvider,
         ClusterState clusterState,
         SearchTask task,
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
index 702369dc38390..1308a2fb61cfb 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
@@ -8,11 +8,11 @@
  */
 package org.elasticsearch.action.search;

-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.transport.Transport;

+import java.util.List;
 import java.util.Objects;
 import java.util.function.Function;

@@ -45,14 +45,14 @@ protected static String makeMissingShardsError(StringBuilder missingShards) {
             + "]. Consider using `allow_partial_search_results` setting to bypass this error.";
     }

-    protected void doCheckNoMissingShards(String phaseName, SearchRequest request, GroupShardsIterator shardsIts) {
+    protected void doCheckNoMissingShards(String phaseName, SearchRequest request, List shardsIts) {
         doCheckNoMissingShards(phaseName, request, shardsIts, this::missingShardsErrorMessage);
     }

     protected static void doCheckNoMissingShards(
         String phaseName,
         SearchRequest request,
-        GroupShardsIterator shardsIts,
+        List shardsIts,
         Function makeErrorMessage
     ) {
         assert request.allowPartialSearchResults() != null : "SearchRequest missing setting for allowPartialSearchResults";
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java
index a7f92700435a4..6016a0c7a1eba 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java
@@ -13,7 +13,6 @@
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.action.search.SearchResponse.Clusters;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -21,7 +20,6 @@
 import java.util.List;
 import java.util.Objects;
-import java.util.stream.StreamSupport;

 /**
  * A listener that allows to track progress of the {@link TransportSearchAction}.
@@ -225,7 +223,7 @@ static List buildSearchShards(List res
             .toList();
     }

-    static List buildSearchShards(GroupShardsIterator its) {
-        return StreamSupport.stream(its.spliterator(), false).map(e -> new SearchShard(e.getClusterAlias(), e.shardId())).toList();
+    static List buildSearchShardsFromIter(List its) {
+        return its.stream().map(e -> new SearchShard(e.getClusterAlias(), e.shardId())).toList();
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
index f75b84abc2f0f..1f595f47dc489 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
@@ -14,7 +14,6 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
@@ -25,6 +24,7 @@
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.transport.Transport;

+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Executor;
 import java.util.function.BiFunction;
@@ -52,7 +52,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
         SearchPhaseResults resultConsumer,
         SearchRequest request,
         ActionListener listener,
-        GroupShardsIterator shardsIts,
+        List shardsIts,
         TransportSearchAction.SearchTimeProvider timeProvider,
         ClusterState clusterState,
         SearchTask task,
@@ -104,7 +104,7 @@ protected void onShardGroupFailure(int shardIndex, SearchShardTarget shardTarget
     }

     @Override
-    protected void onShardResult(SearchPhaseResult result, SearchShardIterator shardIt) {
+    protected void onShardResult(SearchPhaseResult result) {
         QuerySearchResult queryResult = result.queryResult();
         if (queryResult.isNull() == false
             // disable sort optims for scroll requests because they keep track of the last bottom doc locally (per shard)
@@ -123,7 +123,7 @@ && getRequest().scroll() == null
             }
             bottomSortCollector.consumeTopDocs(topDocs, queryResult.sortValueFormats());
         }
-        super.onShardResult(result, shardIt);
+        super.onShardResult(result);
     }

     static SearchPhase nextPhase(
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java
index b543ff922886f..bf020cbd309eb 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java
@@ -10,9 +10,8 @@
 package org.elasticsearch.action.search;

 import org.elasticsearch.action.OriginalIndices;
-import org.elasticsearch.cluster.routing.PlainShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
-import org.elasticsearch.common.util.Countable;
+import org.elasticsearch.cluster.routing.ShardsIterator;
 import org.elasticsearch.common.util.PlainIterator;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
@@ -24,12 +23,12 @@
 import java.util.Objects;

 /**
- * Extension of {@link PlainShardIterator} used in the search api, which also holds the {@link OriginalIndices}
+ * Iterator for shards used in the search api, which also holds the {@link OriginalIndices}
  * of the search request (useful especially with cross-cluster search, as each cluster has its own set of original indices) as well as
  * the cluster alias.
  * @see OriginalIndices
  */
-public final class SearchShardIterator implements Comparable, Countable {
+public final class SearchShardIterator implements Comparable {

     private final OriginalIndices originalIndices;
     private final String clusterAlias;
@@ -42,7 +41,7 @@ public final class SearchShardIterator implements Comparable targetNodesIterator;

     /**
-     * Creates a {@link PlainShardIterator} instance that iterates over a subset of the given shards
+     * Creates a {@link SearchShardIterator} instance that iterates over a subset of the given shards
      * this the a given shardId.
      *
      * @param clusterAlias the alias of the cluster where the shard is located
@@ -55,7 +54,7 @@ public SearchShardIterator(@Nullable String clusterAlias, ShardId shardId, List<
     }

     /**
-     * Creates a {@link PlainShardIterator} instance that iterates over a subset of the given shards
+     * Creates a {@link SearchShardIterator} instance that iterates over a subset of the given shards
      *
      * @param clusterAlias the alias of the cluster where the shard is located
      * @param shardId shard id of the group
@@ -103,6 +102,9 @@ public String getClusterAlias() {
         return clusterAlias;
     }

+    /**
+     * Returns the next shard, or {@code null} if none available.
+     */
     SearchShardTarget nextOrNull() {
         final String nodeId = targetNodesIterator.nextOrNull();
         if (nodeId != null) {
@@ -111,6 +113,11 @@
         return null;
     }

+    /**
+     * Return the number of shards remaining in this {@link ShardsIterator}
+     *
+     * @return number of shards remaining
+     */
     int remaining() {
         return targetNodesIterator.remaining();
     }
@@ -130,6 +137,9 @@ List getTargetNodeIds() {
         return targetNodesIterator.asList();
     }

+    /**
+     * Resets the iterator to its initial state.
+     */
     void reset() {
         targetNodesIterator.reset();
     }
@@ -155,11 +165,18 @@ boolean prefiltered() {
         return prefiltered;
     }

-    @Override
+    /**
+     * The number of shard routing instances.
+     *
+     * @return number of shard routing instances in this iterator
+     */
     public int size() {
         return targetNodesIterator.size();
     }

+    /**
+     * The shard id this group relates to.
+     */
     ShardId shardId() {
         return shardId;
     }
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
index 6c95a3c8fd436..b8d0a928e05aa 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
@@ -23,7 +23,6 @@
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -49,6 +48,7 @@
 import org.elasticsearch.transport.TransportService;

 import java.io.IOException;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Executor;
 import java.util.function.BiFunction;
@@ -150,7 +150,7 @@ public void runNewSearchPhase(
         SearchTask task,
         SearchRequest searchRequest,
         Executor executor,
-        GroupShardsIterator shardIterators,
+        List shardIterators,
         TransportSearchAction.SearchTimeProvider timeProvider,
         BiFunction connectionLookup,
         ClusterState clusterState,
@@ -212,7 +212,7 @@ void runOpenPointInTimePhase(
         SearchTask task,
         SearchRequest searchRequest,
         Executor executor,
-        GroupShardsIterator shardIterators,
+        List shardIterators,
         TransportSearchAction.SearchTimeProvider timeProvider,
         BiFunction connectionLookup,
         ClusterState clusterState,
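The SearchShardIterator hunk above documents the `nextOrNull()`/`remaining()` contract: callers walk the candidate nodes for a shard one at a time and treat `null` as "no copies left to try". A pure-JDK stand-in for that calling convention, not the Elasticsearch class:

```java
import java.util.Iterator;
import java.util.List;

// Stand-in illustration of the nextOrNull()/remaining() iteration style.
public class NextOrNullSketch {
    static final class ShardCopies {
        private final Iterator<String> nodes;
        private int remaining;

        ShardCopies(List<String> nodeIds) {
            this.nodes = nodeIds.iterator();
            this.remaining = nodeIds.size();
        }

        String nextOrNull() {
            if (nodes.hasNext()) { remaining--; return nodes.next(); }
            return null; // exhaustion signal: callers fall back to a shard failure
        }

        int remaining() { return remaining; }
    }

    public static void main(String[] args) {
        ShardCopies copies = new ShardCopies(List.of("node-1", "node-2"));
        for (String node = copies.nextOrNull(); node != null; node = copies.nextOrNull()) {
            System.out.println("try " + node + ", remaining=" + copies.remaining());
        }
    }
}
```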
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
index 4b13d1642b600..6f075c6f35009 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
@@ -11,6 +11,7 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionListener;
@@ -41,7 +42,6 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.OperationRouting;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
@@ -1286,7 +1286,7 @@ private void executeSearch(
                 );
             }
         }
-        final GroupShardsIterator shardIterators = mergeShardsIterators(localShardIterators, remoteShardIterators);
+        final List shardIterators = mergeShardsIterators(localShardIterators, remoteShardIterators);

         failIfOverShardCountLimit(clusterService, shardIterators.size());

@@ -1420,7 +1420,7 @@ private static boolean hasReadOnlyIndices(String[] indices, ClusterState cluster
     }

     // package private for testing
-    static GroupShardsIterator mergeShardsIterators(
+    static List mergeShardsIterators(
         List localShardIterators,
         List remoteShardIterators
     ) {
@@ -1430,7 +1430,8 @@ static GroupShardsIterator mergeShardsIterators(
         } else {
             shards = CollectionUtils.concatLists(remoteShardIterators, localShardIterators);
         }
-        return GroupShardsIterator.sortAndCreate(shards);
+        CollectionUtil.timSort(shards);
+        return shards;
     }

     interface SearchPhaseProvider {
@@ -1438,7 +1439,7 @@ void runNewSearchPhase(
         SearchTask task,
         SearchRequest searchRequest,
         Executor executor,
-        GroupShardsIterator shardIterators,
+        List shardIterators,
         SearchTimeProvider timeProvider,
         BiFunction connectionLookup,
         ClusterState clusterState,
@@ -1462,7 +1463,7 @@ public void runNewSearchPhase(
         SearchTask task,
         SearchRequest searchRequest,
         Executor executor,
-        GroupShardsIterator shardIterators,
+        List shardIterators,
         SearchTimeProvider timeProvider,
         BiFunction connectionLookup,
         ClusterState clusterState,
@@ -1855,7 +1856,7 @@ List getLocalShardsIterator(
         String[] concreteIndices
     ) {
         var routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices());
-        GroupShardsIterator shardRoutings = clusterService.operationRouting()
+        List shardRoutings = clusterService.operationRouting()
             .searchShards(
                 clusterState,
                 concreteIndices,
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java
index 614a3e9cf22ae..83889b7cf752a 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java
@@ -9,6 +9,7 @@

 package org.elasticsearch.action.search;

+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.RemoteClusterActionType;
@@ -18,7 +19,6 @@
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.query.Rewriteable;
@@ -138,15 +138,14 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act
             concreteIndices
         );
         String[] concreteIndexNames = Arrays.stream(concreteIndices).map(Index::getName).toArray(String[]::new);
-        GroupShardsIterator shardIts = GroupShardsIterator.sortAndCreate(
-            transportSearchAction.getLocalShardsIterator(
-                clusterState,
-                searchRequest,
-                searchShardsRequest.clusterAlias(),
-                indicesAndAliases,
-                concreteIndexNames
-            )
+        List shardIts = transportSearchAction.getLocalShardsIterator(
+            clusterState,
+            searchRequest,
+            searchShardsRequest.clusterAlias(),
+            indicesAndAliases,
+            concreteIndexNames
         );
+        CollectionUtil.timSort(shardIts);
         if (SearchService.canRewriteToMatchNone(searchRequest.source()) == false) {
             delegate.onResponse(new SearchShardsResponse(toGroups(shardIts), clusterState.nodes().getAllNodes(), aliasFilters));
         } else {
@@ -170,7 +169,7 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act
         );
     }

-    private static List toGroups(GroupShardsIterator shardIts) {
+    private static List toGroups(List shardIts) {
         List groups = new ArrayList<>(shardIts.size());
         for (SearchShardIterator shardIt : shardIts) {
             boolean skip = shardIt.skip();
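mergeShardsIterators above no longer wraps the merged result in a GroupShardsIterator: it concatenates the two lists and sorts them in place with Lucene's `CollectionUtil.timSort`, relying on the iterators' natural ordering. A JDK-only equivalent of that merge-and-sort step, with `List.sort` standing in for timSort and an invented `ShardRef` record in place of the real iterator type:

```java
import java.util.ArrayList;
import java.util.List;

// Merge two shard lists and sort in place by natural order - the shape of the
// change above, minus the Lucene dependency. ShardRef is a hypothetical stand-in.
public class MergeAndSortSketch {
    record ShardRef(String index, int shard) implements Comparable<ShardRef> {
        public int compareTo(ShardRef o) {
            int cmp = index.compareTo(o.index);
            return cmp != 0 ? cmp : Integer.compare(shard, o.shard);
        }
    }

    static List<ShardRef> merge(List<ShardRef> local, List<ShardRef> remote) {
        List<ShardRef> shards = new ArrayList<>(remote);
        shards.addAll(local);
        shards.sort(null); // natural order, matching the Comparable the iterators implement
        return shards;
    }

    public static void main(String[] args) {
        System.out.println(merge(List.of(new ShardRef("a", 1)), List.of(new ShardRef("a", 0), new ShardRef("b", 0))));
    }
}
```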
diff --git a/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java b/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java
index 910be151d1bf5..c2cc8d98cca37 100644
--- a/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java
+++ b/server/src/main/java/org/elasticsearch/action/support/IndexComponentSelector.java
@@ -9,6 +9,7 @@

 package org.elasticsearch.action.support;

+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -23,14 +24,11 @@
  * We define as index components the two different sets of indices a data stream could consist of:
  * - DATA: represents the backing indices
  * - FAILURES: represent the failing indices
- * - ALL: represents all available in this expression components, meaning if it's a data stream both backing and failure indices and if it's
- * an index only the index itself.
  * Note: An index is its own DATA component, but it cannot have a FAILURE component.
  */
 public enum IndexComponentSelector implements Writeable {
     DATA("data", (byte) 0),
-    FAILURES("failures", (byte) 1),
-    ALL_APPLICABLE("*", (byte) 2);
+    FAILURES("failures", (byte) 1);

     private final String key;
     private final byte id;
@@ -75,7 +73,15 @@ public static IndexComponentSelector getByKey(String key) {
     }

     public static IndexComponentSelector read(StreamInput in) throws IOException {
-        return getById(in.readByte());
+        byte id = in.readByte();
+        if (in.getTransportVersion().onOrAfter(TransportVersions.REMOVE_ALL_APPLICABLE_SELECTOR)
+            || in.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_ALL_APPLICABLE_SELECTOR_9_0)
+            || in.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_ALL_APPLICABLE_SELECTOR_BACKPORT_8_X)) {
+            return getById(id);
+        } else {
+            // Legacy value ::*, converted to ::data
+            return id == 2 ? DATA : getById(id);
+        }
     }

     // Visible for testing
@@ -95,10 +101,10 @@ public void writeTo(StreamOutput out) throws IOException {
     }

     public boolean shouldIncludeData() {
-        return this == ALL_APPLICABLE || this == DATA;
+        return this == DATA;
     }

     public boolean shouldIncludeFailures() {
-        return this == ALL_APPLICABLE || this == FAILURES;
+        return this == FAILURES;
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
index 836eedd960f10..6fabf515ac75c 100644
--- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
@@ -35,6 +34,7 @@
 import org.elasticsearch.transport.Transports;

 import java.io.IOException;
+import java.util.List;
 import java.util.concurrent.Executor;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReferenceArray;
@@ -108,7 +108,7 @@ protected void doExecuteForked(Task task, Request request, ActionListener shards(ClusterState clusterState, Request request, String[] concreteIndices);
+    protected abstract List shards(ClusterState clusterState, Request request, String[] concreteIndices);

     protected abstract ClusterBlockException checkGlobalBlock(ClusterState state, Request request);

@@ -121,7 +121,7 @@ protected class AsyncBroadcastAction {
         final ActionListener listener;
         final ClusterState clusterState;
         final DiscoveryNodes nodes;
-        final GroupShardsIterator shardsIts;
+        final List shardsIts;
         final int expectedOps;
         final AtomicInteger counterOps = new AtomicInteger();
         // ShardResponse or Exception
diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableAction.java
index 1255dbdf7419d..c44a0118111f3 100644
--- a/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableAction.java
@@ -76,7 +76,7 @@ protected TransportBroadcastUnpromotableAction(

     @Override
     protected void doExecute(Task task, Request request, ActionListener listener) {
-        final var unpromotableShards = request.indexShardRoutingTable.unpromotableShards();
+        final var unpromotableShards = request.indexShardRoutingTable.assignedUnpromotableShards();
         final var responses = new ArrayList(unpromotableShards.size());

         try (var listeners = new RefCountingListener(listener.map(v -> combineUnpromotableShardResponses(responses)))) {
a/server/src/main/java/org/elasticsearch/action/support/local/LocalClusterStateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/local/LocalClusterStateRequest.java @@ -41,12 +41,23 @@ protected LocalClusterStateRequest(TimeValue masterTimeout) { */ @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) protected LocalClusterStateRequest(StreamInput in) throws IOException { + this(in, true); + } + + /** + * This constructor exists solely for BwC purposes. It should exclusively be used by requests that used to extend + * {@link org.elasticsearch.action.support.master.MasterNodeRequest} and still need to be able to serialize incoming requests. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) + protected LocalClusterStateRequest(StreamInput in, boolean readLocal) throws IOException { super(in); masterTimeout = in.readTimeValue(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { in.readVLong(); } - in.readBoolean(); + if (readLocal) { + in.readBoolean(); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 997d859ec35a2..cefb27376f9ea 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -65,7 +65,7 @@ public void onFailure(Exception e) { } }); case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> { - if (indexShard.getReplicationGroup().getRoutingTable().allUnpromotableShards().size() > 0) { + if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) { sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout); } else { l.onResponse(true); } diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index 02479a9f8d143..8a2e9168c3bd8 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; @@ -65,13 +64,8 @@ protected ShardIterator shards(ClusterState state, InternalRequest request) { final var operationRouting = clusterService.operationRouting(); if (request.request().doc() != null && request.request().routing() == null) { // artificial document without routing specified, ignore its "id" and use either random shard or according to preference - GroupShardsIterator groupShardsIter = operationRouting.searchShards( - state, - new String[] { request.concreteIndex() }, - null, - request.request().preference() - ); - return groupShardsIter.iterator().next(); + return operationRouting.searchShards(state, new String[] { request.concreteIndex() }, null, request.request().preference()) + .getFirst(); } return
operationRouting.useOnlyPromotableShardsForStateless( diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index ee84bcd15824d..bd76ef81d2c43 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -96,7 +97,10 @@ public TransportUpdateAction( @Override protected Executor executor(ShardId shardId) { - final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); + return executor(indicesService.indexServiceSafe(shardId.getIndex())); + } + + private Executor executor(IndexService indexService) { return threadPool.executor(indexService.getIndexSettings().getIndexMetadata().isSystem() ? Names.SYSTEM_WRITE : Names.WRITE); } @@ -189,136 +193,148 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); final IndexShard indexShard = indexService.getShard(shardId.getId()); final MappingLookup mappingLookup = indexShard.mapperService().mappingLookup(); - final UpdateHelper.Result result = deleteInferenceResults( - request, - updateHelper.prepare(request, indexShard, threadPool::absoluteTimeInMillis), - indexService.getMetadata(), - mappingLookup - ); - - switch (result.getResponseResult()) { - case CREATED -> { - IndexRequest upsertRequest = result.action(); - // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request - final BytesReference upsertSourceBytes = upsertRequest.source(); - client.bulk( - toSingleItemBulkRequest(upsertRequest), - unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> { - UpdateResponse update = new UpdateResponse( - response.getShardInfo(), - response.getShardId(), - response.getId(), - response.getSeqNo(), - response.getPrimaryTerm(), - response.getVersion(), - response.getResult() - ); - if (request.fetchSource() != null && request.fetchSource().fetchSource()) { - Tuple> sourceAndContent = XContentHelper.convertToMap( - upsertSourceBytes, - true, - upsertRequest.getContentType() - ); - update.setGetResult( - UpdateHelper.extractGetResult( - request, - request.concreteIndex(), - mappingLookup, + + var executor = executor(indexService); + assert ThreadPool.assertCurrentThreadPool(Names.SYSTEM_WRITE, Names.WRITE); + + SubscribableListener.newForked(indexShard::ensureMutable) + // Make sure to fork back to a `write` thread pool if necessary + .andThen(executor, threadPool.getThreadContext(), (l, unused) -> ActionListener.completeWith(l, () -> { + assert ThreadPool.assertCurrentThreadPool(Names.SYSTEM_WRITE, Names.WRITE); + return deleteInferenceResults( + request, + updateHelper.prepare(request, indexShard, threadPool::absoluteTimeInMillis), // Gets the doc using the engine + indexService.getMetadata(), + mappingLookup + ); + })) + // Proceed with a single item bulk request + 
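The rewritten shardOperation above replaces a flat switch with a SubscribableListener chain: ensureMutable may complete on an arbitrary thread, so the engine work is explicitly forked back onto a write executor before the result feeds the bulk path. A minimal standalone sketch of that chaining pattern, using the same SubscribableListener API the patch uses (the Long step value and the class/method names here are invented for illustration):

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.SubscribableListener;
import org.elasticsearch.common.util.concurrent.ThreadContext;

import java.util.concurrent.Executor;

class ChainedStepsSketch {
    static void run(Executor writeExecutor, ThreadContext threadContext, ActionListener<String> listener) {
        SubscribableListener
            // step 1: an async call that may complete on any thread (cf. indexShard::ensureMutable)
            .<Long>newForked(l -> l.onResponse(42L))
            // step 2: forked back onto the write executor, preserving the thread context
            .andThen(writeExecutor, threadContext, (l, value) -> l.onResponse("prepared-" + value))
            // the final listener receives the result, or a failure from either step
            .addListener(listener);
    }
}
```

A failure thrown at any step propagates straight to the final listener, which is why the rewritten case arms only have to handle their own responses.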
.andThen((l, result) -> { + switch (result.getResponseResult()) { + case CREATED -> { + IndexRequest upsertRequest = result.action(); + // we fetch it from the index request so we don't generate the bytes twice, it's already done in the index request + final BytesReference upsertSourceBytes = upsertRequest.source(); + client.bulk( + toSingleItemBulkRequest(upsertRequest), + unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> { + UpdateResponse update = new UpdateResponse( + response.getShardInfo(), + response.getShardId(), + response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), - sourceAndContent.v2(), - sourceAndContent.v1(), - upsertSourceBytes - ) - ); - } else { - update.setGetResult(null); - } - update.setForcedRefresh(response.forcedRefresh()); - listener.onResponse(update); - }, exception -> handleUpdateFailureWithRetry(listener, request, exception, retryCount))) - ); - } - case UPDATED -> { - IndexRequest indexRequest = result.action(); - // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request - final BytesReference indexSourceBytes = indexRequest.source(); - client.bulk( - toSingleItemBulkRequest(indexRequest), - unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> { - UpdateResponse update = new UpdateResponse( - response.getShardInfo(), - response.getShardId(), - response.getId(), - response.getSeqNo(), - response.getPrimaryTerm(), - response.getVersion(), - response.getResult() + response.getResult() + ); + if (request.fetchSource() != null && request.fetchSource().fetchSource()) { + Tuple> sourceAndContent = XContentHelper.convertToMap( + upsertSourceBytes, + true, + upsertRequest.getContentType() + ); + update.setGetResult( + UpdateHelper.extractGetResult( + request, + request.concreteIndex(), + mappingLookup, + response.getSeqNo(), + response.getPrimaryTerm(), + response.getVersion(), + sourceAndContent.v2(), + sourceAndContent.v1(), + upsertSourceBytes + ) + ); + } else { + update.setGetResult(null); + } + update.setForcedRefresh(response.forcedRefresh()); + l.onResponse(update); + }, exception -> handleUpdateFailureWithRetry(l, request, exception, retryCount))) ); - update.setForcedRefresh(response.forcedRefresh()); - listener.onResponse(update); - }, exception -> handleUpdateFailureWithRetry(listener, request, exception, retryCount))) - ); - } - case DELETED -> { - DeleteRequest deleteRequest = result.action(); - client.bulk( - toSingleItemBulkRequest(deleteRequest), - unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> { - UpdateResponse update = new UpdateResponse( - response.getShardInfo(), - response.getShardId(), - response.getId(), - response.getSeqNo(), - response.getPrimaryTerm(), - response.getVersion(), - response.getResult() + } + case UPDATED -> { + IndexRequest indexRequest = result.action(); + // we fetch it from the index request so we don't generate the bytes twice, it's already done in the index request + final BytesReference indexSourceBytes = indexRequest.source(); + client.bulk( + toSingleItemBulkRequest(indexRequest), + unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> { + UpdateResponse update = new UpdateResponse( +
response.getShardInfo(), + response.getShardId(), + response.getId(), + response.getSeqNo(), + response.getPrimaryTerm(), + response.getVersion(), + response.getResult() + ); + update.setGetResult( + UpdateHelper.extractGetResult( + request, + request.concreteIndex(), + mappingLookup, + response.getSeqNo(), + response.getPrimaryTerm(), + response.getVersion(), + result.updatedSourceAsMap(), + result.updateSourceContentType(), + indexSourceBytes + ) + ); + update.setForcedRefresh(response.forcedRefresh()); + l.onResponse(update); + }, exception -> handleUpdateFailureWithRetry(l, request, exception, retryCount))) ); - update.setGetResult( - UpdateHelper.extractGetResult( - request, - request.concreteIndex(), - mappingLookup, - response.getSeqNo(), - response.getPrimaryTerm(), - response.getVersion(), - result.updatedSourceAsMap(), - result.updateSourceContentType(), - null - ) + } + case DELETED -> { + DeleteRequest deleteRequest = result.action(); + client.bulk( + toSingleItemBulkRequest(deleteRequest), + unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> { + UpdateResponse update = new UpdateResponse( + response.getShardInfo(), + response.getShardId(), + response.getId(), + response.getSeqNo(), + response.getPrimaryTerm(), + response.getVersion(), + response.getResult() + ); + update.setGetResult( + UpdateHelper.extractGetResult( + request, + request.concreteIndex(), + mappingLookup, + response.getSeqNo(), + response.getPrimaryTerm(), + response.getVersion(), + result.updatedSourceAsMap(), + result.updateSourceContentType(), + null + ) + ); + update.setForcedRefresh(response.forcedRefresh()); + l.onResponse(update); + }, exception -> handleUpdateFailureWithRetry(l, request, exception, retryCount))) ); - update.setForcedRefresh(response.forcedRefresh()); - listener.onResponse(update); - }, exception -> handleUpdateFailureWithRetry(listener, request, exception, retryCount))) - ); - } - case NOOP -> { - UpdateResponse update = result.action(); - IndexService indexServiceOrNull = indicesService.indexService(shardId.getIndex()); - if (indexServiceOrNull != null) { - IndexShard shard = indexService.getShardOrNull(shardId.getId()); - if (shard != null) { - shard.noopUpdate(); } + case NOOP -> { + UpdateResponse update = result.action(); + IndexService indexServiceOrNull = indicesService.indexService(shardId.getIndex()); + if (indexServiceOrNull != null) { + IndexShard shard = indexService.getShardOrNull(shardId.getId()); + if (shard != null) { + shard.noopUpdate(); + } + } + l.onResponse(update); + } + default -> throw new IllegalStateException("Illegal result " + result.getResponseResult()); } - listener.onResponse(update); - } - default -> throw new IllegalStateException("Illegal result " + result.getResponseResult()); - } + }) + .addListener(listener); } private void handleUpdateFailureWithRetry( diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java index 8f1537d917c15..f4363ce1948b9 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapUtil.java @@ -24,7 +24,7 @@ private BootstrapUtil() {} public static SecureSettings loadSecureSettings(Environment initialEnv, SecureString keystorePassword) throws BootstrapException { try { - return KeyStoreWrapper.bootstrap(initialEnv.configFile(), () -> keystorePassword); + return KeyStoreWrapper.bootstrap(initialEnv.configDir(), () -> keystorePassword); } 
catch (Exception e) { throw new BootstrapException(e); } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java b/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java index 7b85b369b5dd5..54244f320840a 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/ConsoleLoader.java @@ -52,7 +52,7 @@ static Supplier buildConsoleLoader(ClassLoader classLoader) { } private static ClassLoader buildClassLoader(Environment env) { - final Path libDir = env.libFile().resolve("tools").resolve("ansi-console"); + final Path libDir = env.libDir().resolve("tools").resolve("ansi-console"); try (var libDirFilesStream = Files.list(libDir)) { final URL[] urls = libDirFilesStream.filter(each -> each.getFileName().toString().endsWith(".jar")) diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 311df05f9b07b..ba978f09dfef5 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -32,9 +32,9 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; -import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyParserUtils; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.jdk.JarHell; @@ -187,7 +187,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { nodeEnv.validateNativesConfig(); // temporary directories are important for JNA initializeNatives( - nodeEnv.tmpFile(), + nodeEnv.tmpDir(), BootstrapSettings.MEMORY_LOCK_SETTING.get(args.nodeSettings()), true, // always install system call filters, not user-configurable since 8.0.0 BootstrapSettings.CTRLHANDLER_SETTING.get(args.nodeSettings()) @@ -223,8 +223,8 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { ); // load the plugin Java modules and layers now for use in entitlements - var modulesBundles = PluginsLoader.loadModulesBundles(nodeEnv.modulesFile()); - var pluginsBundles = PluginsLoader.loadPluginsBundles(nodeEnv.pluginsFile()); + var modulesBundles = PluginsLoader.loadModulesBundles(nodeEnv.modulesDir()); + var pluginsBundles = PluginsLoader.loadPluginsBundles(nodeEnv.pluginsDir()); final PluginsLoader pluginsLoader; @@ -242,7 +242,13 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, findPluginsWithNativeAccess(pluginPolicies)); var pluginsResolver = PluginsResolver.create(pluginsLoader); - EntitlementBootstrap.bootstrap(pluginPolicies, pluginsResolver::resolveClassToPluginName); + EntitlementBootstrap.bootstrap( + pluginPolicies, + pluginsResolver::resolveClassToPluginName, + nodeEnv.dataDirs(), + nodeEnv.configDir(), + nodeEnv.tmpDir() + ); } else if (RuntimeVersionFeature.isSecurityManagerAvailable()) { // no need to explicitly enable native access for legacy code pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, Map.of()); diff --git 
a/server/src/main/java/org/elasticsearch/bootstrap/Security.java b/server/src/main/java/org/elasticsearch/bootstrap/Security.java index dc6de9a6b2c91..a352112b67afb 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -178,11 +178,11 @@ static Map getPluginAndModulePermissions(Environment environment) t } }; - for (Path plugin : PluginsUtils.findPluginDirs(environment.pluginsFile())) { - addPolicy.accept(PolicyUtil.getPluginPolicyInfo(plugin, environment.tmpFile())); + for (Path plugin : PluginsUtils.findPluginDirs(environment.pluginsDir())) { + addPolicy.accept(PolicyUtil.getPluginPolicyInfo(plugin, environment.tmpDir())); } - for (Path plugin : PluginsUtils.findPluginDirs(environment.modulesFile())) { - addPolicy.accept(PolicyUtil.getModulePolicyInfo(plugin, environment.tmpFile())); + for (Path plugin : PluginsUtils.findPluginDirs(environment.modulesDir())) { + addPolicy.accept(PolicyUtil.getModulePolicyInfo(plugin, environment.tmpDir())); } return Collections.unmodifiableMap(map); @@ -199,7 +199,7 @@ static Permissions createPermissions(Environment environment, Path pidFile) thro private static List createRecursiveDataPathPermission(Environment environment) throws IOException { Permissions policy = new Permissions(); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { addDirectoryPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete", true); } return toFilePermissions(policy); @@ -215,13 +215,13 @@ private static Map> readSecuredConfigFiles( Map> securedSettingKeys = new HashMap<>(); for (URL url : mainCodebases) { - for (Permission p : PolicyUtil.getPolicyPermissions(url, template, environment.tmpFile())) { + for (Permission p : PolicyUtil.getPolicyPermissions(url, template, environment.tmpDir())) { readSecuredConfigFilePermissions(environment, url, p, securedConfigFiles, securedSettingKeys); } } for (var pp : pluginPolicies.entrySet()) { - for (Permission p : PolicyUtil.getPolicyPermissions(pp.getKey(), pp.getValue(), environment.tmpFile())) { + for (Permission p : PolicyUtil.getPolicyPermissions(pp.getKey(), pp.getValue(), environment.tmpDir())) { readSecuredConfigFilePermissions(environment, pp.getKey(), p, securedConfigFiles, securedSettingKeys); } } @@ -242,8 +242,8 @@ private static Map> readSecuredConfigFiles( // If the setting shouldn't be an HTTPS URL, that'll be caught by that setting's validation later in the process. // HTTP (no S) URLs are not supported. 
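The hunk that follows keeps Security.java's containment guard intact while renaming configFile() to configDir(): a user-supplied setting value is resolved against the config directory and rejected if the result escapes it. A standalone pure-JDK sketch of that guard (the normalize() call is an extra precaution added here, not something the patch itself does):

```java
import java.nio.file.Path;

final class ConfigPathGuard {
    // Resolve a relative setting value against the config dir and verify the result
    // is still inside it, rejecting traversal such as "../../etc/shadow".
    static Path resolveInConfig(Path configDir, String settingValue) {
        Path file = configDir.resolve(settingValue).normalize();
        if (file.startsWith(configDir) == false) {
            throw new IllegalStateException("tried to grant access to file outside config directory " + file);
        }
        return file;
    }
}
```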
if (settingValue.toLowerCase(Locale.ROOT).startsWith("https://") == false) { - Path file = environment.configFile().resolve(settingValue); - if (file.startsWith(environment.configFile()) == false) { + Path file = environment.configDir().resolve(settingValue); + if (file.startsWith(environment.configDir()) == false) { throw new IllegalStateException( ps.getValue() + " tried to grant access to file outside config directory " + file ); @@ -263,9 +263,9 @@ private static Map> readSecuredConfigFiles( // always add some config files as exclusive files that no one can access // there's no reason for anyone to read these once the security manager is initialized // so if something has tried to grant itself access, crash out with an error - addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("elasticsearch.yml").toString()); - addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("jvm.options").toString()); - addSpeciallySecuredConfigFile(securedConfigFiles, environment.configFile().resolve("jvm.options.d/-").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("elasticsearch.yml").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("jvm.options").toString()); + addSpeciallySecuredConfigFile(securedConfigFiles, environment.configDir().resolve("jvm.options.d/-").toString()); return Collections.unmodifiableMap(securedConfigFiles); } @@ -279,8 +279,8 @@ private static void readSecuredConfigFilePermissions( ) { String securedFileName = extractSecuredName(p, SecuredConfigFileAccessPermission.class); if (securedFileName != null) { - Path securedFile = environment.configFile().resolve(securedFileName); - if (securedFile.startsWith(environment.configFile()) == false) { + Path securedFile = environment.configDir().resolve(securedFileName); + if (securedFile.startsWith(environment.configDir()) == false) { throw new IllegalStateException("[" + url + "] tried to grant access to file outside config directory " + securedFile); } logger.debug("Jar {} securing access to config file {}", url, securedFile); @@ -336,26 +336,26 @@ static void addClasspathPermissions(Permissions policy) throws IOException { */ static void addFilePermissions(Permissions policy, Environment environment, Path pidFile) throws IOException { // read-only dirs - addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binFile(), "read,readlink", false); - addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libFile(), "read,readlink", false); - addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesFile(), "read,readlink", false); - addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.pluginsFile(), "read,readlink", false); - addDirectoryPath(policy, "path.conf", environment.configFile(), "read,readlink", false); + addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binDir(), "read,readlink", false); + addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libDir(), "read,readlink", false); + addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesDir(), "read,readlink", false); + addDirectoryPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.pluginsDir(), "read,readlink", false); + addDirectoryPath(policy, "path.conf", environment.configDir(), "read,readlink", false); // read-write dirs - addDirectoryPath(policy, 
"java.io.tmpdir", environment.tmpFile(), "read,readlink,write,delete", false); - addDirectoryPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsFile(), "read,readlink,write,delete", false); - if (environment.sharedDataFile() != null) { + addDirectoryPath(policy, "java.io.tmpdir", environment.tmpDir(), "read,readlink,write,delete", false); + addDirectoryPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsDir(), "read,readlink,write,delete", false); + if (environment.sharedDataDir() != null) { addDirectoryPath( policy, Environment.PATH_SHARED_DATA_SETTING.getKey(), - environment.sharedDataFile(), + environment.sharedDataDir(), "read,readlink,write,delete", false ); } final Set dataFilesPaths = new HashSet<>(); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { addDirectoryPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete", false); /* * We have to do this after adding the path because a side effect of that is that the directory is created; the Path#toRealPath @@ -371,7 +371,7 @@ static void addFilePermissions(Permissions policy, Environment environment, Path throw new IllegalStateException("unable to access [" + path + "]", e); } } - for (Path path : environment.repoFiles()) { + for (Path path : environment.repoDirs()) { addDirectoryPath(policy, Environment.PATH_REPO_SETTING.getKey(), path, "read,readlink,write,delete", false); } @@ -380,7 +380,7 @@ static void addFilePermissions(Permissions policy, Environment environment, Path addSingleFilePath(policy, pidFile, "delete"); } // we need to touch the operator/settings.json file when restoring from snapshots, on some OSs it needs file write permission - addSingleFilePath(policy, environment.configFile().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME), "read,readlink,write"); + addSingleFilePath(policy, environment.configDir().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME), "read,readlink,write"); } /** diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java index 6a4296d9b0478..35284cebf22ad 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java @@ -69,14 +69,14 @@ void spawnNativeControllers(final Environment environment) throws IOException { if (spawned.compareAndSet(false, true) == false) { throw new IllegalStateException("native controllers already spawned"); } - if (Files.exists(environment.modulesFile()) == false) { - throw new IllegalStateException("modules directory [" + environment.modulesFile() + "] not found"); + if (Files.exists(environment.modulesDir()) == false) { + throw new IllegalStateException("modules directory [" + environment.modulesDir() + "] not found"); } /* * For each module, attempt to spawn the controller daemon. Silently ignore any module that doesn't include a controller for the * correct platform. 
*/ - List paths = PluginsUtils.findPluginDirs(environment.modulesFile()); + List paths = PluginsUtils.findPluginDirs(environment.modulesDir()); for (final Path modules : paths) { final PluginDescriptor info = PluginDescriptor.readFromProperties(modules); final Path spawnPath = Platforms.nativeControllerPath(modules); @@ -91,7 +91,7 @@ void spawnNativeControllers(final Environment environment) throws IOException { ); throw new IllegalArgumentException(message); } - final Process process = spawnNativeController(spawnPath, environment.tmpFile()); + final Process process = spawnNativeController(spawnPath, environment.tmpDir()); // The process _shouldn't_ write any output via its stdout or stderr, but if it does then // it will block if nothing is reading that output. To avoid this we can pipe the // outputs and create pump threads to write any messages there to the ES log. diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index aa3a6a201eac4..659e78f99c21a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -146,6 +146,10 @@ public boolean hasIndexBlock(String index, ClusterBlock block) { return indicesBlocks.containsKey(index) && indicesBlocks.get(index).contains(block); } + public boolean hasIndexBlockLevel(String index, ClusterBlockLevel level) { + return blocksForIndex(level, index).isEmpty() == false; + } + public boolean hasIndexBlockWithId(String index, int blockId) { final Set clusterBlocks = indicesBlocks.get(index); if (clusterBlocks != null) { @@ -398,6 +402,10 @@ public boolean hasIndexBlock(String index, ClusterBlock block) { return indices.getOrDefault(index, Set.of()).contains(block); } + public boolean hasIndexBlockLevel(String index, ClusterBlockLevel level) { + return indices.getOrDefault(index, Set.of()).stream().anyMatch(clusterBlock -> clusterBlock.contains(level)); + } + public Builder removeIndexBlock(String index, ClusterBlock block) { if (indices.containsKey(index) == false) { return this; diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java index 916a192d53871..77d77099f92cb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java @@ -371,7 +371,7 @@ private static void blockForbiddenVersions(TransportVersion joiningTransportVers * that are also present across the whole cluster as a result. 
*/ private Set calculateEffectiveClusterFeatures(DiscoveryNodes nodes, Map> nodeFeatures) { - if (featureService.featuresCanBeAssumedForNodes(nodes)) { + if (FeatureService.featuresCanBeAssumedForNodes(nodes)) { Set assumedFeatures = featureService.getNodeFeatures() .values() .stream() @@ -382,7 +382,7 @@ private Set calculateEffectiveClusterFeatures(DiscoveryNodes nodes, Map< // add all assumed features to the featureset of all nodes of the next major version nodeFeatures = new HashMap<>(nodeFeatures); for (var node : nodes.getNodes().entrySet()) { - if (featureService.featuresCanBeAssumedForNode(node.getValue())) { + if (FeatureService.featuresCanBeAssumedForNode(node.getValue())) { assert nodeFeatures.containsKey(node.getKey()) : "Node " + node.getKey() + " does not have any features"; nodeFeatures.computeIfPresent(node.getKey(), (k, v) -> { var newFeatures = new HashSet<>(v); @@ -525,7 +525,7 @@ private Set enforceNodeFeatureBarrier(DiscoveryNode node, Set ef return newNodeFeatures; } - if (featureService.featuresCanBeAssumedForNode(node)) { + if (FeatureService.featuresCanBeAssumedForNode(node)) { // it might still be ok for this node to join if this node can have assumed features, // and all the missing features are assumed // we can get the NodeFeature object direct from this node's registered features diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java index ef28a46d423da..896381ba185ed 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java @@ -156,9 +156,8 @@ public static Map> getAutoExpandReplicaChanges( )) { if (indexMetadata.getNumberOfReplicas() == 0) { nrReplicasChanged.computeIfAbsent(1, ArrayList::new).add(indexMetadata.getIndex().getName()); - } else { - continue; } + continue; } if (allocation == null) { allocation = allocationSupplier.get(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index 8366083b1907e..41b716dd99bdf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -11,10 +11,8 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -22,7 +20,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.Processors; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -36,7 +33,6 @@ import java.util.Objects; import java.util.Set; import java.util.TreeSet; -import java.util.regex.Pattern; import static java.lang.String.format; import static org.elasticsearch.node.Node.NODE_EXTERNAL_ID_SETTING; @@ -52,8 +48,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl private 
static final ParseField PROCESSORS_RANGE_FIELD = new ParseField("processors_range"); private static final ParseField MEMORY_FIELD = new ParseField("memory"); private static final ParseField STORAGE_FIELD = new ParseField("storage"); - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated field - private static final ParseField VERSION_FIELD = new ParseField("node_version"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "desired_node", @@ -63,8 +57,7 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparabl (Processors) args[1], (ProcessorsRange) args[2], (ByteSizeValue) args[3], - (ByteSizeValue) args[4], - (String) args[5] + (ByteSizeValue) args[4] ) ); @@ -98,12 +91,6 @@ static void configureParser(ConstructingObjectParser parser) { STORAGE_FIELD, ObjectParser.ValueType.STRING ); - parser.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> p.text(), - VERSION_FIELD, - ObjectParser.ValueType.STRING - ); } private final Settings settings; @@ -112,21 +99,9 @@ static void configureParser(ConstructingObjectParser parser) { private final ByteSizeValue memory; private final ByteSizeValue storage; - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated version field - private final String version; private final String externalId; private final Set roles; - @Deprecated - public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage, String version) { - this(settings, null, processorsRange, memory, storage, version); - } - - @Deprecated - public DesiredNode(Settings settings, double processors, ByteSizeValue memory, ByteSizeValue storage, String version) { - this(settings, Processors.of(processors), null, memory, storage, version); - } - public DesiredNode(Settings settings, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) { this(settings, null, processorsRange, memory, storage); } @@ -136,17 +111,6 @@ public DesiredNode(Settings settings, double processors, ByteSizeValue memory, B } DesiredNode(Settings settings, Processors processors, ProcessorsRange processorsRange, ByteSizeValue memory, ByteSizeValue storage) { - this(settings, processors, processorsRange, memory, storage, null); - } - - DesiredNode( - Settings settings, - Processors processors, - ProcessorsRange processorsRange, - ByteSizeValue memory, - ByteSizeValue storage, - @Deprecated String version - ) { assert settings != null; assert memory != null; assert storage != null; @@ -180,7 +144,6 @@ public DesiredNode(Settings settings, double processors, ByteSizeValue memory, B this.processorsRange = processorsRange; this.memory = memory; this.storage = storage; - this.version = version; this.externalId = NODE_EXTERNAL_ID_SETTING.get(settings); this.roles = Collections.unmodifiableSortedSet(new TreeSet<>(DiscoveryNode.getRolesFromSettings(settings))); } @@ -198,25 +161,11 @@ public static DesiredNode readFrom(StreamInput in) throws IOException { } final var memory = ByteSizeValue.readFrom(in); final var storage = ByteSizeValue.readFrom(in); - final String version; - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { - version = in.readOptionalString(); - } else { - version = Version.readVersion(in).toString(); + if (in.getTransportVersion().before(TransportVersions.REMOVE_DESIRED_NODE_VERSION) + && in.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_DESIRED_NODE_VERSION_90) == 
false) { + in.readOptionalString(); } - return new DesiredNode(settings, processors, processorsRange, memory, storage, version); - } - - private static final Pattern SEMANTIC_VERSION_PATTERN = Pattern.compile("^(\\d+\\.\\d+\\.\\d+)\\D?.*"); - - private static Version parseLegacyVersion(String version) { - if (version != null) { - var semanticVersionMatcher = SEMANTIC_VERSION_PATTERN.matcher(version); - if (semanticVersionMatcher.matches()) { - return Version.fromString(semanticVersionMatcher.group(1)); - } - } - return null; + return new DesiredNode(settings, processors, processorsRange, memory, storage); } @Override @@ -232,16 +181,9 @@ public void writeTo(StreamOutput out) throws IOException { } memory.writeTo(out); storage.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { - out.writeOptionalString(version); - } else { - Version parsedVersion = parseLegacyVersion(version); - if (version == null) { - // Some node is from before we made the version field not required. If so, fill in with the current node version. - Version.writeVersion(Version.CURRENT, out); - } else { - Version.writeVersion(parsedVersion, out); - } + if (out.getTransportVersion().before(TransportVersions.REMOVE_DESIRED_NODE_VERSION) + && out.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_DESIRED_NODE_VERSION_90) == false) { + out.writeOptionalString(null); } } @@ -269,14 +211,6 @@ public void toInnerXContent(XContentBuilder builder, Params params) throws IOExc } builder.field(MEMORY_FIELD.getPreferredName(), memory); builder.field(STORAGE_FIELD.getPreferredName(), storage); - addDeprecatedVersionField(builder); - } - - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // Remove deprecated field from response - private void addDeprecatedVersionField(XContentBuilder builder) throws IOException { - if (version != null) { - builder.field(VERSION_FIELD.getPreferredName(), version); - } } public boolean hasMasterRole() { @@ -356,7 +290,6 @@ private boolean equalsWithoutProcessorsSpecification(DesiredNode that) { return Objects.equals(settings, that.settings) && Objects.equals(memory, that.memory) && Objects.equals(storage, that.storage) - && Objects.equals(version, that.version) && Objects.equals(externalId, that.externalId) && Objects.equals(roles, that.roles); } @@ -369,7 +302,7 @@ public boolean equalsWithProcessorsCloseTo(DesiredNode that) { @Override public int hashCode() { - return Objects.hash(settings, processors, processorsRange, memory, storage, version, externalId, roles); + return Objects.hash(settings, processors, processorsRange, memory, storage, externalId, roles); } @Override @@ -398,10 +331,6 @@ public String toString() { + '}'; } - public boolean hasVersion() { - return Strings.isNullOrBlank(version) == false; - } - public record ProcessorsRange(Processors min, @Nullable Processors max) implements Writeable, ToXContentObject { private static final ParseField MIN_FIELD = new ParseField("min"); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java index 7b89406be9aa0..606309adf205c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java @@ -44,13 +44,12 @@ public record DesiredNodeWithStatus(DesiredNode desiredNode, Status status) (Processors) args[1], (DesiredNode.ProcessorsRange) args[2], 
(ByteSizeValue) args[3], - (ByteSizeValue) args[4], - (String) args[5] + (ByteSizeValue) args[4] ), // An unknown status is expected during upgrades to versions >= STATUS_TRACKING_SUPPORT_VERSION // the desired node status would be populated when a node in the newer version is elected as // master, the desired nodes status update happens in NodeJoinExecutor. - args[6] == null ? Status.PENDING : (Status) args[6] + args[5] == null ? Status.PENDING : (Status) args[5] ) ); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java index 015c90ebe450e..fe7199f8332d2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java @@ -81,8 +81,7 @@ && isIndexVisible( indexNameExpressionResolver, includeDataStreams )) { - // Resolve any ::* suffixes on the expression. We need to resolve them all to their final valid selectors - resolveSelectorsAndCombine(authorizedIndex, selectorString, indicesOptions, resolvedIndices, metadata); + resolveSelectorsAndCollect(authorizedIndex, selectorString, indicesOptions, resolvedIndices, metadata); } } if (resolvedIndices.isEmpty()) { @@ -98,9 +97,8 @@ && isIndexVisible( } } } else { - // Resolve any ::* suffixes on the expression. We need to resolve them all to their final valid selectors Set resolvedIndices = new HashSet<>(); - resolveSelectorsAndCombine(indexAbstraction, selectorString, indicesOptions, resolvedIndices, metadata); + resolveSelectorsAndCollect(indexAbstraction, selectorString, indicesOptions, resolvedIndices, metadata); if (minus) { finalIndices.removeAll(resolvedIndices); } else if (indicesOptions.ignoreUnavailable() == false || isAuthorized.test(indexAbstraction)) { @@ -114,7 +112,7 @@ && isIndexVisible( return finalIndices; } - private static void resolveSelectorsAndCombine( + private static void resolveSelectorsAndCollect( String indexAbstraction, String selectorString, IndicesOptions indicesOptions, @@ -132,19 +130,8 @@ private static void resolveSelectorsAndCombine( selectorString = IndexComponentSelector.DATA.getKey(); } - if (Regex.isMatchAllPattern(selectorString)) { - // Always accept data - collect.add(IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, IndexComponentSelector.DATA.getKey())); - // Only put failures on the expression if the abstraction supports it. 
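The DesiredNode serialization change just above and the IndexComponentSelector change earlier follow the same wire-compatibility idiom: gate on the transport version and keep consuming (or emitting) the deprecated field when talking to an older peer. A condensed sketch of the read side, using the version constants this patch introduces (the surrounding class is invented):

```java
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;

import java.io.IOException;

class VersionGatedReadSketch {
    // Peers older than the removal version (and not on a patched 9.0 stream) still
    // send the deprecated optional node_version string, so it must be consumed.
    static void readMaybeLegacyField(StreamInput in) throws IOException {
        if (in.getTransportVersion().before(TransportVersions.REMOVE_DESIRED_NODE_VERSION)
            && in.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_DESIRED_NODE_VERSION_90) == false) {
            in.readOptionalString(); // discard; the field no longer exists on DesiredNode
        }
    }
}
```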
- if (acceptsAllSelectors) { - collect.add( - IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, IndexComponentSelector.FAILURES.getKey()) - ); - } - } else { - // A non-wildcard selector is always passed along as-is, it's validity for this kind of abstraction is tested later - collect.add(IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, selectorString)); - } + // A selector is always passed along as-is, its validity for this kind of abstraction is tested later + collect.add(IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, selectorString)); } else { assert selectorString == null : "A selector string [" + selectorString + "] is present but selectors are disabled in this context"; @@ -168,6 +155,13 @@ public static boolean isIndexVisible( final boolean isHidden = indexAbstraction.isHidden(); boolean isVisible = isHidden == false || indicesOptions.expandWildcardsHidden() || isVisibleDueToImplicitHidden(expression, index); if (indexAbstraction.getType() == IndexAbstraction.Type.ALIAS) { + if (indexAbstraction.isSystem()) { + // check if it is net new + if (resolver.getNetNewSystemIndexPredicate().test(indexAbstraction.getName())) { + return isSystemIndexVisible(resolver, indexAbstraction); + } + } + + // it's an alias, ignore expandWildcardsOpen and expandWildcardsClosed. + // complicated to support those options with aliases pointing to multiple indices... isVisible = isVisible && indicesOptions.ignoreAliases() == false; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java index 9ae78d35527f0..b09bc563f4c50 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java @@ -39,6 +39,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.isIndexVerifiedBeforeClosed; import static org.elasticsearch.core.Strings.format; /** @@ -164,7 +165,7 @@ public static boolean isReadOnlySupportedVersion( ) { if (isReadOnlyCompatible(indexMetadata, minimumCompatible, minimumReadOnlyCompatible)) { assert isFullySupportedVersion(indexMetadata, minimumCompatible) == false : indexMetadata; - final boolean isReadOnly = hasIndexWritesBlock(indexMetadata); + final boolean isReadOnly = hasReadOnlyBlocks(indexMetadata) || isIndexVerifiedBeforeClosed(indexMetadata); if (isReadOnly == false) { throw new IllegalStateException( "The index " @@ -185,7 +186,7 @@ public static boolean isReadOnlySupportedVersion( return false; } - private static boolean isReadOnlyCompatible( + public static boolean isReadOnlyCompatible( IndexMetadata indexMetadata, IndexVersion minimumCompatible, IndexVersion minimumReadOnlyCompatible @@ -208,7 +209,7 @@ private static boolean isReadOnlyCompatible( return false; } - private static boolean hasIndexWritesBlock(IndexMetadata indexMetadata) { + static boolean hasReadOnlyBlocks(IndexMetadata indexMetadata) { var indexSettings = indexMetadata.getSettings(); if (IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.get(indexSettings) || IndexMetadata.INDEX_READ_ONLY_SETTING.get(indexSettings)) { return indexMetadata.isSearchableSnapshot() @@ -220,7 +221,7 @@ private static boolean hasIndexWritesBlock(IndexMetadata indexMetadata) { public static boolean isReadOnlyVerified(IndexMetadata indexMetadata) { if
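With ALL_APPLICABLE gone, an expression suffix now resolves to exactly one selector, and the shouldInclude* helpers collapse to identity checks, as the IndexComponentSelector hunk earlier shows. A small sketch of the resulting semantics (assuming the elasticsearch server module on the classpath; run with -ea for the asserts):

```java
import org.elasticsearch.action.support.IndexComponentSelector;

class SelectorSketch {
    public static void main(String[] args) {
        // "logs::failures" now maps to a single selector; there is no "::*" any more
        IndexComponentSelector selector = IndexComponentSelector.getByKey("failures");
        assert selector == IndexComponentSelector.FAILURES;
        assert selector.shouldIncludeFailures();
        assert selector.shouldIncludeData() == false; // data and failures are mutually exclusive now
    }
}
```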
(isReadOnlyCompatible(indexMetadata, IndexVersions.MINIMUM_COMPATIBLE, IndexVersions.MINIMUM_READONLY_COMPATIBLE)) { - return hasIndexWritesBlock(indexMetadata); + return hasReadOnlyBlocks(indexMetadata); } return false; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index cb074b1437040..d28049f2a6316 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -364,21 +364,9 @@ protected static Collection resolveExpressionsToResources(Co } } else { if (isExclusion) { - if (IndexComponentSelector.ALL_APPLICABLE.equals(selector)) { - resources.remove(new ResolvedExpression(baseExpression, IndexComponentSelector.DATA)); - resources.remove(new ResolvedExpression(baseExpression, IndexComponentSelector.FAILURES)); - } else { - resources.remove(new ResolvedExpression(baseExpression, selector)); - } + resources.remove(new ResolvedExpression(baseExpression, selector)); } else if (ensureAliasOrIndexExists(context, baseExpression, selector)) { - if (IndexComponentSelector.ALL_APPLICABLE.equals(selector)) { - resources.add(new ResolvedExpression(baseExpression, IndexComponentSelector.DATA)); - if (context.getState().getMetadata().getIndicesLookup().get(baseExpression).isDataStreamRelated()) { - resources.add(new ResolvedExpression(baseExpression, IndexComponentSelector.FAILURES)); - } - } else { - resources.add(new ResolvedExpression(baseExpression, selector)); - } + resources.add(new ResolvedExpression(baseExpression, selector)); } } } @@ -1046,8 +1034,7 @@ public String[] indexAliases( private static boolean resolvedExpressionsContainsAbstraction(Set resolvedExpressions, String abstractionName) { return resolvedExpressions.contains(new ResolvedExpression(abstractionName)) - || resolvedExpressions.contains(new ResolvedExpression(abstractionName, IndexComponentSelector.DATA)) - || resolvedExpressions.contains(new ResolvedExpression(abstractionName, IndexComponentSelector.ALL_APPLICABLE)); + || resolvedExpressions.contains(new ResolvedExpression(abstractionName, IndexComponentSelector.DATA)); } /** @@ -1342,8 +1329,7 @@ private static boolean ensureAliasOrIndexExists(Context context, String name, In if (context.options.allowSelectors()) { // Ensure that the selectors are present and that they are compatible with the abstractions they are used with assert selector != null : "Earlier logic should have parsed selectors or added the default selectors already"; - // Check if ::failures has been explicitly requested, since requesting ::* for non-data-stream abstractions would just - // return their data components. 
+ // Check if ::failures has been explicitly requested if (IndexComponentSelector.FAILURES.equals(selector) && indexAbstraction.isDataStreamRelated() == false) { // If requested abstraction is not data stream related, then you cannot use ::failures if (ignoreUnavailable) { @@ -1700,9 +1686,9 @@ private static Set expandToOpenClosed( final IndexMetadata.State excludeState = excludeState(context.getOptions()); Set resources = new HashSet<>(); if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - expandToApplicableSelectors(indexAbstraction, selector, resources); + resources.add(new ResolvedExpression(indexAbstraction.getName(), selector)); } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - expandToApplicableSelectors(indexAbstraction, selector, resources); + resources.add(new ResolvedExpression(indexAbstraction.getName(), selector)); } else { if (shouldIncludeRegularIndices(context.getOptions(), selector)) { for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { @@ -1729,31 +1715,6 @@ private static Set expandToOpenClosed( return resources; } - /** - * Adds the abstraction and selector to the results when preserving data streams and aliases at wildcard resolution. If a selector - * is provided, the result is only added if the selector is applicable to the abstraction provided. If - * {@link IndexComponentSelector#ALL_APPLICABLE} is given, the selectors are expanded only to those which are applicable to the - * provided abstraction. - * @param indexAbstraction abstraction to add - * @param selector The selector to add - * @param resources Result collector which is updated with all applicable resolved expressions for a given abstraction and selector - * pair. - */ - private static void expandToApplicableSelectors( - IndexAbstraction indexAbstraction, - IndexComponentSelector selector, - Set resources - ) { - if (IndexComponentSelector.ALL_APPLICABLE.equals(selector)) { - resources.add(new ResolvedExpression(indexAbstraction.getName(), IndexComponentSelector.DATA)); - if (indexAbstraction.isDataStreamRelated()) { - resources.add(new ResolvedExpression(indexAbstraction.getName(), IndexComponentSelector.FAILURES)); - } - } else if (selector == null || indexAbstraction.isDataStreamRelated() || selector.shouldIncludeFailures() == false) { - resources.add(new ResolvedExpression(indexAbstraction.getName(), selector)); - } - } - private static List resolveEmptyOrTrivialWildcard(Context context, IndexComponentSelector selector) { final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices( context.getOptions(), @@ -2150,20 +2111,10 @@ private static V splitSelectorExpression(String expression, BiFunction getIndexSettingsValidationErrors(final Settings settings, final boolean forbidPrivateIndexSettings) { - List validationErrors = validateIndexCustomPath(settings, env.sharedDataFile()); + List validationErrors = validateIndexCustomPath(settings, env.sharedDataDir()); if (forbidPrivateIndexSettings) { validationErrors.addAll(validatePrivateSettingsNotExplicitlySet(settings, indexScopedSettings)); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java index e84fc8d27ba59..6144265e4a83a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java @@ -90,6 +90,7 @@ import java.util.stream.Collectors; import static java.util.stream.Collectors.joining; +import static org.elasticsearch.cluster.metadata.IndexMetadataVerifier.hasReadOnlyBlocks; import static org.elasticsearch.core.Strings.format; /** @@ -1185,6 +1186,18 @@ private ClusterState openIndices(final Index[] indices, final ClusterState curre final Settings.Builder updatedSettings = Settings.builder().put(indexMetadata.getSettings()); updatedSettings.remove(VERIFIED_BEFORE_CLOSE_SETTING.getKey()); + // Reopening a read-only compatible index that has not been marked as read-only is possible if the index was + // verified-before-close in the first place. + var compatibilityVersion = indexMetadata.getCompatibilityVersion(); + if (compatibilityVersion.before(minIndexCompatibilityVersion) && hasReadOnlyBlocks(indexMetadata) == false) { + if (isIndexVerifiedBeforeClosed(indexMetadata)) { + updatedSettings.put(VERIFIED_READ_ONLY_SETTING.getKey(), true); + // at least set a write block if the index was verified-before-close at the time the cluster was upgraded + blocks.addIndexBlock(index.getName(), APIBlock.WRITE.block); + updatedSettings.put(APIBlock.WRITE.settingName(), true); + } // or else, the following indexMetadataVerifier.verifyIndexMetadata() should throw. + } + IndexMetadata newIndexMetadata = IndexMetadata.builder(indexMetadata) .state(IndexMetadata.State.OPEN) .settingsVersion(indexMetadata.getSettingsVersion() + 1) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 5d45bf1ce127e..8b8c3f12cdf9f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -56,7 +56,7 @@ public class MetadataMappingService { public MetadataMappingService(ClusterService clusterService, IndicesService indicesService) { this.clusterService = clusterService; this.indicesService = indicesService; - taskQueue = clusterService.createTaskQueue("put-mapping", Priority.HIGH, new PutMappingExecutor()); + this.taskQueue = clusterService.createTaskQueue("put-mapping", Priority.HIGH, new PutMappingExecutor()); } record PutMappingClusterStateUpdateTask(PutMappingClusterStateUpdateRequest request, ActionListener listener) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java index e984768277d27..c11fa06d83c4d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -51,7 +52,9 @@ import java.util.Objects; import java.util.Set; import java.util.function.BiFunction; +import java.util.function.Function; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; import static 
org.elasticsearch.index.IndexSettings.same; /** @@ -181,11 +184,14 @@ ClusterState execute(ClusterState currentState) { RoutingTable.Builder routingTableBuilder = null; Metadata.Builder metadataBuilder = Metadata.builder(currentState.metadata()); + final var minSupportedIndexVersion = currentState.nodes().getMinSupportedIndexVersion(); // allow to change any settings to a closed index, and only allow dynamic settings to be changed // on an open index Set openIndices = new HashSet<>(); Set closedIndices = new HashSet<>(); + Set readOnlyIndices = null; + final String[] actualIndices = new String[request.indices().length]; for (int i = 0; i < request.indices().length; i++) { Index index = request.indices()[i]; @@ -197,6 +203,12 @@ ClusterState execute(ClusterState currentState) { } else { closedIndices.add(index); } + if (metadata.getCompatibilityVersion().before(minSupportedIndexVersion)) { + if (readOnlyIndices == null) { + readOnlyIndices = new HashSet<>(); + } + readOnlyIndices.add(index); + } } if (skippedSettings.isEmpty() == false && openIndices.isEmpty() == false) { @@ -327,10 +339,21 @@ ClusterState execute(ClusterState currentState) { } } + final Function verifiedReadOnly = indexName -> VERIFIED_READ_ONLY_SETTING.get( + currentState.metadata().index(indexName).getSettings() + ); final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); boolean changedBlocks = false; for (IndexMetadata.APIBlock block : IndexMetadata.APIBlock.values()) { - changedBlocks |= maybeUpdateClusterBlock(actualIndices, blocks, block.block, block.setting, openSettings, metadataBuilder); + changedBlocks |= maybeUpdateClusterBlock( + actualIndices, + blocks, + block.block, + block.setting, + openSettings, + metadataBuilder, + verifiedReadOnly + ); } changed |= changedBlocks; @@ -359,6 +382,7 @@ ClusterState execute(ClusterState currentState) { // This step is mandatory since we allow to update non-dynamic settings on closed indices. indicesService.verifyIndexMetadata(updatedMetadata, updatedMetadata); } + verifyReadOnlyIndices(readOnlyIndices, updatedState.blocks()); } catch (IOException ex) { throw ExceptionsHelper.convertToElastic(ex); } @@ -417,6 +441,24 @@ public static void updateIndexSettings( } } + /** + * Verifies that read-only compatible indices always have a write block. 
+ * + * @param readOnlyIndices the read-only compatible indices + * @param blocks the updated cluster state blocks + */ + private static void verifyReadOnlyIndices(@Nullable Set readOnlyIndices, ClusterBlocks blocks) { + if (readOnlyIndices != null) { + for (Index readOnlyIndex : readOnlyIndices) { + if (blocks.hasIndexBlockLevel(readOnlyIndex.getName(), ClusterBlockLevel.WRITE) == false) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "Can't remove the write block on read-only compatible index %s", readOnlyIndex) + ); + } + } + } + } + /** * Updates the cluster block only iff the setting exists in the given settings */ @@ -426,7 +468,8 @@ private static boolean maybeUpdateClusterBlock( ClusterBlock block, Setting setting, Settings openSettings, - Metadata.Builder metadataBuilder + Metadata.Builder metadataBuilder, + Function verifiedReadOnlyBeforeBlockChanges ) { boolean changed = false; if (setting.exists(openSettings)) { @@ -436,16 +479,32 @@ private static boolean maybeUpdateClusterBlock( if (blocks.hasIndexBlock(index, block) == false) { blocks.addIndexBlock(index, block); changed = true; + if (block.contains(ClusterBlockLevel.WRITE)) { + var isVerifiedReadOnly = verifiedReadOnlyBeforeBlockChanges.apply(index); + if (isVerifiedReadOnly) { + var indexMetadata = metadataBuilder.get(index); + metadataBuilder.put( + IndexMetadata.builder(indexMetadata) + .settings( + Settings.builder() + .put(indexMetadata.getSettings()) + .put(VERIFIED_READ_ONLY_SETTING.getKey(), true) + ) + ); + } + } } } else { if (blocks.hasIndexBlock(index, block)) { blocks.removeIndexBlock(index, block); changed = true; if (block.contains(ClusterBlockLevel.WRITE)) { - IndexMetadata indexMetadata = metadataBuilder.get(index); - Settings.Builder indexSettings = Settings.builder().put(indexMetadata.getSettings()); - indexSettings.remove(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey()); - metadataBuilder.put(IndexMetadata.builder(indexMetadata).settings(indexSettings)); + if (blocks.hasIndexBlockLevel(index, ClusterBlockLevel.WRITE) == false) { + var indexMetadata = metadataBuilder.get(index); + var indexSettings = Settings.builder().put(indexMetadata.getSettings()); + indexSettings.remove(VERIFIED_READ_ONLY_SETTING.getKey()); + metadataBuilder.put(IndexMetadata.builder(indexMetadata).settings(indexSettings)); + } } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ProcessClusterEventTimeoutException.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ProcessClusterEventTimeoutException.java index 1182faea81ed6..2a273f7f81e0f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ProcessClusterEventTimeoutException.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ProcessClusterEventTimeoutException.java @@ -28,6 +28,6 @@ public ProcessClusterEventTimeoutException(StreamInput in) throws IOException { @Override public RestStatus status() { - return RestStatus.SERVICE_UNAVAILABLE; + return RestStatus.TOO_MANY_REQUESTS; } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java b/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java deleted file mode 100644 index 32f9530e4b185..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.routing; - -import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.common.util.Countable; - -import java.util.Iterator; -import java.util.List; - -/** - * This class implements a compilation of {@link ShardIterator}s. Each {@link ShardIterator} - * iterated by this {@link Iterable} represents a group of shards. - * ShardsIterators are always returned in ascending order independently of their order at construction - * time. The incoming iterators are sorted to ensure consistent iteration behavior across Nodes / JVMs. -*/ -public final class GroupShardsIterator & Countable> implements Iterable { - - private final List iterators; - - /** - * Constructs a new sorted GroupShardsIterator from the given list. Items are sorted based on their natural ordering. - * @see PlainShardIterator#compareTo(ShardIterator) - */ - public static & Countable> GroupShardsIterator sortAndCreate(List iterators) { - CollectionUtil.timSort(iterators); - return new GroupShardsIterator<>(iterators); - } - - /** - * Constructs a new GroupShardsIterator from the given list. - */ - public GroupShardsIterator(List iterators) { - this.iterators = iterators; - } - - /** - * Returns the total number of shards within all groups - * @return total number of shards - */ - public int totalSize() { - return iterators.stream().mapToInt(Countable::size).sum(); - } - - /** - * Returns the total number of shards plus the number of empty groups - * @return number of shards and empty groups - */ - public int totalSizeWith1ForEmpty() { - int size = 0; - for (ShardIt shard : iterators) { - size += Math.max(1, shard.size()); - } - return size; - } - - /** - * Return the number of groups - * @return number of groups - */ - public int size() { - return iterators.size(); - } - - @Override - public Iterator iterator() { - return iterators.iterator(); - } - - public ShardIt get(int index) { - return iterators.get(index); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index bcacf21fcedbf..4504207a24631 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -9,7 +9,6 @@ package org.elasticsearch.cluster.routing; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -236,7 +235,7 @@ public boolean allPrimaryShardsActive() { /** * @return true if an index is available to service search queries. 
*/ - public boolean readyForSearch(ClusterState clusterState) { + public boolean readyForSearch() { for (IndexShardRoutingTable shardRoutingTable : this.shards) { boolean found = false; for (int idx = 0; idx < shardRoutingTable.size(); idx++) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 74c2c1d14b77c..8e0c73184f9e2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -193,7 +193,7 @@ public List assignedShards() { * * @return a {@link List} of shards */ - public List unpromotableShards() { + public List assignedUnpromotableShards() { return this.assignedUnpromotableShards; } @@ -202,16 +202,16 @@ public List unpromotableShards() { * * @return a {@link List} of shards */ - public List allUnpromotableShards() { + public List unpromotableShards() { return this.unpromotableShards; } public ShardIterator shardsRandomIt() { - return new PlainShardIterator(shardId, shuffler.shuffle(Arrays.asList(shards))); + return new ShardIterator(shardId, shuffler.shuffle(Arrays.asList(shards))); } public ShardIterator shardsIt(int seed) { - return new PlainShardIterator(shardId, shuffler.shuffle(Arrays.asList(shards), seed)); + return new ShardIterator(shardId, shuffler.shuffle(Arrays.asList(shards), seed)); } /** @@ -228,12 +228,12 @@ public ShardIterator activeInitializingShardsRandomIt() { */ public ShardIterator activeInitializingShardsIt(int seed) { if (allInitializingShards.isEmpty()) { - return new PlainShardIterator(shardId, shuffler.shuffle(activeShards, seed)); + return new ShardIterator(shardId, shuffler.shuffle(activeShards, seed)); } ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size()); ordered.addAll(shuffler.shuffle(activeShards, seed)); ordered.addAll(allInitializingShards); - return new PlainShardIterator(shardId, ordered); + return new ShardIterator(shardId, ordered); } /** @@ -247,10 +247,7 @@ public ShardIterator activeInitializingShardsRankedIt( ) { final int seed = shuffler.nextSeed(); if (allInitializingShards.isEmpty()) { - return new PlainShardIterator( - shardId, - rankShardsAndUpdateStats(shuffler.shuffle(activeShards, seed), collector, nodeSearchCounts) - ); + return new ShardIterator(shardId, rankShardsAndUpdateStats(shuffler.shuffle(activeShards, seed), collector, nodeSearchCounts)); } ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size()); @@ -258,7 +255,7 @@ public ShardIterator activeInitializingShardsRankedIt( ordered.addAll(rankedActiveShards); List rankedInitializingShards = rankShardsAndUpdateStats(allInitializingShards, collector, nodeSearchCounts); ordered.addAll(rankedInitializingShards); - return new PlainShardIterator(shardId, ordered); + return new ShardIterator(shardId, ordered); } private static Set getAllNodeIds(final List shards) { @@ -416,9 +413,9 @@ public int compare(ShardRouting s1, ShardRouting s2) { */ public ShardIterator primaryShardIt() { if (primary != null) { - return new PlainShardIterator(shardId, Collections.singletonList(primary)); + return new ShardIterator(shardId, Collections.singletonList(primary)); } - return new PlainShardIterator(shardId, Collections.emptyList()); + return new ShardIterator(shardId, Collections.emptyList()); } public ShardIterator onlyNodeActiveInitializingShardsIt(String nodeId) { @@ 
-434,7 +431,7 @@ public ShardIterator onlyNodeActiveInitializingShardsIt(String nodeId) { ordered.add(shardRouting); } } - return new PlainShardIterator(shardId, ordered); + return new ShardIterator(shardId, ordered); } public ShardIterator onlyNodeSelectorActiveInitializingShardsIt(String nodeAttributes, DiscoveryNodes discoveryNodes) { @@ -469,7 +466,7 @@ public ShardIterator onlyNodeSelectorActiveInitializingShardsIt(String[] nodeAtt ); throw new IllegalArgumentException(message); } - return new PlainShardIterator(shardId, ordered); + return new ShardIterator(shardId, ordered); } public ShardIterator preferNodeActiveInitializingShardsIt(Set nodeIds) { @@ -487,7 +484,7 @@ public ShardIterator preferNodeActiveInitializingShardsIt(Set nodeIds) { if (allInitializingShards.isEmpty() == false) { preferred.addAll(allInitializingShards); } - return new PlainShardIterator(shardId, preferred); + return new ShardIterator(shardId, preferred); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 49da00eae8a5a..15eba6fb3dfea 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -27,6 +28,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -86,7 +88,7 @@ public ShardIterator useOnlyPromotableShardsForStateless(ShardIterator shards) { // If it is stateless, only route promotable shards. This is a temporary workaround until a more cohesive solution can be // implemented for search shards. 
if (isStateless && shards != null) { - return new PlainShardIterator( + return new ShardIterator( shards.shardId(), shards.getShardRoutings().stream().filter(ShardRouting::isPromotableToPrimary).collect(Collectors.toList()) ); @@ -95,7 +97,7 @@ public ShardIterator useOnlyPromotableShardsForStateless(ShardIterator shards) { } } - public GroupShardsIterator searchShards( + public List searchShards( ClusterState clusterState, String[] concreteIndices, @Nullable Map> routing, @@ -104,7 +106,7 @@ public GroupShardsIterator searchShards( return searchShards(clusterState, concreteIndices, routing, preference, null, null); } - public GroupShardsIterator searchShards( + public List searchShards( ClusterState clusterState, String[] concreteIndices, @Nullable Map> routing, @@ -124,10 +126,12 @@ public GroupShardsIterator searchShards( nodeCounts ); if (iterator != null) { - set.add(PlainShardIterator.allSearchableShards(iterator)); + set.add(ShardIterator.allSearchableShards(iterator)); } } - return GroupShardsIterator.sortAndCreate(new ArrayList<>(set)); + List res = new ArrayList<>(set); + CollectionUtil.timSort(res); + return res; } public static ShardIterator getShards(ClusterState clusterState, ShardId shardId) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java b/server/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java deleted file mode 100644 index a429cc040de3c..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/routing/PlainShardIterator.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.routing; - -import org.elasticsearch.index.shard.ShardId; - -import java.util.ArrayList; -import java.util.List; - -/** - * The {@link PlainShardIterator} is a {@link ShardsIterator} which iterates all - * shards or a given {@link ShardId shard id} - */ -public class PlainShardIterator extends PlainShardsIterator implements ShardIterator { - - private final ShardId shardId; - - public static PlainShardIterator allSearchableShards(ShardIterator shardIterator) { - return new PlainShardIterator(shardIterator.shardId(), shardsThatCanHandleSearches(shardIterator)); - } - - private static List shardsThatCanHandleSearches(ShardIterator iterator) { - final List shardsThatCanHandleSearches = new ArrayList<>(iterator.size()); - for (ShardRouting shardRouting : iterator) { - if (shardRouting.isSearchable()) { - shardsThatCanHandleSearches.add(shardRouting); - } - } - return shardsThatCanHandleSearches; - } - - /** - * Creates a {@link PlainShardIterator} instance that iterates over a subset of the given shards - * this the a given shardId. 
- * - * @param shardId shard id of the group - * @param shards shards to iterate - */ - public PlainShardIterator(ShardId shardId, List shards) { - super(shards); - this.shardId = shardId; - } - - @Override - public ShardId shardId() { - return this.shardId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ShardIterator that = (ShardIterator) o; - return shardId.equals(that.shardId()); - } - - @Override - public int hashCode() { - return shardId.hashCode(); - } - - @Override - public int compareTo(ShardIterator o) { - return shardId.compareTo(o.shardId()); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 60cf6b10417fa..0067d4b577b4e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; @@ -194,7 +195,7 @@ public List allShards(String index) { * * @param includeEmpty if true, a shard iterator will be added for non-assigned shards as well */ - public GroupShardsIterator allActiveShardsGrouped(String[] indices, boolean includeEmpty) { + public List allActiveShardsGrouped(String[] indices, boolean includeEmpty) { return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, ShardRouting::active); } @@ -203,11 +204,11 @@ public GroupShardsIterator allActiveShardsGrouped(String[] indice * * @param includeEmpty if true, a shard iterator will be added for non-assigned shards as well */ - public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty) { + public List allAssignedShardsGrouped(String[] indices, boolean includeEmpty) { return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, ShardRouting::assignedToNode); } - private GroupShardsIterator allSatisfyingPredicateShardsGrouped( + private List allSatisfyingPredicateShardsGrouped( String[] indices, boolean includeEmpty, Predicate predicate @@ -227,12 +228,13 @@ private GroupShardsIterator allSatisfyingPredicateShardsGrouped( if (predicate.test(shardRouting)) { set.add(shardRouting.shardsIt()); } else if (includeEmpty) { // we need this for counting properly, just make it an empty one - set.add(new PlainShardIterator(shardRouting.shardId(), Collections.emptyList())); + set.add(new ShardIterator(shardRouting.shardId(), Collections.emptyList())); } } } } - return GroupShardsIterator.sortAndCreate(set); + CollectionUtil.timSort(set); + return set; } public ShardsIterator allShards(String[] indices) { @@ -285,7 +287,7 @@ private ShardsIterator allShardsSatisfyingPredicate( * @return All the primary shards grouped into a single shard element group each * @throws IndexNotFoundException If an index passed does not exists */ - public GroupShardsIterator activePrimaryShardsGrouped(String[] indices, boolean includeEmpty) { + public List activePrimaryShardsGrouped(String[] indices, boolean includeEmpty) { // use list here since we need to maintain identity across shards ArrayList set = new ArrayList<>(); for (String index : indices) { @@ -299,11 +301,12 @@ public GroupShardsIterator activePrimaryShardsGrouped(String[] in if (primary.active()) { 
set.add(primary.shardsIt()); } else if (includeEmpty) { // we need this for counting properly, just make it an empty one - set.add(new PlainShardIterator(primary.shardId(), Collections.emptyList())); + set.add(new ShardIterator(primary.shardId(), Collections.emptyList())); } } } - return GroupShardsIterator.sortAndCreate(set); + CollectionUtil.timSort(set); + return set; } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardIterator.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardIterator.java index 6093fc2c5b3ea..7056d5c953cdc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardIterator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardIterator.java @@ -11,19 +11,65 @@ import org.elasticsearch.index.shard.ShardId; +import java.util.ArrayList; +import java.util.List; + /** - * Allows to iterate over a set of shard instances (routing) within a shard id group. + * The {@link ShardIterator} is a {@link ShardsIterator} which iterates all + * shards of a given {@link ShardId shard id} */ -public interface ShardIterator extends ShardsIterator, Comparable { +public final class ShardIterator extends PlainShardsIterator implements Comparable { + + private final ShardId shardId; + + public static ShardIterator allSearchableShards(ShardIterator shardIterator) { + return new ShardIterator(shardIterator.shardId(), shardsThatCanHandleSearches(shardIterator)); + } + + private static List shardsThatCanHandleSearches(ShardIterator iterator) { + final List shardsThatCanHandleSearches = new ArrayList<>(iterator.size()); + for (ShardRouting shardRouting : iterator) { + if (shardRouting.isSearchable()) { + shardsThatCanHandleSearches.add(shardRouting); + } + } + return shardsThatCanHandleSearches; + } /** - * The shard id this group relates to. + * Creates a {@link ShardIterator} instance that iterates all shards + * of a given shardId. + * + * @param shardId shard id of the group + * @param shards shards to iterate */ - ShardId shardId(); + public ShardIterator(ShardId shardId, List shards) { + super(shards); + this.shardId = shardId; + } /** - * Resets the iterator. + * The shard id this group relates to. */ + public ShardId shardId() { + return this.shardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ShardIterator that = (ShardIterator) o; + return shardId.equals(that.shardId()); + } + + @Override + public int hashCode() { + return shardId.hashCode(); + } + @Override - void reset(); + public int compareTo(ShardIterator o) { + return shardId.compareTo(o.shardId()); + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index 157d28e61057c..2ce349e2d3b61 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -11,7 +11,6 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource.ExistingStoreRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource; @@ -329,7 +328,7 @@ public ShardId shardId() { * A shard iterator with just this shard in it. 
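Since ShardIterator is now a concrete Comparable class, the sorted-group guarantee that GroupShardsIterator.sortAndCreate used to provide becomes a plain list sort. A minimal, self-contained sketch of that contract, using a hypothetical Group record in place of the real ShardIterator/ShardId types:

    import java.util.ArrayList;
    import java.util.List;

    // "Group" is a stand-in for ShardIterator: comparable by its shard id so that
    // a plain List can be sorted into the order GroupShardsIterator used to enforce.
    record Group(String index, int shardId) implements Comparable<Group> {
        @Override
        public int compareTo(Group o) {
            int byIndex = index.compareTo(o.index);
            return byIndex != 0 ? byIndex : Integer.compare(shardId, o.shardId);
        }
    }

    public class SortedGroupsSketch {
        public static void main(String[] args) {
            List<Group> groups = new ArrayList<>(
                List.of(new Group("logs", 2), new Group("logs", 0), new Group("apps", 1))
            );
            groups.sort(null); // natural ordering, as CollectionUtil.timSort does in the diff
            System.out.println(groups); // apps/1, logs/0, logs/2 - consistent across nodes/JVMs
        }
    }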
*/ public ShardIterator shardsIt() { - return new PlainShardIterator(shardId, List.of(this)); + return new ShardIterator(shardId, List.of(this)); } public ShardRouting(ShardId shardId, StreamInput in) throws IOException { @@ -935,7 +934,7 @@ public boolean isPromotableToPrimary() { } /** - * Determine if role searchable. Consumers should prefer {@link IndexRoutingTable#readyForSearch(ClusterState)} to determine if an index + * Determine if role searchable. Consumers should prefer {@link IndexRoutingTable#readyForSearch()} to determine if an index * is ready to be searched. */ public boolean isSearchable() { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardsIterator.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardsIterator.java index 87be723edaeec..c78d60af493e3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardsIterator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardsIterator.java @@ -8,26 +8,12 @@ */ package org.elasticsearch.cluster.routing; -import org.elasticsearch.common.util.Countable; - import java.util.List; /** * Allows to iterate over unrelated shards. */ -public interface ShardsIterator extends Iterable, Countable { - - /** - * Resets the iterator to its initial state. - */ - void reset(); - - /** - * The number of shard routing instances. - * - * @return number of shard routing instances in this iterator - */ - int size(); +public interface ShardsIterator extends Iterable { /** * The number of active shard routing instances @@ -41,13 +27,6 @@ public interface ShardsIterator extends Iterable, Countable { */ ShardRouting nextOrNull(); - /** - * Return the number of shards remaining in this {@link ShardsIterator} - * - * @return number of shard remaining - */ - int remaining(); - @Override int hashCode(); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java new file mode 100644 index 0000000000000..2e45938f3d2c0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java @@ -0,0 +1,194 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicReference; + +/** + * Manages the lifecycle of a series of {@link BalancingRoundSummary} results from allocation balancing rounds and creates reports thereof. 
+ * Reporting balancer round summary results will provide information with which to do cost-benefit analyses of the work that shard + * allocation rebalancing executes. + * + * Any successfully added summary via {@link #addBalancerRoundSummary(BalancingRoundSummary)} will eventually be collected/drained and + * reported. This should still be done in the event of the node stepping down from master, on the assumption that all summaries are only + * added while master and should be drained for reporting. There is no need to start/stop this service with master election/stepdown because + * balancer rounds will no longer be supplied when not master. It will simply drain the last summaries and then have nothing more to do. + * This does have the tradeoff that non-master nodes will run a task to check for summaries to report every + * {@link #BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING} seconds. + */ +public class AllocationBalancingRoundSummaryService { + + /** Turns on or off balancing round summary reporting. */ + public static final Setting ENABLE_BALANCER_ROUND_SUMMARIES_SETTING = Setting.boolSetting( + "cluster.routing.allocation.desired_balance.enable_balancer_round_summaries", + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** Controls how frequently balancer round summaries are logged. */ + public static final Setting BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING = Setting.timeSetting( + "cluster.routing.allocation.desired_balance.balancer_round_summaries_interval", + TimeValue.timeValueSeconds(10), + TimeValue.ZERO, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private static final Logger logger = LogManager.getLogger(AllocationBalancingRoundSummaryService.class); + private final ThreadPool threadPool; + private volatile boolean enableBalancerRoundSummaries; + private volatile TimeValue summaryReportInterval; + + /** + * A concurrency-safe list of balancing round summaries. Balancer rounds are run and added here serially, so the queue will naturally + * progress from older to newer results. + */ + private final ConcurrentLinkedQueue summaries = new ConcurrentLinkedQueue<>(); + + /** This reference is set when reporting is scheduled. If it is null, then reporting is inactive. */ + private final AtomicReference scheduledReportFuture = new AtomicReference<>(); + + public AllocationBalancingRoundSummaryService(ThreadPool threadPool, ClusterSettings clusterSettings) { + this.threadPool = threadPool; + // Initialize the local setting values to avoid a null access when ClusterSettings#initializeAndWatch is called on each setting: + // updating enableBalancerRoundSummaries accesses summaryReportInterval. + this.enableBalancerRoundSummaries = clusterSettings.get(ENABLE_BALANCER_ROUND_SUMMARIES_SETTING); + this.summaryReportInterval = clusterSettings.get(BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING); + + clusterSettings.initializeAndWatch(ENABLE_BALANCER_ROUND_SUMMARIES_SETTING, value -> { + this.enableBalancerRoundSummaries = value; + updateBalancingRoundSummaryReporting(); + }); + clusterSettings.initializeAndWatch(BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING, value -> { + // The new value will get picked up the next time that the summary report task reschedules itself on the thread pool. + this.summaryReportInterval = value; + }); + } + + /** + * Adds the summary of a balancing round. If summaries are enabled, this will eventually be reported (logging, etc.).
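A stripped-down sketch of the add-then-drain lifecycle described in the javadoc above (names simplified; the real service stores BalancingRoundSummary objects and combines them on drain):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ConcurrentLinkedQueue;

    // Sketch of the service's queue lifecycle with its dependencies stripped away.
    public class SummaryQueueSketch {
        private volatile boolean enabled = false; // mirrors the enable setting

        private final ConcurrentLinkedQueue<String> summaries = new ConcurrentLinkedQueue<>();

        void add(String summary) {
            if (enabled == false) {
                return; // discard immediately so the queue cannot grow unbounded while disabled
            }
            summaries.add(summary);
        }

        List<String> drain() {
            List<String> batch = new ArrayList<>();
            while (summaries.isEmpty() == false) {
                batch.add(summaries.poll());
            }
            return batch; // oldest first: the order the balancing rounds actually ran
        }
    }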
If balancer round + * summaries are not enabled in the cluster, then the summary is immediately discarded (so as not to fill up a data structure that will + * never be drained). + */ + public void addBalancerRoundSummary(BalancingRoundSummary summary) { + if (enableBalancerRoundSummaries == false) { + return; + } + + summaries.add(summary); + } + + /** + * Reports on all the balancer round summaries added since the last call to this method, if there are any. Then reschedules itself per + * the {@link #ENABLE_BALANCER_ROUND_SUMMARIES_SETTING} and {@link #BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING} settings. + */ + private void reportSummariesAndThenReschedule() { + drainAndReportSummaries(); + rescheduleReporting(); + } + + /** + * Drains all the waiting balancer round summaries (if there are any) and reports them. + */ + private void drainAndReportSummaries() { + var combinedSummaries = drainSummaries(); + if (combinedSummaries == CombinedBalancingRoundSummary.EMPTY_RESULTS) { + return; + } + + logger.info("Balancing round summaries: " + combinedSummaries); + } + + /** + * Returns a combined summary of all unreported allocation round summaries: may summarize a single balancer round, multiple, or none. + * + * @return {@link CombinedBalancingRoundSummary#EMPTY_RESULTS} if there are no balancing round summaries waiting to be reported. + */ + private CombinedBalancingRoundSummary drainSummaries() { + ArrayList batchOfSummaries = new ArrayList<>(); + while (summaries.isEmpty() == false) { + batchOfSummaries.add(summaries.poll()); + } + return CombinedBalancingRoundSummary.combine(batchOfSummaries); + } + + /** + * Schedules a periodic task to drain and report the latest balancer round summaries, or cancels the already running task, if the latest + * setting values dictate a change to enable or disable reporting. A change to {@link #BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING} + * will only take effect when the periodic task completes and reschedules itself. + */ + private void updateBalancingRoundSummaryReporting() { + if (this.enableBalancerRoundSummaries) { + startReporting(this.summaryReportInterval); + } else { + cancelReporting(); + // Clear the data structure so that we don't retain unnecessary memory. + drainSummaries(); + } + } + + /** + * Schedules a reporting task, if one is not already scheduled. The reporting task will reschedule itself going forward. + */ + private void startReporting(TimeValue intervalValue) { + if (scheduledReportFuture.get() == null) { + scheduleReporting(intervalValue); + } + } + + /** + * Cancels the future reporting task and resets {@link #scheduledReportFuture} to null. + * + * Note that this is best-effort: cancellation can race with {@link #rescheduleReporting}. But that is okay because the subsequent + * {@link #rescheduleReporting} will use the latest settings and choose to cancel reporting if appropriate. + */ + private void cancelReporting() { + var future = scheduledReportFuture.getAndSet(null); + if (future != null) { + future.cancel(); + } + } + + private void scheduleReporting(TimeValue intervalValue) { + scheduledReportFuture.set( + threadPool.schedule(this::reportSummariesAndThenReschedule, intervalValue, threadPool.executor(ThreadPool.Names.GENERIC)) + ); + } + + /** + * Looks at the given setting values and decides whether to schedule another reporting task or cancel reporting now. 
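The best-effort cancellation noted above leans on AtomicReference#getAndSet; a minimal sketch of that pattern, with a simplified Cancellable in place of Scheduler.Cancellable:

    import java.util.concurrent.atomic.AtomicReference;

    public class CancelSketch {
        interface Cancellable { void cancel(); }

        private final AtomicReference<Cancellable> scheduled = new AtomicReference<>();

        void schedule(Cancellable task) {
            scheduled.set(task);
        }

        void cancel() {
            // getAndSet(null) guarantees each scheduled task is cancelled at most once,
            // even if cancel() races with a reschedule; a racing reschedule re-reads the
            // settings afterwards and cancels itself if reporting was disabled meanwhile.
            Cancellable task = scheduled.getAndSet(null);
            if (task != null) {
                task.cancel();
            }
        }
    }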
+ */ + private void rescheduleReporting() { + if (this.enableBalancerRoundSummaries) { + // It's possible that this races with a concurrent call to cancel reporting, but that's okay. The next rescheduleReporting call + // will check the latest settings and cancel. + scheduleReporting(this.summaryReportInterval); + } else { + cancelReporting(); + } + } + + // @VisibleForTesting + protected void verifyNumberOfSummaries(int numberOfSummaries) { + assert numberOfSummaries == summaries.size(); + } + +} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java new file mode 100644 index 0000000000000..2662825eff48e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +/** + * Summarizes the impact to the cluster as a result of a rebalancing round. + * + * @param numberOfShardsToMove The number of shard moves required to move from the previous desired balance to the new one. + */ +public record BalancingRoundSummary(long numberOfShardsToMove) { + + @Override + public String toString() { + return "BalancingRoundSummary{" + "numberOfShardsToMove=" + numberOfShardsToMove + '}'; + } + +} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java new file mode 100644 index 0000000000000..78fa1f6c5f5f5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import java.util.List; + +/** + * Holds combined {@link BalancingRoundSummary} results. Essentially holds a list of the balancing events and the summed up changes + * across all those events: what allocation work was done across some period of time. + * TODO: WIP ES-10341 + * + * Note that each balancing round summary is the difference between, at the time, latest desired balance and the previous desired balance. + * Each summary represents a step towards the next desired balance, which is based on presuming the previous desired balance is reached. 
So + * combining them is roughly the difference between the first summary's previous desired balance and the last summary's latest desired + * balance. + * + * @param numberOfBalancingRounds How many balancing round summaries are combined in this report. + * @param numberOfShardMoves The sum of shard moves for each balancing round being combined into a single summary. + */ +public record CombinedBalancingRoundSummary(int numberOfBalancingRounds, long numberOfShardMoves) { + + public static final CombinedBalancingRoundSummary EMPTY_RESULTS = new CombinedBalancingRoundSummary(0, 0); + + public static CombinedBalancingRoundSummary combine(List summaries) { + if (summaries.isEmpty()) { + return EMPTY_RESULTS; + } + + int numSummaries = 0; + long numberOfShardMoves = 0; + for (BalancingRoundSummary summary : summaries) { + ++numSummaries; + numberOfShardMoves += summary.numberOfShardsToMove(); + } + return new CombinedBalancingRoundSummary(numSummaries, numberOfShardMoves); + } + +} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java index 16cbf41ee1bfa..202582839f1d9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java @@ -24,8 +24,7 @@ * strictly increasing sequence number. A new master term restarts the index values from zero. The balancer, * which runs async to reroute, uses the latest request's data to compute the desired balance. * @param assignments a set of the (persistent) node IDs to which each {@link ShardId} should be allocated - * @param weightsPerNode The node weights calculated based on - * {@link org.elasticsearch.cluster.routing.allocation.allocator.WeightFunction#calculateNodeWeight} + * @param weightsPerNode The node weights calculated based on {@link WeightFunction#calculateNodeWeight} */ public record DesiredBalance( long lastConvergedIndex, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java index fddf9267cdbb1..0eb4d89bd6d3e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java @@ -11,6 +11,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsAndWeightsCalculator.NodeAllocationStatsAndWeight; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -28,17 +29,28 @@ */ public class DesiredBalanceMetrics { + /** + * @param unassignedShards Shards that are not assigned to any node. + * @param totalAllocations Shards that are assigned to a node. + * @param undesiredAllocationsExcludingShuttingDownNodes Shards that are assigned to a node but must move to alleviate a resource + * constraint per the {@link AllocationDeciders}. Excludes shards that must move + * because of a node shutting down. 
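A usage sketch for the combine method shown above, assuming only that the two records introduced by this diff are on the classpath:

    import org.elasticsearch.cluster.routing.allocation.allocator.BalancingRoundSummary;
    import org.elasticsearch.cluster.routing.allocation.allocator.CombinedBalancingRoundSummary;

    import java.util.List;

    public class CombineSketch {
        public static void main(String[] args) {
            // Three balancing rounds that moved 4, 0 and 6 shards combine into
            // a single report of 3 rounds and 10 shard moves.
            var combined = CombinedBalancingRoundSummary.combine(
                List.of(new BalancingRoundSummary(4), new BalancingRoundSummary(0), new BalancingRoundSummary(6))
            );
            System.out.println(combined); // numberOfBalancingRounds=3, numberOfShardMoves=10
            // An empty batch returns the shared EMPTY_RESULTS sentinel, which the
            // reporting path compares by reference to skip logging entirely.
            assert CombinedBalancingRoundSummary.combine(List.of()) == CombinedBalancingRoundSummary.EMPTY_RESULTS;
        }
    }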
+ */ public record AllocationStats(long unassignedShards, long totalAllocations, long undesiredAllocationsExcludingShuttingDownNodes) {} public record NodeWeightStats(long shardCount, double diskUsageInBytes, double writeLoad, double nodeWeight) {} - public static final DesiredBalanceMetrics NOOP = new DesiredBalanceMetrics(MeterRegistry.NOOP); - + // Reconciliation metrics. + /** See {@link #unassignedShards} */ public static final String UNASSIGNED_SHARDS_METRIC_NAME = "es.allocator.desired_balance.shards.unassigned.current"; + /** See {@link #totalAllocations} */ public static final String TOTAL_SHARDS_METRIC_NAME = "es.allocator.desired_balance.shards.current"; + /** See {@link #undesiredAllocationsExcludingShuttingDownNodes} */ public static final String UNDESIRED_ALLOCATION_COUNT_METRIC_NAME = "es.allocator.desired_balance.allocations.undesired.current"; + /** {@link #UNDESIRED_ALLOCATION_COUNT_METRIC_NAME} / {@link #TOTAL_SHARDS_METRIC_NAME} */ public static final String UNDESIRED_ALLOCATION_RATIO_METRIC_NAME = "es.allocator.desired_balance.allocations.undesired.ratio"; + // Desired balance node metrics. public static final String DESIRED_BALANCE_NODE_WEIGHT_METRIC_NAME = "es.allocator.desired_balance.allocations.node_weight.current"; public static final String DESIRED_BALANCE_NODE_SHARD_COUNT_METRIC_NAME = "es.allocator.desired_balance.allocations.node_shard_count.current"; @@ -47,6 +59,7 @@ public record NodeWeightStats(long shardCount, double diskUsageInBytes, double w public static final String DESIRED_BALANCE_NODE_DISK_USAGE_METRIC_NAME = "es.allocator.desired_balance.allocations.node_disk_usage_bytes.current"; + // Node weight metrics. public static final String CURRENT_NODE_WEIGHT_METRIC_NAME = "es.allocator.allocations.node.weight.current"; public static final String CURRENT_NODE_SHARD_COUNT_METRIC_NAME = "es.allocator.allocations.node.shard_count.current"; public static final String CURRENT_NODE_WRITE_LOAD_METRIC_NAME = "es.allocator.allocations.node.write_load.current"; @@ -59,6 +72,7 @@ public record NodeWeightStats(long shardCount, double diskUsageInBytes, double w public static final AllocationStats EMPTY_ALLOCATION_STATS = new AllocationStats(-1, -1, -1); private volatile boolean nodeIsMaster = false; + /** * Number of unassigned shards during last reconciliation */ @@ -70,9 +84,10 @@ public record NodeWeightStats(long shardCount, double diskUsageInBytes, double w private volatile long totalAllocations; /** - * Number of assigned shards during last reconciliation that are not allocated on desired node and need to be moved + * Number of assigned shards during last reconciliation that are not allocated on a desired node and need to be moved. + * This excludes shards that must be reassigned due to a shutting down node. 
*/ - private volatile long undesiredAllocations; + private volatile long undesiredAllocationsExcludingShuttingDownNodes; private final AtomicReference> weightStatsPerNodeRef = new AtomicReference<>(Map.of()); private final AtomicReference> allocationStatsPerNodeRef = new AtomicReference<>( @@ -89,7 +104,7 @@ public void updateMetrics( if (allocationStats != EMPTY_ALLOCATION_STATS) { this.unassignedShards = allocationStats.unassignedShards; this.totalAllocations = allocationStats.totalAllocations; - this.undesiredAllocations = allocationStats.undesiredAllocationsExcludingShuttingDownNodes; + this.undesiredAllocationsExcludingShuttingDownNodes = allocationStats.undesiredAllocationsExcludingShuttingDownNodes; } weightStatsPerNodeRef.set(weightStatsPerNode); allocationStatsPerNodeRef.set(nodeAllocationStats); @@ -107,7 +122,7 @@ public DesiredBalanceMetrics(MeterRegistry meterRegistry) { UNDESIRED_ALLOCATION_COUNT_METRIC_NAME, "Total number of shards allocated on undesired nodes excluding shutting down nodes", "{shard}", - this::getUndesiredAllocationsMetrics + this::getUndesiredAllocationsExcludingShuttingDownNodesMetrics ); meterRegistry.registerDoublesGauge( UNDESIRED_ALLOCATION_RATIO_METRIC_NAME, @@ -115,6 +130,7 @@ public DesiredBalanceMetrics(MeterRegistry meterRegistry) { "1", this::getUndesiredAllocationsRatioMetrics ); + meterRegistry.registerDoublesGauge( DESIRED_BALANCE_NODE_WEIGHT_METRIC_NAME, "Weight of nodes in the computed desired balance", @@ -133,18 +149,19 @@ public DesiredBalanceMetrics(MeterRegistry meterRegistry) { "bytes", this::getDesiredBalanceNodeDiskUsageMetrics ); - meterRegistry.registerDoublesGauge( - CURRENT_NODE_WEIGHT_METRIC_NAME, - "The weight of nodes based on the current allocation state", - "unit", - this::getCurrentNodeWeightMetrics - ); meterRegistry.registerLongsGauge( DESIRED_BALANCE_NODE_SHARD_COUNT_METRIC_NAME, "Shard count of nodes in the computed desired balance", "unit", this::getDesiredBalanceNodeShardCountMetrics ); + + meterRegistry.registerDoublesGauge( + CURRENT_NODE_WEIGHT_METRIC_NAME, + "The weight of nodes based on the current allocation state", + "unit", + this::getCurrentNodeWeightMetrics + ); meterRegistry.registerDoublesGauge( CURRENT_NODE_WRITE_LOAD_METRIC_NAME, "The current write load of nodes", @@ -194,7 +211,7 @@ public long totalAllocations() { } public long undesiredAllocations() { - return undesiredAllocations; + return undesiredAllocationsExcludingShuttingDownNodes; } private List getUnassignedShardsMetrics() { @@ -330,8 +347,8 @@ private List getTotalAllocationsMetrics() { return getIfPublishing(totalAllocations); } - private List getUndesiredAllocationsMetrics() { - return getIfPublishing(undesiredAllocations); + private List getUndesiredAllocationsExcludingShuttingDownNodesMetrics() { + return getIfPublishing(undesiredAllocationsExcludingShuttingDownNodes); } private List getIfPublishing(long value) { @@ -344,7 +361,7 @@ private List getIfPublishing(long value) { private List getUndesiredAllocationsRatioMetrics() { if (nodeIsMaster) { var total = totalAllocations; - var undesired = undesiredAllocations; + var undesired = undesiredAllocationsExcludingShuttingDownNodes; return List.of(new DoubleWithAttributes(total != 0 ? 
(double) undesired / total : 0.0)); } return List.of(); @@ -357,7 +374,7 @@ private List getUndesiredAllocationsRatioMetrics() { public void zeroAllMetrics() { unassignedShards = 0; totalAllocations = 0; - undesiredAllocations = 0; + undesiredAllocationsExcludingShuttingDownNodes = 0; weightStatsPerNodeRef.set(Map.of()); allocationStatsPerNodeRef.set(Map.of()); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index 909a7a7a99a61..dd7216758c3b7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -20,10 +20,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; -import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsAndWeightsCalculator; -import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsAndWeightsCalculator.NodeAllocationStatsAndWeight; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceMetrics.AllocationStats; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.ClusterSettings; @@ -36,9 +33,7 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Comparator; -import java.util.HashMap; import java.util.Iterator; -import java.util.Map; import java.util.Set; import java.util.function.BiFunction; import java.util.stream.Collectors; @@ -82,16 +77,8 @@ public class DesiredBalanceReconciler { private double undesiredAllocationsLogThreshold; private final NodeAllocationOrdering allocationOrdering = new NodeAllocationOrdering(); private final NodeAllocationOrdering moveOrdering = new NodeAllocationOrdering(); - private final DesiredBalanceMetrics desiredBalanceMetrics; - private final NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator; - - public DesiredBalanceReconciler( - ClusterSettings clusterSettings, - ThreadPool threadPool, - DesiredBalanceMetrics desiredBalanceMetrics, - NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator - ) { - this.desiredBalanceMetrics = desiredBalanceMetrics; + + public DesiredBalanceReconciler(ClusterSettings clusterSettings, ThreadPool threadPool) { this.undesiredAllocationLogInterval = new FrequencyCappedAction( threadPool.relativeTimeInMillisSupplier(), TimeValue.timeValueMinutes(5) @@ -101,7 +88,6 @@ public DesiredBalanceReconciler( UNDESIRED_ALLOCATIONS_LOG_THRESHOLD_SETTING, value -> this.undesiredAllocationsLogThreshold = value ); - this.nodeAllocationStatsAndWeightsCalculator = nodeAllocationStatsAndWeightsCalculator; } /** @@ -110,12 +96,13 @@ public DesiredBalanceReconciler( * @param desiredBalance The new desired cluster shard allocation * @param allocation Cluster state information with which to make decisions, contains routing table metadata that will be modified to * reach the given desired balance. + * @return {@link DesiredBalanceMetrics.AllocationStats} for this round of reconciliation changes. 
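The undesired-allocation ratio gauge above boils down to one guarded division; a worked example:

    public class RatioSketch {
        static double undesiredRatio(long undesired, long total) {
            return total != 0 ? (double) undesired / total : 0.0;
        }

        public static void main(String[] args) {
            System.out.println(undesiredRatio(5, 200)); // 0.025
            System.out.println(undesiredRatio(0, 0));   // 0.0 rather than NaN on an empty cluster
        }
    }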
*/ - public void reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) { + public DesiredBalanceMetrics.AllocationStats reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) { var nodeIds = allocation.routingNodes().getAllNodeIds(); allocationOrdering.retainNodes(nodeIds); moveOrdering.retainNodes(nodeIds); - new Reconciliation(desiredBalance, allocation).run(); + return new Reconciliation(desiredBalance, allocation).run(); } public void clear() { @@ -123,6 +110,11 @@ public void clear() { moveOrdering.clear(); } + /** + * Handles updating the {@code RoutingNodes} to reflect the next steps towards the new {@code DesiredBalance}. Updates are limited by + * throttling (there are limits on the number of concurrent shard moves) or resource constraints (some shard moves might not be + * immediately possible until other shards move first). + */ private class Reconciliation { private final DesiredBalance desiredBalance; @@ -135,7 +127,7 @@ private class Reconciliation { this.routingNodes = allocation.routingNodes(); } - void run() { + DesiredBalanceMetrics.AllocationStats run() { try (var ignored = allocation.withReconcilingFlag()) { logger.debug("Reconciling desired balance for [{}]", desiredBalance.lastConvergedIndex()); @@ -144,13 +136,13 @@ void run() { // no data nodes, so fail allocation to report red health failAllocationOfNewPrimaries(allocation); logger.trace("no nodes available, nothing to reconcile"); - return; + return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS; } if (desiredBalance.assignments().isEmpty()) { // no desired state yet but it is on its way and we'll reroute again when it is ready logger.trace("desired balance is empty, nothing to reconcile"); - return; + return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS; } // compute next moves towards current desired balance: @@ -163,38 +155,22 @@ void run() { // 2. move any shards that cannot remain where they are logger.trace("Reconciler#moveShards"); moveShards(); + // 3. move any other shards that are desired elsewhere + // This is the rebalancing work. The previous calls were necessary, to assign unassigned shard copies, and move shards that + // violate resource thresholds. Now we run moves to improve the relative node resource loads. logger.trace("Reconciler#balance"); - var allocationStats = balance(); + DesiredBalanceMetrics.AllocationStats allocationStats = balance(); logger.debug("Reconciliation is complete"); - - updateDesireBalanceMetrics(allocationStats); - } - } - - private void updateDesireBalanceMetrics(AllocationStats allocationStats) { - var nodesStatsAndWeights = nodeAllocationStatsAndWeightsCalculator.nodesAllocationStatsAndWeights( - allocation.metadata(), - allocation.routingNodes(), - allocation.clusterInfo(), - desiredBalance - ); - Map filteredNodeAllocationStatsAndWeights = new HashMap<>( - nodesStatsAndWeights.size() - ); - for (var nodeStatsAndWeight : nodesStatsAndWeights.entrySet()) { - var node = allocation.nodes().get(nodeStatsAndWeight.getKey()); - if (node != null) { - filteredNodeAllocationStatsAndWeights.put(node, nodeStatsAndWeight.getValue()); - } + return allocationStats; } - desiredBalanceMetrics.updateMetrics(allocationStats, desiredBalance.weightsPerNode(), filteredNodeAllocationStatsAndWeights); } + /** + * Checks whether every shard is either assigned or ignored. Expected to be called after {@link #allocateUnassigned()}. 
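A control-flow sketch of the restructured run() above, with hypothetical stand-in types rather than the real RoutingAllocation and DesiredBalance APIs:

    // "Stats" stands in for DesiredBalanceMetrics.AllocationStats.
    record Stats(long unassigned, long total, long undesired) {
        static final Stats EMPTY = new Stats(-1, -1, -1);
    }

    public class ReconcileSketch {
        static Stats run(boolean hasDataNodes, boolean hasDesiredBalance) {
            if (hasDataNodes == false) {
                return Stats.EMPTY; // no data nodes: fail new primaries, nothing to reconcile
            }
            if (hasDesiredBalance == false) {
                return Stats.EMPTY; // desired balance not computed yet; reroute again when ready
            }
            allocateUnassigned(); // 1. assign unassigned shard copies
            moveShards();         // 2. move shards that cannot remain where they are
            return balance();     // 3. rebalance; only this phase produces AllocationStats
        }

        static void allocateUnassigned() {}
        static void moveShards() {}
        static Stats balance() { return new Stats(0, 0, 0); }
    }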
+ */ private boolean allocateUnassignedInvariant() { - // after allocateUnassigned, every shard must be either assigned or ignored - assert routingNodes.unassigned().isEmpty(); final var shardCounts = allocation.metadata().stream().filter(indexMetadata -> @@ -263,45 +239,55 @@ private void allocateUnassigned() { } /* + * Create some comparators to sort the unassigned shard copies in priority to allocate order. * TODO: We could be smarter here and group the shards by index and then * use the sorter to save some iterations. */ - final PriorityComparator secondaryComparator = PriorityComparator.getAllocationComparator(allocation); - final Comparator comparator = (o1, o2) -> { + final PriorityComparator indexPriorityComparator = PriorityComparator.getAllocationComparator(allocation); + final Comparator shardAllocationPriorityComparator = (o1, o2) -> { + // Prioritize assigning a primary shard copy, if one is a primary and the other is not. if (o1.primary() ^ o2.primary()) { return o1.primary() ? -1 : 1; } + + // Then order shards in the same index arbitrarily by shard ID. if (o1.getIndexName().compareTo(o2.getIndexName()) == 0) { return o1.getId() - o2.getId(); } + + // Lastly, prioritize system indices, then use index priority of non-system indices, then by age, etc. + // // this comparator is more expensive than all the others up there // that's why it's added last even though it could be easier to read // if we'd apply it earlier. this comparator will only differentiate across // indices all shards of the same index is treated equally. - final int secondary = secondaryComparator.compare(o1, o2); - assert secondary != 0 : "Index names are equal, should be returned early."; - return secondary; + final int secondaryComparison = indexPriorityComparator.compare(o1, o2); + assert secondaryComparison != 0 : "Index names are equal, should be returned early."; + return secondaryComparison; }; + /* * we use 2 arrays and move replicas to the second array once we allocated an identical * replica in the current iteration to make sure all indices get allocated in the same manner. - * The arrays are sorted by primaries first and then by index and shard ID so a 2 indices with + * The arrays are sorted by primaries first and then by index and shard ID so 2 indices with * 2 replica and 1 shard would look like: * [(0,P,IDX1), (0,P,IDX2), (0,R,IDX1), (0,R,IDX1), (0,R,IDX2), (0,R,IDX2)] * if we allocate for instance (0, R, IDX1) we move the second replica to the secondary array and proceed with * the next replica. If we could not find a node to allocate (0,R,IDX1) we move all it's replicas to ignoreUnassigned. 
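A worked example of the shardAllocationPriorityComparator ordering described above, with a hypothetical (primary, index, shardId) tuple in place of ShardRouting and plain name order standing in for the more expensive PriorityComparator:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    record Copy(boolean primary, String index, int shardId) {}

    public class PrioritySketch {
        public static void main(String[] args) {
            Comparator<Copy> cmp = (o1, o2) -> {
                if (o1.primary() ^ o2.primary()) {
                    return o1.primary() ? -1 : 1; // primaries strictly before replicas
                }
                if (o1.index().compareTo(o2.index()) == 0) {
                    return o1.shardId() - o2.shardId(); // same index: ascending shard id
                }
                return o1.index().compareTo(o2.index()); // stand-in for PriorityComparator
            };
            List<Copy> copies = new ArrayList<>(List.of(
                new Copy(false, "idx1", 0), new Copy(true, "idx2", 0),
                new Copy(true, "idx1", 0), new Copy(false, "idx2", 0)
            ));
            copies.sort(cmp);
            // Result: primary idx1/0, primary idx2/0, replica idx1/0, replica idx2/0
            copies.forEach(System.out::println);
        }
    }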
private final class NodeIdsIterator implements Iterator<String> { @@ -371,11 +367,7 @@ private final class NodeIdsIterator implements Iterator<String> { private final ShardRouting shard; private final RoutingNodes routingNodes; /** - * Contains the source of the nodeIds used for shard assignment.
It could be: - * * desired - when using desired nodes - * * forced initial allocation - when initial allocation is forced to certain nodes by shrink/split/clone index operation - * * fallback - when assigning the primary shard is temporarily not possible on desired nodes, - * and it is assigned elsewhere in the cluster + * Contains the source of the nodeIds used for shard assignment. */ private NodeIdSource source; private Iterator<String> nodeIds; @@ -431,11 +423,21 @@ public String next() { } private enum NodeIdSource { + // Using desired nodes. DESIRED, + // Initial allocation is forced to certain nodes by shrink/split/clone index operation. FORCED_INITIAL_ALLOCATION, + // Assigning the primary shard is temporarily not possible on desired nodes, and it is assigned elsewhere in the cluster. FALLBACK; } + /** + * Checks whether the {@code shard} copy has been assigned to a node or not in {@code assignment}. + * @param routingNodes The current routing information + * @param shard A particular shard copy + * @param assignment The assignments for shard primary and replica copies + * @return Whether the shard copy is ignored in {@code assignment}, i.e. whether it has no node assignment. + */ private boolean isIgnored(RoutingNodes routingNodes, ShardRouting shard, ShardAssignment assignment) { if (assignment.ignored() == 0) { // no shards are ignored @@ -512,7 +514,8 @@ private void moveShards() { } } - private AllocationStats balance() { + private DesiredBalanceMetrics.AllocationStats balance() { + // Check if rebalancing is disabled. if (allocation.deciders().canRebalance(allocation).type() != Decision.Type.YES) { return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS; } @@ -581,8 +584,11 @@ private AllocationStats balance() { } maybeLogUndesiredAllocationsWarning(totalAllocations, undesiredAllocationsExcludingShuttingDownNodes, routingNodes.size()); - - return new AllocationStats(unassignedShards, totalAllocations, undesiredAllocationsExcludingShuttingDownNodes); + return new DesiredBalanceMetrics.AllocationStats( + unassignedShards, + totalAllocations, + undesiredAllocationsExcludingShuttingDownNodes + ); } private void maybeLogUndesiredAllocationsWarning(int totalAllocations, int undesiredAllocations, int nodeCount) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index 2c73a27ad3418..cb3f3b306d806 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.AllocationService.RerouteStrategy; @@ -39,6 +40,7 @@ import java.util.ArrayList; import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -87,7 +89,12 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { private final AtomicReference<DesiredBalance> currentDesiredBalanceRef = new AtomicReference<>(DesiredBalance.NOT_MASTER);
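The reference declared above is only ever advanced with compareAndSet (see setCurrentDesiredBalance below), so a stale computation result can never clobber a concurrent reset. A minimal sketch of that publication pattern, with an invented Balance record standing in for DesiredBalance:

    import java.util.Map;
    import java.util.concurrent.atomic.AtomicReference;

    public class BalancePublishSketch {
        record Balance(long index, Map<String, String> assignments) {}

        private static final Balance NOT_MASTER = new Balance(-1, Map.of());

        private final AtomicReference<Balance> current = new AtomicReference<>(NOT_MASTER);

        /** Publish only if nobody replaced or reset the balance while it was being computed. */
        boolean publish(Balance expected, Balance computed) {
            if (current.compareAndSet(expected, computed)) {
                onNewBalance(expected, computed); // e.g. record a balancing-round summary here
                return true;
            }
            return false; // stale input: recompute from the current reference instead of overwriting
        }

        private void onNewBalance(Balance old, Balance updated) {
            System.out.println("balance advanced from " + old.index() + " to " + updated.index());
        }
    }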
private volatile boolean resetCurrentDesiredBalance = false; private final Set<String> processedNodeShutdowns = new HashSet<>(); + private final NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator; private final DesiredBalanceMetrics desiredBalanceMetrics; + /** + * Manages balancer round results in order to report on the balancer activity in a configurable manner. + */ + private final AllocationBalancingRoundSummaryService balancerRoundSummaryService; // stats protected final CounterMetric computationsSubmitted = new CounterMetric(); @@ -132,16 +139,13 @@ public DesiredBalanceShardsAllocator( NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator ) { this.desiredBalanceMetrics = new DesiredBalanceMetrics(telemetryProvider.getMeterRegistry()); + this.nodeAllocationStatsAndWeightsCalculator = nodeAllocationStatsAndWeightsCalculator; + this.balancerRoundSummaryService = new AllocationBalancingRoundSummaryService(threadPool, clusterService.getClusterSettings()); this.delegateAllocator = delegateAllocator; this.threadPool = threadPool; this.reconciler = reconciler; this.desiredBalanceComputer = desiredBalanceComputer; - this.desiredBalanceReconciler = new DesiredBalanceReconciler( - clusterService.getClusterSettings(), - threadPool, - desiredBalanceMetrics, - nodeAllocationStatsAndWeightsCalculator - ); + this.desiredBalanceReconciler = new DesiredBalanceReconciler(clusterService.getClusterSettings(), threadPool); this.desiredBalanceComputation = new ContinuousComputation<>(threadPool.generic()) { @Override @@ -320,6 +324,7 @@ private void setCurrentDesiredBalance(DesiredBalance newDesiredBalance) { } if (currentDesiredBalanceRef.compareAndSet(oldDesiredBalance, newDesiredBalance)) { + balancerRoundSummaryService.addBalancerRoundSummary(calculateBalancingRoundSummary(oldDesiredBalance, newDesiredBalance)); if (logger.isTraceEnabled()) { var diff = DesiredBalance.hasChanges(oldDesiredBalance, newDesiredBalance) ? "Diff: " + DesiredBalance.humanReadableDiff(oldDesiredBalance, newDesiredBalance) @@ -334,6 +339,17 @@ private void setCurrentDesiredBalance(DesiredBalance newDesiredBalance) { } } + /** + * Summarizes the work required to move from an old to a new desired balance shard allocation. + */ + private BalancingRoundSummary calculateBalancingRoundSummary(DesiredBalance oldDesiredBalance, DesiredBalance newDesiredBalance) { + return new BalancingRoundSummary(DesiredBalance.shardMovements(oldDesiredBalance, newDesiredBalance)); + } + + /** + * Submits the desired balance to be reconciled (applies the desired changes to the routing table) and creates and publishes a new + * cluster state. The data nodes will receive and apply the new cluster state to start/move/remove shards. + */ protected void submitReconcileTask(DesiredBalance desiredBalance) { masterServiceTaskQueue.submitTask("reconcile-desired-balance", new ReconcileDesiredBalanceTask(desiredBalance), null); }
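calculateBalancingRoundSummary above counts shard movements between two desired balances via DesiredBalance.shardMovements; a rough standalone approximation of that kind of diff, assuming assignments are reduced to a plain shard-to-node map (the real DesiredBalance tracks richer per-shard assignments):

    import java.util.Map;

    public class ShardMovementsSketch {

        /** Count shards whose desired node changed between the two snapshots. */
        static long shardMovements(Map<String, String> oldAssignments, Map<String, String> newAssignments) {
            return newAssignments.entrySet()
                .stream()
                .filter(e -> oldAssignments.containsKey(e.getKey()))
                .filter(e -> oldAssignments.get(e.getKey()).equals(e.getValue()) == false)
                .count();
        }

        public static void main(String[] args) {
            Map<String, String> before = Map.of("idx[0][P]", "node-1", "idx[0][R]", "node-2");
            Map<String, String> after = Map.of("idx[0][P]", "node-1", "idx[0][R]", "node-3");
            System.out.println(shardMovements(before, after)); // prints 1
        }
    }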
@@ -344,7 +360,11 @@ protected void reconcile(DesiredBalance desiredBalance, RoutingAllocation alloca } else { logger.debug("Reconciling desired balance for [{}]", desiredBalance.lastConvergedIndex()); } - recordTime(cumulativeReconciliationTime, () -> desiredBalanceReconciler.reconcile(desiredBalance, allocation)); + recordTime(cumulativeReconciliationTime, () -> { + DesiredBalanceMetrics.AllocationStats allocationStats = desiredBalanceReconciler.reconcile(desiredBalance, allocation); + updateDesiredBalanceMetrics(desiredBalance, allocation, allocationStats); + }); + if (logger.isTraceEnabled()) { logger.trace("Reconciled desired balance: {}", desiredBalance); } else { @@ -378,6 +398,28 @@ public void resetDesiredBalance() { resetCurrentDesiredBalance = true; } + private void updateDesiredBalanceMetrics( + DesiredBalance desiredBalance, + RoutingAllocation routingAllocation, + DesiredBalanceMetrics.AllocationStats allocationStats + ) { + var nodesStatsAndWeights = nodeAllocationStatsAndWeightsCalculator.nodesAllocationStatsAndWeights( + routingAllocation.metadata(), + routingAllocation.routingNodes(), + routingAllocation.clusterInfo(), + desiredBalance + ); + Map<DiscoveryNode, NodeAllocationStatsAndWeightsCalculator.NodeAllocationStatsAndWeight> filteredNodeAllocationStatsAndWeights = + new HashMap<>(nodesStatsAndWeights.size()); + for (var nodeStatsAndWeight : nodesStatsAndWeights.entrySet()) { + var node = routingAllocation.nodes().get(nodeStatsAndWeight.getKey()); + if (node != null) { + filteredNodeAllocationStatsAndWeights.put(node, nodeStatsAndWeight.getValue()); + } + } + desiredBalanceMetrics.updateMetrics(allocationStats, desiredBalance.weightsPerNode(), filteredNodeAllocationStatsAndWeights); + } + public DesiredBalanceStats getStats() { return new DesiredBalanceStats( Math.max(currentDesiredBalanceRef.get().lastConvergedIndex(), 0L), diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIterator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIterator.java index 8229c21423564..3b8c4403c4c8a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIterator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIterator.java @@ -33,10 +33,28 @@ public class OrderedShardsIterator implements Iterator<ShardRouting> { private final ArrayDeque queue; + /** + * The returned iterator progresses through the shards node by node, each node's shards ordered from most write active to least. + * + * @param allocation the routing allocation whose {@link RoutingNodes} supply the shards to iterate + * @param ordering the node allocation ordering that determines the node-by-node visit order + * @return An iterator over all shards in the {@link RoutingNodes} held by {@code allocation} (all shards assigned to a node). The + * iterator will progress node by node, where each node's shards are ordered from data stream write indices, to regular indices and + * lastly to data stream read indices. + */ public static OrderedShardsIterator createForNecessaryMoves(RoutingAllocation allocation, NodeAllocationOrdering ordering) { return create(allocation.routingNodes(), createShardsComparator(allocation.metadata()), ordering); } + /** + * The returned iterator progresses through the shards node by node, each node's shards ordered from least write active to most. + * + * @param allocation the routing allocation whose {@link RoutingNodes} supply the shards to iterate + * @param ordering the node allocation ordering that determines the node-by-node visit order + * @return An iterator over all shards in the {@link RoutingNodes} held by {@code allocation} (all shards assigned to a node). The + * iterator will progress node by node, where each node's shards are ordered from data stream read indices, to regular indices and + * lastly to data stream write indices. + */ public static OrderedShardsIterator createForBalancing(RoutingAllocation allocation, NodeAllocationOrdering ordering) { return create(allocation.routingNodes(), createShardsComparator(allocation.metadata()).reversed(), ordering); }
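The two factory methods differ only in whether the shard comparator is reversed; a toy version showing how one comparator yields both visit orders (the Role enum stands in for the data-stream lookup done against the real metadata):

    import java.util.Comparator;
    import java.util.List;

    public class ShardOrderSketch {
        // Enum order encodes write activity: write indices first, read indices last.
        enum Role { DATA_STREAM_WRITE_INDEX, REGULAR_INDEX, DATA_STREAM_READ_INDEX }

        record Shard(String index, Role role) {}

        public static void main(String[] args) {
            Comparator<Shard> mostWriteActiveFirst = Comparator.comparing(Shard::role);
            List<Shard> shards = List.of(
                new Shard(".ds-logs-000001", Role.DATA_STREAM_READ_INDEX),
                new Shard("products", Role.REGULAR_INDEX),
                new Shard(".ds-logs-000002", Role.DATA_STREAM_WRITE_INDEX));

            // Necessary moves: touch write-active shards first.
            System.out.println(shards.stream().sorted(mostWriteActiveFirst).toList());
            // Rebalancing: the same comparator reversed, so write-active shards are disturbed last.
            System.out.println(shards.stream().sorted(mostWriteActiveFirst.reversed()).toList());
        }
    }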
@@ -61,6 +79,9 @@ private static Iterator<ShardRouting> sort(Comparator<ShardRouting> comparator, return Iterators.forArray(shards); } + /** + * Prioritizes write indices of data streams, and deprioritizes data stream read indices, relative to regular indices. + */ private static Comparator<ShardRouting> createShardsComparator(Metadata metadata) { return Comparator.comparing(shard -> { var lookup = metadata.getIndicesLookup().get(shard.getIndexName()); diff --git a/server/src/main/java/org/elasticsearch/common/SecureRandomUtils.java b/server/src/main/java/org/elasticsearch/common/SecureRandomUtils.java new file mode 100644 index 0000000000000..bdde158b95db7 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/SecureRandomUtils.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.common; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.CharArrays; + +import java.util.Arrays; +import java.util.Base64; + +public final class SecureRandomUtils { + private SecureRandomUtils() {} + + /** + * Returns a cryptographically secure Base64 encoded {@link SecureString} of {@code numBytes} random bytes. + */ + public static SecureString getBase64SecureRandomString(int numBytes) { + byte[] randomBytes = null; + byte[] encodedBytes = null; + try { + randomBytes = new byte[numBytes]; + SecureRandomHolder.INSTANCE.nextBytes(randomBytes); + encodedBytes = Base64.getUrlEncoder().withoutPadding().encode(randomBytes); + return new SecureString(CharArrays.utf8BytesToChars(encodedBytes)); + } finally { + if (randomBytes != null) { + Arrays.fill(randomBytes, (byte) 0); + } + if (encodedBytes != null) { + Arrays.fill(encodedBytes, (byte) 0); + } + } + } +}
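A standalone approximation of the utility above, for experimenting outside the Elasticsearch codebase: SecureRandomHolder and SecureString are internal to the server module, so this sketch substitutes a plain SecureRandom and a char[] while keeping the same zero-the-buffers discipline:

    import java.security.SecureRandom;
    import java.util.Arrays;
    import java.util.Base64;

    public class TokenSketch {
        private static final SecureRandom RANDOM = new SecureRandom();

        static char[] base64RandomChars(int numBytes) {
            byte[] randomBytes = new byte[numBytes];
            byte[] encodedBytes = null;
            try {
                RANDOM.nextBytes(randomBytes);
                encodedBytes = Base64.getUrlEncoder().withoutPadding().encode(randomBytes);
                char[] chars = new char[encodedBytes.length];
                for (int i = 0; i < encodedBytes.length; i++) {
                    chars[i] = (char) (encodedBytes[i] & 0xff); // Base64 output is ASCII-only
                }
                return chars;
            } finally {
                // Zero intermediate buffers so the secret does not linger on the heap.
                Arrays.fill(randomBytes, (byte) 0);
                if (encodedBytes != null) {
                    Arrays.fill(encodedBytes, (byte) 0);
                }
            }
        }

        public static void main(String[] args) {
            System.out.println(base64RandomChars(16)); // 22 URL-safe chars for 16 random bytes
        }
    }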
diff --git a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index f6d6c7fd68738..134f7746ba627 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -127,7 +127,7 @@ public static void configure(final Environment environment, boolean useConsole) StatusLogger.getLogger().removeListener(ERROR_LISTENER); } configureESLogging(); - configure(environment.settings(), environment.configFile(), environment.logsFile(), useConsole); + configure(environment.settings(), environment.configDir(), environment.logsDir(), useConsole); initializeStatics(); // creates a permanent status logger that can watch for StatusLogger events and forward to a real logger configureStatusLoggerForwarder(); diff --git a/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java b/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java index 6b92f87a9be23..e8354be5ea225 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java +++ b/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java @@ -22,6 +22,12 @@ public Logger getLogger(String name) { @Override public Logger getLogger(Class<?> clazz) { - return new LoggerImpl(LogManager.getLogger(clazz)); + // Elasticsearch configures logging at the root level; it does not support + // programmatic configuration at the logger level. Log4j's method for + // getting a logger by Class doesn't just use the class name, but also + // scans the classloader hierarchy for programmatic configuration. Here we + // just delegate to the String class name so that regardless of which + // classloader a class comes from, we will use the root logging config. + return getLogger(clazz.getName()); + } }
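The delegation above can be reproduced in isolation; the point is that LogManager.getLogger(String) resolves purely by name, while the Class overload may also consult classloader-specific programmatic configuration. A minimal sketch, assuming the Log4j API is on the classpath:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class LoggerLookupSketch {
        static Logger loggerFor(Class<?> clazz) {
            // Resolving by the class *name* keeps every class on the root configuration,
            // regardless of which classloader loaded it.
            return LogManager.getLogger(clazz.getName());
        }

        public static void main(String[] args) {
            loggerFor(LoggerLookupSketch.class).info("resolved by name, root config applies");
        }
    }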
diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index bd48572a8bc11..2aa87d808fc93 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -20,6 +20,8 @@ import org.apache.lucene.index.ConcurrentMergeScheduler; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; @@ -70,7 +72,6 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -89,10 +90,9 @@ import java.util.Map; import java.util.Objects; -import static org.apache.lucene.util.Version.LUCENE_10_0_0; - public class Lucene { - public static final String LATEST_CODEC = "Lucene100"; + + public static final String LATEST_CODEC = "Lucene101"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; @@ -156,25 +156,7 @@ public static SegmentInfos readSegmentInfos(IndexCommit commit) throws IOExcepti * Reads the segments infos from the given segments file name, failing if it fails to load */ private static SegmentInfos readSegmentInfos(String segmentsFileName, Directory directory) throws IOException { - // TODO Use readCommit(Directory directory, String segmentFileName, int minSupportedMajorVersion) once Lucene 10.1 is available - // and remove the try-catch block for IndexFormatTooOldException - assert IndexVersion.current().luceneVersion().equals(LUCENE_10_0_0) : "remove the try-catch block below"; - try { - return SegmentInfos.readCommit(directory, segmentsFileName); - } catch (IndexFormatTooOldException e) { - try { - // Temporary workaround until Lucene 10.1 is available: try to leverage min. read-only compatibility to read the last commit - // and then check if this is the commit we want. This should always work for the case we are interested in (archive and - // searchable snapshots indices in N-2 version) as no newer commit should be ever written. - var segmentInfos = readSegmentInfos(directory); - if (segmentsFileName.equals(segmentInfos.getSegmentsFileName())) { - return segmentInfos; - } - } catch (Exception suppressed) { - e.addSuppressed(suppressed); - } - throw e; - } + return SegmentInfos.readCommit(directory, segmentsFileName, IndexVersions.MINIMUM_READONLY_COMPATIBLE.luceneVersion().major); } /** @@ -210,7 +192,18 @@ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Direc throw new IllegalStateException("no commit found in the directory"); } } + // Need to figure out what the parent field is, so that validation in IndexWriter doesn't fail + // if no parent field is configured, but FieldInfo says there is a parent field.
+ String parentField = null; final IndexCommit cp = getIndexCommit(si, directory); + try (var reader = DirectoryReader.open(cp)) { + var topLevelFieldInfos = FieldInfos.getMergedFieldInfos(reader); + for (FieldInfo fieldInfo : topLevelFieldInfos) { + if (fieldInfo.isParentField()) { + parentField = fieldInfo.getName(); + } + } + } try ( IndexWriter writer = new IndexWriter( directory, @@ -218,6 +211,7 @@ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Direc .setIndexCommit(cp) .setCommitOnClose(false) .setOpenMode(IndexWriterConfig.OpenMode.APPEND) + .setParentField(parentField) ) ) { // do nothing and close this will kick off IndexFileDeleter which will remove all pending files diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index aecc750bd4e39..7397382866388 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -46,6 +46,7 @@ import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; +import org.elasticsearch.cluster.routing.allocation.allocator.AllocationBalancingRoundSummaryService; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceComputer; import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceReconciler; @@ -212,6 +213,8 @@ public void apply(Settings value, Settings current, Settings previous) { } public static final Set> BUILT_IN_CLUSTER_SETTINGS = Stream.of( + AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING, + AllocationBalancingRoundSummaryService.BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING, AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, @@ -614,6 +617,7 @@ public void apply(Settings value, Settings current, Settings previous) { DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING, IndicesClusterStateService.SHARD_LOCK_RETRY_INTERVAL_SETTING, IndicesClusterStateService.SHARD_LOCK_RETRY_TIMEOUT_SETTING, + IndicesClusterStateService.CONCURRENT_SHARD_CLOSE_LIMIT, IngestSettings.GROK_WATCHDOG_INTERVAL, IngestSettings.GROK_WATCHDOG_MAX_EXECUTION_TIME, TDigestExecutionHint.SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 35289352d7daf..0686b7d7683e2 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -42,6 +42,9 @@ import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.indices.ShardLimitValidator; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -51,180 +54,192 @@ */ public final class IndexScopedSettings extends AbstractScopedSettings { - public static final Set> BUILT_IN_INDEX_SETTINGS = Set.of( - MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY, - 
MergeSchedulerConfig.AUTO_THROTTLE_SETTING, - MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING, - MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, - IndexMetadata.SETTING_INDEX_VERSION_CREATED, - IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY, - IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING, - IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING, - IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING, - IndexMetadata.INDEX_AUTO_EXPAND_REPLICAS_SETTING, - IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING, - IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING, - IndexMetadata.INDEX_ROUTING_PARTITION_SIZE_SETTING, - IndexMetadata.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING, - IndexMetadata.INDEX_READ_ONLY_SETTING, - IndexMetadata.INDEX_BLOCKS_READ_SETTING, - IndexMetadata.INDEX_BLOCKS_WRITE_SETTING, - IndexMetadata.INDEX_BLOCKS_METADATA_SETTING, - IndexMetadata.INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING, - IndexMetadata.INDEX_PRIORITY_SETTING, - IndexMetadata.INDEX_DATA_PATH_SETTING, - IndexMetadata.INDEX_HIDDEN_SETTING, - IndexMetadata.INDEX_FORMAT_SETTING, - IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME, - IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_UUID, - IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_NAME, - IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_UUID, - IndexMetadata.INDEX_DOWNSAMPLE_STATUS, - IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_INCLUDE_USER_SETTING, - IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING, - IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING, - IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING, - IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING, - IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, - IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING, - IndexingSlowLog.INDEX_INDEXING_SLOWLOG_INCLUDE_USER_SETTING, - MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_TYPE_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_DELETES_PCT_ALLOWED_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, - MergePolicyConfig.INDEX_MERGE_POLICY_MERGE_FACTOR_SETTING, - IndexSortConfig.INDEX_SORT_FIELD_SETTING, - IndexSortConfig.INDEX_SORT_ORDER_SETTING, - IndexSortConfig.INDEX_SORT_MISSING_SETTING, - IndexSortConfig.INDEX_SORT_MODE_SETTING, - IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING, - IndexSettings.INDEX_WARMER_ENABLED_SETTING, - IndexSettings.INDEX_REFRESH_INTERVAL_SETTING, - IndexSettings.INDEX_FAST_REFRESH_SETTING, - IndexSettings.MAX_RESULT_WINDOW_SETTING, - IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING, - IndexSettings.MAX_TOKEN_COUNT_SETTING, - 
IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING, - IndexSettings.MAX_SCRIPT_FIELDS_SETTING, - IndexSettings.MAX_NGRAM_DIFF_SETTING, - IndexSettings.MAX_SHINGLE_DIFF_SETTING, - IndexSettings.MAX_RESCORE_WINDOW_SETTING, - IndexSettings.MAX_ANALYZED_OFFSET_SETTING, - IndexSettings.WEIGHT_MATCHES_MODE_ENABLED_SETTING, - IndexSettings.MAX_TERMS_COUNT_SETTING, - IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING, - IndexSettings.DEFAULT_FIELD_SETTING, - IndexSettings.QUERY_STRING_LENIENT_SETTING, - IndexSettings.ALLOW_UNMAPPED, - IndexSettings.INDEX_CHECK_ON_STARTUP, - IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD, - IndexSettings.MAX_SLICES_PER_SCROLL, - IndexSettings.MAX_REGEX_LENGTH_SETTING, - ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING, - IndexSettings.INDEX_GC_DELETES_SETTING, - IndexSettings.INDEX_SOFT_DELETES_SETTING, - IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING, - IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING, - IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING, - UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, - EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING, - EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING, - IndexSettings.INDEX_FLUSH_AFTER_MERGE_THRESHOLD_SIZE_SETTING, - IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING, - IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_AGE_SETTING, - IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING, - IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING, - IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING, - IndexSettings.INDEX_SEARCH_IDLE_AFTER, - IndexSettings.INDEX_SEARCH_THROTTLED, - IndexFieldDataService.INDEX_FIELDDATA_CACHE_KEY, - IndexSettings.IGNORE_ABOVE_SETTING, - FieldMapper.IGNORE_MALFORMED_SETTING, - FieldMapper.COERCE_SETTING, - Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING, - MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING, - MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING, - MapperService.INDEX_MAPPING_IGNORE_DYNAMIC_BEYOND_LIMIT_SETTING, - MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING, - MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING, - MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING, - MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING, - MapperService.INDEX_MAPPER_DYNAMIC_SETTING, - BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING, - IndexModule.INDEX_STORE_TYPE_SETTING, - IndexModule.INDEX_STORE_PRE_LOAD_SETTING, - IndexModule.INDEX_RECOVERY_TYPE_SETTING, - IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING, - FsDirectoryFactory.INDEX_LOCK_FACTOR_SETTING, - EngineConfig.INDEX_CODEC_SETTING, - IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS, - IndexSettings.DEFAULT_PIPELINE, - IndexSettings.FINAL_PIPELINE, - MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING, - MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING, - ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING, - DiskThresholdDecider.SETTING_IGNORE_DISK_WATERMARKS, - ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP, - DataTier.TIER_PREFERENCE_SETTING, - IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING, - IndexSettings.LIFECYCLE_ORIGINATION_DATE_SETTING, - IndexSettings.LIFECYCLE_PARSE_ORIGINATION_DATE_SETTING, - IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING, - IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS, - IndexSettings.LOGSDB_SORT_ON_HOST_NAME, - IndexSettings.LOGSDB_ADD_HOST_NAME_FIELD, - IndexSettings.PREFER_ILM_SETTING, - 
DataStreamFailureStoreDefinition.FAILURE_STORE_DEFINITION_VERSION_SETTING, - FieldMapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING, - IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING, - IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, - IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING, - IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING, - InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT, + public static final Set> BUILT_IN_INDEX_SETTINGS; - // validate that built-in similarities don't get redefined - Setting.groupSetting("index.similarity.", (s) -> { - Map groups = s.getAsGroups(); - for (String key : SimilarityService.BUILT_IN.keySet()) { - if (groups.containsKey(key)) { - throw new IllegalArgumentException( - "illegal value for [index.similarity." + key + "] cannot redefine built-in similarity" - ); - } - } - }, Property.IndexScope, Property.ServerlessPublic), // this allows similarity settings to be passed - Setting.groupSetting("index.analysis.", Property.IndexScope, Property.ServerlessPublic), // this allows analysis settings to be - // passed + static { + Set> settings = new HashSet<>( + Arrays.asList( + MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY, + MergeSchedulerConfig.AUTO_THROTTLE_SETTING, + MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING, + MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, + IndexMetadata.SETTING_INDEX_VERSION_CREATED, + IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY, + IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING, + IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING, + IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING, + IndexMetadata.INDEX_AUTO_EXPAND_REPLICAS_SETTING, + IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING, + IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING, + IndexMetadata.INDEX_ROUTING_PARTITION_SIZE_SETTING, + IndexMetadata.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING, + IndexMetadata.INDEX_READ_ONLY_SETTING, + IndexMetadata.INDEX_BLOCKS_READ_SETTING, + IndexMetadata.INDEX_BLOCKS_WRITE_SETTING, + IndexMetadata.INDEX_BLOCKS_METADATA_SETTING, + IndexMetadata.INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING, + IndexMetadata.INDEX_PRIORITY_SETTING, + IndexMetadata.INDEX_DATA_PATH_SETTING, + IndexMetadata.INDEX_HIDDEN_SETTING, + IndexMetadata.INDEX_FORMAT_SETTING, + IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_NAME, + IndexMetadata.INDEX_DOWNSAMPLE_SOURCE_UUID, + IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_NAME, + IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_UUID, + IndexMetadata.INDEX_DOWNSAMPLE_STATUS, + IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_INCLUDE_USER_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING, + 
IndexingSlowLog.INDEX_INDEXING_SLOWLOG_INCLUDE_USER_SETTING, + MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_TYPE_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_DELETES_PCT_ALLOWED_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MERGE_FACTOR_SETTING, + IndexSortConfig.INDEX_SORT_FIELD_SETTING, + IndexSortConfig.INDEX_SORT_ORDER_SETTING, + IndexSortConfig.INDEX_SORT_MISSING_SETTING, + IndexSortConfig.INDEX_SORT_MODE_SETTING, + IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING, + IndexSettings.INDEX_WARMER_ENABLED_SETTING, + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING, + IndexSettings.INDEX_FAST_REFRESH_SETTING, + IndexSettings.MAX_RESULT_WINDOW_SETTING, + IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING, + IndexSettings.MAX_TOKEN_COUNT_SETTING, + IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING, + IndexSettings.MAX_SCRIPT_FIELDS_SETTING, + IndexSettings.MAX_NGRAM_DIFF_SETTING, + IndexSettings.MAX_SHINGLE_DIFF_SETTING, + IndexSettings.MAX_RESCORE_WINDOW_SETTING, + IndexSettings.MAX_ANALYZED_OFFSET_SETTING, + IndexSettings.WEIGHT_MATCHES_MODE_ENABLED_SETTING, + IndexSettings.MAX_TERMS_COUNT_SETTING, + IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING, + IndexSettings.DEFAULT_FIELD_SETTING, + IndexSettings.QUERY_STRING_LENIENT_SETTING, + IndexSettings.ALLOW_UNMAPPED, + IndexSettings.INDEX_CHECK_ON_STARTUP, + IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD, + IndexSettings.MAX_SLICES_PER_SCROLL, + IndexSettings.MAX_REGEX_LENGTH_SETTING, + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING, + IndexSettings.INDEX_GC_DELETES_SETTING, + IndexSettings.INDEX_SOFT_DELETES_SETTING, + IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING, + IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING, + IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING, + UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, + EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING, + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING, + IndexSettings.INDEX_FLUSH_AFTER_MERGE_THRESHOLD_SIZE_SETTING, + IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING, + IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_AGE_SETTING, + IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING, + IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING, + IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING, + IndexSettings.INDEX_SEARCH_IDLE_AFTER, + IndexSettings.INDEX_SEARCH_THROTTLED, + IndexFieldDataService.INDEX_FIELDDATA_CACHE_KEY, + IndexSettings.IGNORE_ABOVE_SETTING, + FieldMapper.IGNORE_MALFORMED_SETTING, + FieldMapper.COERCE_SETTING, + Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING, + MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING, + MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING, + MapperService.INDEX_MAPPING_IGNORE_DYNAMIC_BEYOND_LIMIT_SETTING, + MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING, + MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING, + MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING, + MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING, + MapperService.INDEX_MAPPER_DYNAMIC_SETTING, + 
BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING, + IndexModule.INDEX_STORE_TYPE_SETTING, + IndexModule.INDEX_STORE_PRE_LOAD_SETTING, + IndexModule.INDEX_RECOVERY_TYPE_SETTING, + IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING, + FsDirectoryFactory.INDEX_LOCK_FACTOR_SETTING, + EngineConfig.INDEX_CODEC_SETTING, + IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS, + IndexSettings.DEFAULT_PIPELINE, + IndexSettings.FINAL_PIPELINE, + MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING, + MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING, + ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING, + DiskThresholdDecider.SETTING_IGNORE_DISK_WATERMARKS, + ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP, + DataTier.TIER_PREFERENCE_SETTING, + IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING, + IndexSettings.LIFECYCLE_ORIGINATION_DATE_SETTING, + IndexSettings.LIFECYCLE_PARSE_ORIGINATION_DATE_SETTING, + IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING, + IndexSettings.LOGSDB_ROUTE_ON_SORT_FIELDS, + IndexSettings.LOGSDB_SORT_ON_HOST_NAME, + IndexSettings.LOGSDB_ADD_HOST_NAME_FIELD, + IndexSettings.PREFER_ILM_SETTING, + DataStreamFailureStoreDefinition.FAILURE_STORE_DEFINITION_VERSION_SETTING, + FieldMapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING, + IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING, + IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, + IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING, + IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING, + InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT, - // TSDB index settings - IndexSettings.MODE, - IndexMetadata.INDEX_ROUTING_PATH, - IndexSettings.TIME_SERIES_START_TIME, - IndexSettings.TIME_SERIES_END_TIME, + // validate that built-in similarities don't get redefined + Setting.groupSetting("index.similarity.", (s) -> { + Map groups = s.getAsGroups(); + for (String key : SimilarityService.BUILT_IN.keySet()) { + if (groups.containsKey(key)) { + throw new IllegalArgumentException( + "illegal value for [index.similarity." 
+ key + "] cannot redefine built-in similarity" + ); + } + } + }, Property.IndexScope, Property.ServerlessPublic), // this allows similarity settings to be passed + Setting.groupSetting("index.analysis.", Property.IndexScope, Property.ServerlessPublic), // this allows analysis settings to + // be + // passed - // Legacy index settings we must keep around for BWC from 7.x - EngineConfig.INDEX_OPTIMIZE_AUTO_GENERATED_IDS, - IndexMetadata.INDEX_ROLLUP_SOURCE_NAME, - IndexMetadata.INDEX_ROLLUP_SOURCE_UUID, - IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING, - IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING, - SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL, - Store.FORCE_RAM_TERM_DICT - ); + // TSDB index settings + IndexSettings.MODE, + IndexMetadata.INDEX_ROUTING_PATH, + IndexSettings.TIME_SERIES_START_TIME, + IndexSettings.TIME_SERIES_END_TIME, + + // Legacy index settings we must keep around for BWC from 7.x + EngineConfig.INDEX_OPTIMIZE_AUTO_GENERATED_IDS, + IndexMetadata.INDEX_ROLLUP_SOURCE_NAME, + IndexMetadata.INDEX_ROLLUP_SOURCE_UUID, + IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL, + Store.FORCE_RAM_TERM_DICT + ) + ); + + if (IndexSettings.DOC_VALUES_SKIPPER.isEnabled()) { + settings.add(IndexSettings.USE_DOC_VALUES_SKIPPER); + } + BUILT_IN_INDEX_SETTINGS = Collections.unmodifiableSet(settings); + }; public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, BUILT_IN_INDEX_SETTINGS); diff --git a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java index 4a2e1cd92d4da..168dec0665f51 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java +++ b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java @@ -142,7 +142,7 @@ public LocallyMountedSecrets(Environment environment) { * @return Secrets directory within an Elasticsearch environment */ public static Path resolveSecretsDir(Environment environment) { - return environment.configFile().toAbsolutePath().resolve(SECRETS_DIRECTORY); + return environment.configDir().toAbsolutePath().resolve(SECRETS_DIRECTORY); } /** diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index f3319a8f85bfb..ab8b390490c1e 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -1580,6 +1580,15 @@ public static Setting boolSetting(String key, boolean defaultValue, Val return new Setting<>(key, Boolean.toString(defaultValue), booleanParser(key, properties), validator, properties); } + public static Setting boolSetting( + String key, + Function defaultValueFn, + Validator validator, + Property... properties + ) { + return new Setting<>(key, defaultValueFn, booleanParser(key, properties), validator, properties); + } + public static Setting boolSetting(String key, Function defaultValueFn, Property... 
properties) { return new Setting<>(key, defaultValueFn, booleanParser(key, properties), properties); } diff --git a/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java b/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java index 2df7e6537c609..3aa7c67a14c65 100644 --- a/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java +++ b/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java @@ -30,18 +30,29 @@ public SizeLimitingStringWriter(int sizeLimit) { this.sizeLimit = sizeLimit; } - private void checkSizeLimit(int additionalChars) { - int bufLen = getBuffer().length(); - if (bufLen + additionalChars > sizeLimit) { - throw new SizeLimitExceededException( - Strings.format("String [%s...] has exceeded the size limit [%s]", getBuffer().substring(0, Math.min(bufLen, 20)), sizeLimit) - ); + private int limitSize(int additionalChars) { + int neededSize = getBuffer().length() + additionalChars; + if (neededSize > sizeLimit) { + return additionalChars - (neededSize - sizeLimit); } + return additionalChars; + } + + private void throwSizeLimitExceeded(int limitedChars, int requestedChars) { + assert limitedChars < requestedChars; + int bufLen = getBuffer().length(); + int foundSize = bufLen - limitedChars + requestedChars; // reconstitute original + String selection = getBuffer().substring(0, Math.min(bufLen, 20)); + throw new SizeLimitExceededException( + Strings.format("String [%s...] has size [%d] which exceeds the size limit [%d]", selection, foundSize, sizeLimit) + ); } @Override public void write(int c) { - checkSizeLimit(1); + if (limitSize(1) != 1) { + throwSizeLimitExceeded(0, 1); + } super.write(c); } @@ -49,20 +60,29 @@ public void write(int c) { @Override public void write(char[] cbuf, int off, int len) { - checkSizeLimit(len); - super.write(cbuf, off, len); + int limitedLen = limitSize(len); + if (limitedLen > 0) { + super.write(cbuf, off, limitedLen); + } + if (limitedLen != len) { + throwSizeLimitExceeded(limitedLen, len); + } } @Override public void write(String str) { - checkSizeLimit(str.length()); - super.write(str); + this.write(str, 0, str.length()); } @Override public void write(String str, int off, int len) { - checkSizeLimit(len); - super.write(str, off, len); + int limitedLen = limitSize(len); + if (limitedLen > 0) { + super.write(str, off, limitedLen); + } + if (limitedLen != len) { + throwSizeLimitExceeded(limitedLen, len); + } } // append(...) delegates to write(...) 
methods diff --git a/server/src/main/java/org/elasticsearch/common/util/PlainIterator.java b/server/src/main/java/org/elasticsearch/common/util/PlainIterator.java index f4e90e002ac97..b4a049b34a133 100644 --- a/server/src/main/java/org/elasticsearch/common/util/PlainIterator.java +++ b/server/src/main/java/org/elasticsearch/common/util/PlainIterator.java @@ -13,7 +13,7 @@ import java.util.Iterator; import java.util.List; -public class PlainIterator implements Iterable, Countable { +public class PlainIterator implements Iterable { private final List elements; // Calls to nextOrNull might be performed on different threads in the transport actions so we need the volatile @@ -43,7 +43,6 @@ public T nextOrNull() { } } - @Override public int size() { return elements.size(); } diff --git a/server/src/main/java/org/elasticsearch/env/Environment.java b/server/src/main/java/org/elasticsearch/env/Environment.java index f9594655719f7..813da761159c5 100644 --- a/server/src/main/java/org/elasticsearch/env/Environment.java +++ b/server/src/main/java/org/elasticsearch/env/Environment.java @@ -46,28 +46,28 @@ public class Environment { private final Settings settings; - private final Path[] dataFiles; + private final Path[] dataDirs; - private final Path[] repoFiles; + private final Path[] repoDirs; - private final Path configFile; + private final Path configDir; - private final Path pluginsFile; + private final Path pluginsDir; - private final Path modulesFile; + private final Path modulesDir; - private final Path sharedDataFile; + private final Path sharedDataDir; /** location of bin/, used by plugin manager */ - private final Path binFile; + private final Path binDir; /** location of lib/, */ - private final Path libFile; + private final Path libDir; - private final Path logsFile; + private final Path logsDir; /** Path to the temporary file directory used by the JDK */ - private final Path tmpFile; + private final Path tmpDir; public Environment(final Settings settings, final Path configPath) { this(settings, configPath, PathUtils.get(System.getProperty("java.io.tmpdir"))); @@ -83,67 +83,67 @@ public Environment(final Settings settings, final Path configPath) { } if (configPath != null) { - configFile = configPath.toAbsolutePath().normalize(); + configDir = configPath.toAbsolutePath().normalize(); } else { - configFile = homeFile.resolve("config"); + configDir = homeFile.resolve("config"); } - tmpFile = Objects.requireNonNull(tmpPath); + tmpDir = Objects.requireNonNull(tmpPath); - pluginsFile = homeFile.resolve("plugins"); + pluginsDir = homeFile.resolve("plugins"); List dataPaths = PATH_DATA_SETTING.get(settings); if (dataPaths.isEmpty() == false) { - dataFiles = new Path[dataPaths.size()]; + dataDirs = new Path[dataPaths.size()]; for (int i = 0; i < dataPaths.size(); i++) { - dataFiles[i] = PathUtils.get(dataPaths.get(i)).toAbsolutePath().normalize(); + dataDirs[i] = PathUtils.get(dataPaths.get(i)).toAbsolutePath().normalize(); } } else { - dataFiles = new Path[] { homeFile.resolve("data") }; + dataDirs = new Path[] { homeFile.resolve("data") }; } if (PATH_SHARED_DATA_SETTING.exists(settings)) { - sharedDataFile = PathUtils.get(PATH_SHARED_DATA_SETTING.get(settings)).toAbsolutePath().normalize(); + sharedDataDir = PathUtils.get(PATH_SHARED_DATA_SETTING.get(settings)).toAbsolutePath().normalize(); } else { - sharedDataFile = null; + sharedDataDir = null; } List repoPaths = PATH_REPO_SETTING.get(settings); if (repoPaths.isEmpty()) { - repoFiles = EMPTY_PATH_ARRAY; + repoDirs = EMPTY_PATH_ARRAY; } else { 
- repoFiles = new Path[repoPaths.size()]; + repoDirs = new Path[repoPaths.size()]; for (int i = 0; i < repoPaths.size(); i++) { - repoFiles[i] = PathUtils.get(repoPaths.get(i)).toAbsolutePath().normalize(); + repoDirs[i] = PathUtils.get(repoPaths.get(i)).toAbsolutePath().normalize(); } } // this is trappy, Setting#get(Settings) will get a fallback setting yet return false for Settings#exists(Settings) if (PATH_LOGS_SETTING.exists(settings)) { - logsFile = PathUtils.get(PATH_LOGS_SETTING.get(settings)).toAbsolutePath().normalize(); + logsDir = PathUtils.get(PATH_LOGS_SETTING.get(settings)).toAbsolutePath().normalize(); } else { - logsFile = homeFile.resolve("logs"); + logsDir = homeFile.resolve("logs"); } - binFile = homeFile.resolve("bin"); - libFile = homeFile.resolve("lib"); - modulesFile = homeFile.resolve("modules"); + binDir = homeFile.resolve("bin"); + libDir = homeFile.resolve("lib"); + modulesDir = homeFile.resolve("modules"); final Settings.Builder finalSettings = Settings.builder().put(settings); if (PATH_DATA_SETTING.exists(settings)) { if (dataPathUsesList(settings)) { - finalSettings.putList(PATH_DATA_SETTING.getKey(), Arrays.stream(dataFiles).map(Path::toString).toList()); + finalSettings.putList(PATH_DATA_SETTING.getKey(), Arrays.stream(dataDirs).map(Path::toString).toList()); } else { - assert dataFiles.length == 1; - finalSettings.put(PATH_DATA_SETTING.getKey(), dataFiles[0]); + assert dataDirs.length == 1; + finalSettings.put(PATH_DATA_SETTING.getKey(), dataDirs[0]); } } finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile); - finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString()); + finalSettings.put(PATH_LOGS_SETTING.getKey(), logsDir.toString()); if (PATH_REPO_SETTING.exists(settings)) { - finalSettings.putList(Environment.PATH_REPO_SETTING.getKey(), Arrays.stream(repoFiles).map(Path::toString).toList()); + finalSettings.putList(Environment.PATH_REPO_SETTING.getKey(), Arrays.stream(repoDirs).map(Path::toString).toList()); } if (PATH_SHARED_DATA_SETTING.exists(settings)) { - assert sharedDataFile != null; - finalSettings.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataFile.toString()); + assert sharedDataDir != null; + finalSettings.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataDir.toString()); } this.settings = finalSettings.build(); @@ -159,22 +159,22 @@ public Settings settings() { /** * The data location. */ - public Path[] dataFiles() { - return dataFiles; + public Path[] dataDirs() { + return dataDirs; } /** * The shared data location */ - public Path sharedDataFile() { - return sharedDataFile; + public Path sharedDataDir() { + return sharedDataDir; } /** * The shared filesystem repo locations. */ - public Path[] repoFiles() { - return repoFiles; + public Path[] repoDirs() { + return repoDirs; } /** @@ -182,8 +182,8 @@ public Path[] repoFiles() { * * If the specified location doesn't match any of the roots, returns null. 
*/ - public Path resolveRepoFile(String location) { - return PathUtils.get(repoFiles, location); + public Path resolveRepoDir(String location) { + return PathUtils.get(repoDirs, location); } /** @@ -197,7 +197,7 @@ public URL resolveRepoURL(URL url) { if ("file".equalsIgnoreCase(url.getProtocol())) { if (url.getHost() == null || "".equals(url.getHost())) { // only local file urls are supported - Path path = PathUtils.get(repoFiles, url.toURI()); + Path path = PathUtils.get(repoDirs, url.toURI()); if (path == null) { // Couldn't resolve against known repo locations return null; @@ -232,49 +232,48 @@ public URL resolveRepoURL(URL url) { } } - // TODO: rename all these "file" methods to "dir" /** * The config directory. */ - public Path configFile() { - return configFile; + public Path configDir() { + return configDir; } - public Path pluginsFile() { - return pluginsFile; + public Path pluginsDir() { + return pluginsDir; } - public Path binFile() { - return binFile; + public Path binDir() { + return binDir; } - public Path libFile() { - return libFile; + public Path libDir() { + return libDir; } - public Path modulesFile() { - return modulesFile; + public Path modulesDir() { + return modulesDir; } - public Path logsFile() { - return logsFile; + public Path logsDir() { + return logsDir; } /** Path to the default temp directory used by the JDK */ - public Path tmpFile() { - return tmpFile; + public Path tmpDir() { + return tmpDir; } /** Ensure the configured temp directory is a valid directory */ - public void validateTmpFile() throws IOException { - validateTemporaryDirectory("Temporary directory", tmpFile); + public void validateTmpDir() throws IOException { + validateTemporaryDirectory("Temporary directory", tmpDir); } /** * Ensure the temp directories needed for JNA are set up correctly. 
*/ public void validateNativesConfig() throws IOException { - validateTmpFile(); + validateTmpDir(); if (Constants.LINUX) { validateTemporaryDirectory(LIBFFI_TMPDIR_ENVIRONMENT_VARIABLE + " environment variable", getLibffiTemporaryDirectory()); } @@ -335,15 +334,15 @@ public static long getUsableSpace(Path path) throws IOException { * object which may contain different setting) */ public static void assertEquivalent(Environment actual, Environment expected) { - assertEquals(actual.dataFiles(), expected.dataFiles(), "dataFiles"); - assertEquals(actual.repoFiles(), expected.repoFiles(), "repoFiles"); - assertEquals(actual.configFile(), expected.configFile(), "configFile"); - assertEquals(actual.pluginsFile(), expected.pluginsFile(), "pluginsFile"); - assertEquals(actual.binFile(), expected.binFile(), "binFile"); - assertEquals(actual.libFile(), expected.libFile(), "libFile"); - assertEquals(actual.modulesFile(), expected.modulesFile(), "modulesFile"); - assertEquals(actual.logsFile(), expected.logsFile(), "logsFile"); - assertEquals(actual.tmpFile(), expected.tmpFile(), "tmpFile"); + assertEquals(actual.dataDirs(), expected.dataDirs(), "dataDirs"); + assertEquals(actual.repoDirs(), expected.repoDirs(), "repoDirs"); + assertEquals(actual.configDir(), expected.configDir(), "configDir"); + assertEquals(actual.pluginsDir(), expected.pluginsDir(), "pluginsDir"); + assertEquals(actual.binDir(), expected.binDir(), "binDir"); + assertEquals(actual.libDir(), expected.libDir(), "libDir"); + assertEquals(actual.modulesDir(), expected.modulesDir(), "modulesDir"); + assertEquals(actual.logsDir(), expected.logsDir(), "logsDir"); + assertEquals(actual.tmpDir(), expected.tmpDir(), "tmpDir"); } private static void assertEquals(Object actual, Object expected, String name) { diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 90e2ae5c62703..febde6b6a69ac 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -215,10 +215,10 @@ public NodeLock( final CheckedFunction pathFunction, final Function subPathMapping ) throws IOException { - dataPaths = new DataPath[environment.dataFiles().length]; + dataPaths = new DataPath[environment.dataDirs().length]; locks = new Lock[dataPaths.length]; try { - final Path[] dataPaths = environment.dataFiles(); + final Path[] dataPaths = environment.dataDirs(); for (int dirIndex = 0; dirIndex < dataPaths.length; dirIndex++) { Path dataDir = dataPaths[dirIndex]; Path dir = subPathMapping.apply(dataDir); @@ -267,9 +267,9 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce boolean success = false; try { - sharedDataPath = environment.sharedDataFile(); + sharedDataPath = environment.sharedDataDir(); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { if (Files.exists(path)) { // Call to toRealPath required to resolve symlinks. 
// We let it fall through to create directories to ensure the symlink @@ -287,7 +287,7 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce Locale.ROOT, "failed to obtain node locks, tried %s;" + " maybe these locations are not writable or multiple nodes were started on the same data path?", - Arrays.toString(environment.dataFiles()) + Arrays.toString(environment.dataDirs()) ); throw new IllegalStateException(message, e); } @@ -310,7 +310,7 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce } // versions 7.x and earlier put their data under ${path.data}/nodes/; leave a file at that location to prevent downgrades - for (Path dataPath : environment.dataFiles()) { + for (Path dataPath : environment.dataDirs()) { final Path legacyNodesPath = dataPath.resolve("nodes"); if (Files.isRegularFile(legacyNodesPath) == false) { final String content = "written by Elasticsearch " @@ -349,7 +349,7 @@ private static boolean upgradeLegacyNodeFolders(Logger logger, Settings settings boolean upgradeNeeded = false; // check if we can do an auto-upgrade - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { final Path nodesFolderPath = path.resolve("nodes"); if (Files.isDirectory(nodesFolderPath)) { final List nodeLockIds = new ArrayList<>(); @@ -392,7 +392,7 @@ private static boolean upgradeLegacyNodeFolders(Logger logger, Settings settings return false; } - logger.info("upgrading legacy data folders: {}", Arrays.toString(environment.dataFiles())); + logger.info("upgrading legacy data folders: {}", Arrays.toString(environment.dataDirs())); // acquire locks on legacy path for duration of upgrade (to ensure there is no older ES version running on this path) final NodeLock legacyNodeLock; @@ -403,7 +403,7 @@ private static boolean upgradeLegacyNodeFolders(Logger logger, Settings settings Locale.ROOT, "failed to obtain legacy node locks, tried %s;" + " maybe these locations are not writable or multiple nodes were started on the same data path?", - Arrays.toString(environment.dataFiles()) + Arrays.toString(environment.dataDirs()) ); throw new IllegalStateException(message, e); } @@ -494,7 +494,7 @@ private static boolean upgradeLegacyNodeFolders(Logger logger, Settings settings } // upgrade successfully completed, remove legacy nodes folders - IOUtils.rm(Stream.of(environment.dataFiles()).map(path -> path.resolve("nodes")).toArray(Path[]::new)); + IOUtils.rm(Stream.of(environment.dataDirs()).map(path -> path.resolve("nodes")).toArray(Path[]::new)); return true; } diff --git a/server/src/main/java/org/elasticsearch/features/FeatureService.java b/server/src/main/java/org/elasticsearch/features/FeatureService.java index 9f4ec6209c6f1..a4316aeeaf30d 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureService.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureService.java @@ -53,14 +53,14 @@ public Map getNodeFeatures() { /** * Returns {@code true} if {@code node} can have assumed features. */ - public boolean featuresCanBeAssumedForNode(DiscoveryNode node) { + public static boolean featuresCanBeAssumedForNode(DiscoveryNode node) { return ClusterFeatures.featuresCanBeAssumedForNode(node); } /** * Returns {@code true} if one or more nodes in {@code nodes} can have assumed features. 
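Both helpers now delegate straight to ClusterFeatures, so making them static lets callers drop the injected FeatureService, which is exactly what the health-service diffs below do. A hypothetical call-site migration (sketch, assuming the server classes on the classpath and a ClusterState named state):

    // before: this.featureService.featuresCanBeAssumedForNodes(state.nodes());
    // after (static; no injected FeatureService needed):
    boolean assumed = FeatureService.featuresCanBeAssumedForNodes(state.nodes());
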
*/ - public boolean featuresCanBeAssumedForNodes(DiscoveryNodes nodes) { + public static boolean featuresCanBeAssumedForNodes(DiscoveryNodes nodes) { return ClusterFeatures.featuresCanBeAssumedForNodes(nodes); } diff --git a/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java b/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java index 0d30e157a3a09..1c3ac11fb50ed 100644 --- a/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java +++ b/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.GatewayService; import java.util.List; @@ -50,7 +49,6 @@ public class HealthMetadataService { private static final Logger logger = LogManager.getLogger(HealthMetadataService.class); private final ClusterService clusterService; - private final FeatureService featureService; private final ClusterStateListener clusterStateListener; private final MasterServiceTaskQueue taskQueue; private volatile boolean enabled; @@ -64,17 +62,16 @@ public class HealthMetadataService { // ClusterState to maintain an up-to-date version of it across the cluster. private volatile HealthMetadata localHealthMetadata; - private HealthMetadataService(ClusterService clusterService, FeatureService featureService, Settings settings) { + private HealthMetadataService(ClusterService clusterService, Settings settings) { this.clusterService = clusterService; - this.featureService = featureService; this.clusterStateListener = this::updateOnClusterStateChange; this.enabled = ENABLED_SETTING.get(settings); this.localHealthMetadata = initialHealthMetadata(settings); this.taskQueue = clusterService.createTaskQueue("health metadata service", Priority.NORMAL, new Executor()); } - public static HealthMetadataService create(ClusterService clusterService, FeatureService featureService, Settings settings) { - HealthMetadataService healthMetadataService = new HealthMetadataService(clusterService, featureService, settings); + public static HealthMetadataService create(ClusterService clusterService, Settings settings) { + HealthMetadataService healthMetadataService = new HealthMetadataService(clusterService, settings); healthMetadataService.registerListeners(); return healthMetadataService; } diff --git a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java index c975e1d1abd91..841973911d150 100644 --- a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; @@ -73,11 +72,9 @@ public class DiskHealthIndicatorService implements HealthIndicatorService { private static final String IMPACT_CLUSTER_FUNCTIONALITY_UNAVAILABLE_ID = "cluster_functionality_unavailable"; private final ClusterService clusterService; - 
private final FeatureService featureService; - public DiskHealthIndicatorService(ClusterService clusterService, FeatureService featureService) { + public DiskHealthIndicatorService(ClusterService clusterService) { this.clusterService = clusterService; - this.featureService = featureService; } @Override diff --git a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java index 7c37a0ce5d927..48cbbb188dd91 100644 --- a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskParams; @@ -60,22 +59,15 @@ public final class HealthNodeTaskExecutor extends PersistentTasksExecutor currentTask = new AtomicReference<>(); private final ClusterStateListener taskStarter; private final ClusterStateListener shutdownListener; private volatile boolean enabled; - private HealthNodeTaskExecutor( - ClusterService clusterService, - PersistentTasksService persistentTasksService, - FeatureService featureService, - Settings settings - ) { + private HealthNodeTaskExecutor(ClusterService clusterService, PersistentTasksService persistentTasksService, Settings settings) { super(TASK_NAME, clusterService.threadPool().executor(ThreadPool.Names.MANAGEMENT)); this.clusterService = clusterService; this.persistentTasksService = persistentTasksService; - this.featureService = featureService; this.taskStarter = this::startTask; this.shutdownListener = this::shuttingDown; this.enabled = ENABLED_SETTING.get(settings); @@ -84,16 +76,10 @@ private HealthNodeTaskExecutor( public static HealthNodeTaskExecutor create( ClusterService clusterService, PersistentTasksService persistentTasksService, - FeatureService featureService, Settings settings, ClusterSettings clusterSettings ) { - HealthNodeTaskExecutor healthNodeTaskExecutor = new HealthNodeTaskExecutor( - clusterService, - persistentTasksService, - featureService, - settings - ); + HealthNodeTaskExecutor healthNodeTaskExecutor = new HealthNodeTaskExecutor(clusterService, persistentTasksService, settings); healthNodeTaskExecutor.registerListeners(clusterSettings); return healthNodeTaskExecutor; } diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 2cb49416580d2..aeca887947ce2 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -38,7 +38,6 @@ import org.elasticsearch.core.RefCounted; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; import org.elasticsearch.tasks.Task; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ThreadPool; @@ -484,25 +483,22 @@ void dispatchRequest(final RestRequest restRequest, final RestChannel channel, f if (badRequestCause != null) { dispatcher.dispatchBadRequest(channel, 
threadContext, badRequestCause); } else { - populatePerRequestThreadContext0(restRequest, channel, threadContext); + try { + populatePerRequestThreadContext(restRequest, threadContext); + } catch (Exception e) { + try { + dispatcher.dispatchBadRequest(channel, threadContext, e); + } catch (Exception inner) { + inner.addSuppressed(e); + logger.error(() -> "failed to send failure response for uri [" + restRequest.uri() + "]", inner); + } + return; + } dispatcher.dispatchRequest(restRequest, channel, threadContext); } } } - private void populatePerRequestThreadContext0(RestRequest restRequest, RestChannel channel, ThreadContext threadContext) { - try { - populatePerRequestThreadContext(restRequest, threadContext); - } catch (Exception e) { - try { - channel.sendResponse(new RestResponse(channel, e)); - } catch (Exception inner) { - inner.addSuppressed(e); - logger.error(() -> "failed to send failure response for uri [" + restRequest.uri() + "]", inner); - } - } - } - protected void populatePerRequestThreadContext(RestRequest restRequest, ThreadContext threadContext) {} private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChannel httpChannel, final Exception exception) { diff --git a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java index f3d58fe4b051f..1f55b92982abe 100644 --- a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java @@ -28,7 +28,6 @@ import java.util.Iterator; import java.util.List; import java.util.function.Consumer; -import java.util.function.Supplier; import static org.elasticsearch.core.Strings.format; @@ -138,6 +137,24 @@ public void indexShardStateChanged( } } + @Override + public void beforeIndexShardMutableOperation(IndexShard indexShard, ActionListener<Void> listener) { + iterateBeforeIndexShardMutableOperation(indexShard, listener.delegateResponse((l, e) -> { + logger.warn(() -> format("%s failed to invoke the listener before ensuring shard mutability", indexShard.shardId()), e); + l.onFailure(e); + })); + } + + private void iterateBeforeIndexShardMutableOperation(IndexShard indexShard, ActionListener<Void> outerListener) { + callListeners( + indexShard, + listeners.stream() + .map(iel -> (Consumer<ActionListener<Void>>) (l) -> iel.beforeIndexShardMutableOperation(indexShard, l)) + .iterator(), + outerListener + ); + } + @Override public void beforeIndexCreated(Index index, Settings indexSettings) { for (IndexEventListener listener : listeners) { @@ -351,15 +368,4 @@ public void afterFilesRestoredFromRepository(IndexShard indexShard) { } } - @Override - public void onAcquirePrimaryOperationPermit(IndexShard indexShard, Supplier<ActionListener<Void>> onPermitAcquiredListenerSupplier) { - for (IndexEventListener listener : listeners) { - try { - listener.onAcquirePrimaryOperationPermit(indexShard, onPermitAcquiredListenerSupplier); - } catch (Exception e) { - logger.warn(() -> "[" + indexShard.shardId() + "] failed to invoke the listener on acquiring a primary permit", e); - throw e; - } - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index 7287a0bf307b9..04bf820926660 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -29,7 +29,6 @@ import org.elasticsearch.index.mapper.MapperService; import
org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MetadataFieldMapper; -import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.ProvidedIdFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.RoutingFields; @@ -156,9 +155,6 @@ private static String error(Setting unsupported) { @Override public void validateMapping(MappingLookup lookup) { - if (lookup.nestedLookup() != NestedLookup.EMPTY) { - throw new IllegalArgumentException("cannot have nested fields when index is in " + tsdbMode()); - } if (((RoutingFieldMapper) lookup.getMapper(RoutingFieldMapper.NAME)).required()) { throw new IllegalArgumentException(routingRequiredBad()); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 5512dffdda53e..baba9e94db7a7 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -232,7 +232,8 @@ public IndexService( mapperMetrics ); this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService); - if (indexSettings.getIndexSortConfig().hasIndexSort()) { + boolean sourceOnly = Boolean.parseBoolean(indexSettings.getSettings().get("index.source_only")); + if (indexSettings.getIndexSortConfig().hasIndexSort() && sourceOnly == false) { // we delay the actual creation of the sort order for this index because the mapping has not been merged yet. // The sort order is validated right after the merge of the mapping later in the process. this.indexSortSupplier = () -> indexSettings.getIndexSortConfig() diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 4895930eaefe4..4c7bda1c52f17 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.Mapper; @@ -39,6 +40,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; @@ -683,6 +685,14 @@ public boolean isES87TSDBCodecEnabled() { Property.Final ); + public static final FeatureFlag DOC_VALUES_SKIPPER = new FeatureFlag("doc_values_skipper"); + public static final Setting USE_DOC_VALUES_SKIPPER = Setting.boolSetting( + "index.mapping.use_doc_values_skipper", + IndexSettings.DOC_VALUES_SKIPPER.isEnabled(), + Property.IndexScope, + Property.Final + ); + /** * The {@link IndexMode "mode"} of the index. 
*/ @@ -722,9 +732,25 @@ public Iterator<Setting<?>> settings() { Setting.Property.ServerlessPublic ); + public static final FeatureFlag RECOVERY_USE_SYNTHETIC_SOURCE = new FeatureFlag("index_recovery_use_synthetic_source"); public static final Setting<Boolean> RECOVERY_USE_SYNTHETIC_SOURCE_SETTING = Setting.boolSetting( "index.recovery.use_synthetic_source", - false, + settings -> { + boolean isSyntheticSourceRecoveryFeatureFlagEnabled = RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled(); + boolean isNewIndexVersion = SETTING_INDEX_VERSION_CREATED.get(settings) + .onOrAfter(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT); + boolean isIndexVersionInBackportRange = SETTING_INDEX_VERSION_CREATED.get(settings) + .between(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0); + + boolean useSyntheticRecoverySource = isSyntheticSourceRecoveryFeatureFlagEnabled + && (isNewIndexVersion || isIndexVersionInBackportRange); + + return String.valueOf( + useSyntheticRecoverySource + && Objects.equals(INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings), SourceFieldMapper.Mode.SYNTHETIC) + ); + + }, new Setting.Validator<>() { @Override public void validate(Boolean value) {} @@ -904,6 +930,7 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { private final SourceFieldMapper.Mode indexMappingSourceMode; private final boolean recoverySourceEnabled; private final boolean recoverySourceSyntheticEnabled; + private final boolean useDocValuesSkipper; /** * The maximum number of refresh listeners allows on this shard. @@ -1083,7 +1110,9 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); indexMappingSourceMode = scopedSettings.get(INDEX_MAPPER_SOURCE_MODE_SETTING); recoverySourceEnabled = RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(nodeSettings); - recoverySourceSyntheticEnabled = scopedSettings.get(RECOVERY_USE_SYNTHETIC_SOURCE_SETTING); + recoverySourceSyntheticEnabled = DiscoveryNode.isStateless(nodeSettings) == false + && scopedSettings.get(RECOVERY_USE_SYNTHETIC_SOURCE_SETTING); + useDocValuesSkipper = DOC_VALUES_SKIPPER.isEnabled() && scopedSettings.get(USE_DOC_VALUES_SKIPPER); if (recoverySourceSyntheticEnabled) { if (DiscoveryNode.isStateless(settings)) { throw new IllegalArgumentException("synthetic recovery source is only allowed in stateful"); @@ -1803,6 +1832,10 @@ public boolean isRecoverySourceSyntheticEnabled() { return recoverySourceSyntheticEnabled; } + public boolean useDocValuesSkipper() { + return useDocValuesSkipper; + } + /** * The bounds for {@code @timestamp} on this index or * {@code null} if there are no bounds.
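The new default for index.recovery.use_synthetic_source above folds three conditions into one value. A standalone distillation of the same decision logic (hypothetical class and parameter names of mine; the logic follows the lambda in the diff):

    // Sketch: synthetic recovery source defaults to on only when the feature
    // flag is enabled, the index version supports it (new version or the 8.x
    // backport range), and the index uses synthetic _source.
    class SyntheticRecoveryDefault {
        static boolean compute(boolean featureFlag, boolean onOrAfterDefaultVersion,
                               boolean inBackportRange, boolean syntheticSource) {
            boolean versionSupported = onOrAfterDefaultVersion || inBackportRange;
            return featureFlag && versionSupported && syntheticSource;
        }

        public static void main(String[] args) {
            System.out.println(compute(true, true, false, true));  // true
            System.out.println(compute(true, false, false, true)); // false: index version too old
        }
    }
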
diff --git a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java index 94d1cc5182457..28de14ec04221 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java @@ -302,6 +302,15 @@ private static void validateIndexSortField(SortField sortField) { } } + public boolean hasSortOnField(final String fieldName) { + for (FieldSortSpec sortSpec : sortSpecs) { + if (sortSpec.field.equals(fieldName)) { + return true; + } + } + return false; + } + public static class FieldSortSpec { final String field; SortOrder order; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index e801a07f11670..64f4c356bb124 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -24,7 +24,6 @@ import java.util.TreeMap; import java.util.TreeSet; import java.util.function.IntFunction; -import java.util.stream.Collectors; @SuppressWarnings("deprecation") public class IndexVersions { @@ -108,41 +107,45 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_LUCENE_9_9_1 = def(8_500_008, Version.LUCENE_9_9_1); public static final IndexVersion ES_VERSION_8_12_1 = def(8_500_009, Version.LUCENE_9_9_1); public static final IndexVersion UPGRADE_8_12_1_LUCENE_9_9_2 = def(8_500_010, Version.LUCENE_9_9_2); - public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_00_0, Version.LUCENE_9_9_1); - public static final IndexVersion UPGRADE_LUCENE_9_9_2 = def(8_502_00_0, Version.LUCENE_9_9_2); - public static final IndexVersion TIME_SERIES_ID_HASHING = def(8_502_00_1, Version.LUCENE_9_9_2); - public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_00_1, Version.LUCENE_9_10_0); - public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion SEMANTIC_TEXT_FIELD_TYPE = def(8_507_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_11_1 = def(8_511_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion INDEX_SORTING_ON_NESTED = def(8_512_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion LENIENT_UPDATEABLE_SYNONYMS = def(8_513_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion ENABLE_IGNORE_MALFORMED_LOGSDB = def(8_514_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_00_0, Version.LUCENE_9_12_0); - 
public static final IndexVersion ADD_ROLE_MAPPING_CLEANUP_MIGRATION = def(8_518_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT_BACKPORT = def(8_519_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID_BACKPORT = def(8_520_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion V8_DEPRECATE_SOURCE_MODE_MAPPER = def(8_521_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT = def(8_522_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_12_1 = def(8_523_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion INFERENCE_METADATA_FIELDS_BACKPORT = def(8_524_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME_BACKPORT = def(8_525_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_0_00, Version.LUCENE_9_9_1); + public static final IndexVersion UPGRADE_LUCENE_9_9_2 = def(8_502_0_00, Version.LUCENE_9_9_2); + public static final IndexVersion TIME_SERIES_ID_HASHING = def(8_502_0_01, Version.LUCENE_9_9_2); + public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_0_01, Version.LUCENE_9_10_0); + public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion SEMANTIC_TEXT_FIELD_TYPE = def(8_507_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_11_1 = def(8_511_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion INDEX_SORTING_ON_NESTED = def(8_512_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion LENIENT_UPDATEABLE_SYNONYMS = def(8_513_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion ENABLE_IGNORE_MALFORMED_LOGSDB = def(8_514_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_0_00, 
Version.LUCENE_9_11_1); + public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion ADD_ROLE_MAPPING_CLEANUP_MIGRATION = def(8_518_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT_BACKPORT = def(8_519_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID_BACKPORT = def(8_520_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion V8_DEPRECATE_SOURCE_MODE_MAPPER = def(8_521_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT = def(8_522_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_12_1 = def(8_523_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion INFERENCE_METADATA_FIELDS_BACKPORT = def(8_524_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME_BACKPORT = def(8_525_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT_BACKPORT = def(8_526_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion HOSTNAME_DOC_VALUES_SPARSE_INDEX = def(9_008_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_0_00, Version.LUCENE_10_1_0); + public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT = def(9_010_0_00, Version.LUCENE_10_1_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ @@ -158,17 +161,17 @@ private static Version parseUnchecked(String version) { * To add a new index version, add a new constant at the bottom of the list, above this comment. Don't add other lines, * comments, etc. The version id has the following layout: * - * M_NNN_SS_P + * M_NNN_S_PP * * M - The major version of Elasticsearch * NNN - The server version part - * SS - The serverless version part. It should always be 00 here, it is used by serverless only. - * P - The patch version part + * S - The subsidiary version part. It should always be 0 here, it is only used in subsidiary repositories.
+ * PP - The patch version part * * To determine the id of the next IndexVersion constant, do the following: * - Use the same major version, unless bumping majors * - Bump the server version part by 1, unless creating a patch version - * - Leave the serverless part as 00 + * - Leave the subsidiary part as 0 * - Bump the patch part if creating a patch version * * If a patch version is created, it should be placed sorted among the other existing constants. @@ -248,10 +251,6 @@ static NavigableMap getAllVersionIds(Class cls) { return Collections.unmodifiableNavigableMap(builder); } - static Collection getAllWriteVersions() { - return VERSION_IDS.values().stream().filter(v -> v.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE)).collect(Collectors.toSet()); - } - static Collection getAllVersions() { return VERSION_IDS.values(); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java index 505e39a9590ef..a93a480463564 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -233,7 +233,7 @@ public static List getWordList( } } - final Path path = env.configFile().resolve(wordListPath); + final Path path = env.configDir().resolve(wordListPath); try { return loadWordList(path, removeComments); @@ -337,7 +337,7 @@ public static Reader getReaderFromFile(Environment env, String filePath, String if (filePath == null) { return null; } - final Path path = env.configFile().resolve(filePath); + final Path path = env.configDir().resolve(filePath); try { return Files.newBufferedReader(path, StandardCharsets.UTF_8); } catch (CharacterCodingException ex) { diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index c1c392ac07f18..06949a967eccd 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; @@ -46,7 +46,7 @@ public class CodecService implements CodecProvider { public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, mapperService, bigArrays); + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene101Codec.Mode.BEST_SPEED, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { @@ -58,7 +58,7 @@ public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) ); - Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + Codec legacyBestCompressionCodec = new 
LegacyPerFieldMapperCodec(Lucene101Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 9f46050f68f99..d58c4e2cdc34a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -10,12 +10,12 @@ package org.elasticsearch.index.codec; import org.apache.lucene.backward_codecs.lucene912.Lucene912Codec; +import org.apache.lucene.backward_codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java index 4154a242c15ed..04428d5b37fba 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java @@ -9,13 +9,13 @@ package org.elasticsearch.index.codec; +import org.apache.lucene.backward_codecs.lucene100.Lucene100Codec; +import org.apache.lucene.backward_codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; @@ -23,8 +23,8 @@ import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; /** - * Elasticsearch codec as of 9.0. This extends the Lucene 10.0 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See - * {@link Zstd814StoredFieldsFormat}. + * Elasticsearch codec as of 9.0-snapshot relying on Lucene 10.0. This extends the Lucene 10.0 codec to compressed stored fields + * with ZSTD instead of LZ4/DEFLATE. See {@link Zstd814StoredFieldsFormat}. */ public class Elasticsearch900Codec extends CodecService.DeduplicateFieldInfosCodec { diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Lucene101Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Lucene101Codec.java new file mode 100644 index 0000000000000..ae7fa481a1caa --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Lucene101Codec.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; +import org.apache.lucene.codecs.lucene101.Lucene101PostingsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 9.0 relying on Lucene 10.1. This extends the Lucene 10.1 codec to compressed + * stored fields with ZSTD instead of LZ4/DEFLATE. See {@link Zstd814StoredFieldsFormat}. + */ +public class Elasticsearch900Lucene101Codec extends CodecService.DeduplicateFieldInfosCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch900Lucene101Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch900Lucene101Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch900Lucene101Codec.this.getKnnVectorsFormatForField(field); + } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch900Lucene101Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. 
+ */ + public Elasticsearch900Lucene101Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch900Lucene101", new Lucene101Codec()); + this.storedFieldsFormat = mode.getFormat(); + this.defaultPostingsFormat = new Lucene101PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *
<p>The default implementation always returns "Lucene101". + + * <p>WARNING: if you subclass, you are responsible for index backwards compatibility: + future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field. + * + *
<p>The default implementation always returns "Lucene90". + + * <p>WARNING: if you subclass, you are responsible for index backwards compatibility: + future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field. + * + *
<p>The default implementation always returns "Lucene99HnswVectorsFormat". + + * <p>
WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java index bf2c5a9f01e29..9e4ecb1a46c17 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.MapperService; @@ -22,11 +22,11 @@ * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new * changes in {@link PerFieldMapperCodec}. */ -public final class LegacyPerFieldMapperCodec extends Lucene100Codec { +public final class LegacyPerFieldMapperCodec extends Lucene101Codec { private final PerFieldFormatSupplier formatSupplier; - public LegacyPerFieldMapperCodec(Lucene100Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public LegacyPerFieldMapperCodec(Lucene101Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java index 4d3d37ab4f3af..21721b68dbcac 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java @@ -60,18 +60,18 @@ private PostingsFormat internalGetPostingsFormatForField(String field) { if (mapperService != null) { Mapper mapper = mapperService.mappingLookup().getMapper(field); if (mapper instanceof CompletionFieldMapper) { - return PostingsFormatHolder.POSTINGS_FORMAT; + return CompletionPostingsFormatHolder.POSTINGS_FORMAT; } } // return our own posting format using PFOR return es812PostingsFormat; } - private static class PostingsFormatHolder { - private static final PostingsFormat POSTINGS_FORMAT = getPostingsFormat(); + private static class CompletionPostingsFormatHolder { + private static final PostingsFormat POSTINGS_FORMAT = getCompletionPostingsFormat(); - private static PostingsFormat getPostingsFormat() { - String defaultName = "Completion912"; // Caution: changing this name will result in exceptions if a field is created during a + private static PostingsFormat getCompletionPostingsFormat() { + String defaultName = "Completion101"; // Caution: changing this name will result in exceptions if a field is created during a // rolling upgrade and the new codec (specified by the name) is not available on all nodes in the cluster. 
String codecName = ExtensionLoader.loadSingleton(ServiceLoader.load(CompletionsPostingsFormatExtension.class)) .map(CompletionsPostingsFormatExtension::getFormatName) diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index b60b88da5949d..9a3055f96bba8 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -26,7 +26,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public final class PerFieldMapperCodec extends Elasticsearch900Codec { +public final class PerFieldMapperCodec extends Elasticsearch900Lucene101Codec { private final PerFieldFormatSupplier formatSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 36fd18144ad6e..0589741a70281 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -2346,4 +2346,8 @@ public record FlushResult(boolean flushPerformed, long generation) { public void prepareForEngineReset() throws IOException { throw new UnsupportedOperationException("does not support engine reset"); } + + public long getLastUnsafeSegmentGenerationForGets() { + throw new UnsupportedOperationException("Doesn't support getting the latest segment generation"); + } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index d3d7dcd8e930f..7f6fe40dbaaf0 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -825,7 +825,8 @@ private GetResult getFromTranslog( mappingLookup, documentParser, config(), - translogInMemorySegmentsCount::incrementAndGet + translogInMemorySegmentsCount::incrementAndGet, + false ); final Searcher searcher = new Searcher( "realtime_get", diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java index 20154c20b3634..73a92869e31ba 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneSyntheticSourceChangesSnapshot.java @@ -9,6 +9,8 @@ package org.elasticsearch.index.engine; +import com.carrotsearch.hppc.IntArrayList; + import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; @@ -191,8 +193,28 @@ private Translog.Operation[] loadDocuments(List documentRecords) t maxDoc = leafReaderContext.reader().maxDoc(); } while (docRecord.docID() >= docBase + maxDoc); - leafFieldLoader = storedFieldLoader.getLoader(leafReaderContext, null); - leafSourceLoader = sourceLoader.leaf(leafReaderContext.reader(), null); + // TODO: instead of building an array, consider just checking whether doc ids are dense. + // Note, field loaders then would lose the ability to optionally eagerly loading values. 
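The TODO above suggests replacing the materialized array with a plain density check. A standalone sketch of what such a check could look like (hypothetical helper of mine, not part of this diff):

    // A batch of segment doc ids is "dense" when it forms a contiguous
    // ascending run; that is exactly when a sequential stored-field reader
    // pays off, since it avoids re-decompressing the same block repeatedly.
    class DocIdDensity {
        static boolean isDense(int[] docIds) {
            for (int i = 1; i < docIds.length; i++) {
                if (docIds[i] != docIds[i - 1] + 1) {
                    return false;
                }
            }
            return docIds.length > 0;
        }

        public static void main(String[] args) {
            System.out.println(isDense(new int[] { 4, 5, 6 })); // true
            System.out.println(isDense(new int[] { 4, 6, 7 })); // false
        }
    }
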
+ IntArrayList nextDocIds = new IntArrayList(); + for (int j = i; j < documentRecords.size(); j++) { + var record = documentRecords.get(j); + if (record.isTombstone()) { + continue; + } + int docID = record.docID(); + if (docID >= docBase + maxDoc) { + break; + } + int segmentDocID = docID - docBase; + nextDocIds.add(segmentDocID); + } + + // This computed doc ids array is used by the stored field loader as a heuristic to determine whether to use a sequential + // stored field reader (which bulk loads stored fields and avoids decompressing the same blocks multiple times). For + // the source loader, it is also used as a heuristic for bulk reading doc values (e.g. SingletonDocValuesLoader). + int[] nextDocIdArray = nextDocIds.toArray(); + leafFieldLoader = storedFieldLoader.getLoader(leafReaderContext, nextDocIdArray); + leafSourceLoader = sourceLoader.leaf(leafReaderContext.reader(), nextDocIdArray); setNextSourceMetadataReader(leafReaderContext); } int segmentDocID = docRecord.docID() - docBase; diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index ac5bf31c2b730..598fb076ba222 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -88,7 +88,8 @@ static DirectoryReader create( MappingLookup mappingLookup, DocumentParser documentParser, EngineConfig engineConfig, - Runnable onSegmentCreated + Runnable onSegmentCreated, + boolean forceSynthetic ) throws IOException { final Directory directory = new ByteBuffersDirectory(); boolean success = false; @@ -97,7 +98,7 @@ static DirectoryReader create( // When using synthetic source, the translog operation must always be reindexed into an in-memory Lucene to ensure consistent // output for realtime-get operations. However, this can degrade the performance of realtime-get and update operations. // If slight inconsistencies in realtime-get operations are acceptable, the translog operation can be reindexed lazily.
- if (mappingLookup.isSourceSynthetic()) { + if (mappingLookup.isSourceSynthetic() || forceSynthetic) { onSegmentCreated.run(); leafReader = createInMemoryReader(shardId, engineConfig, directory, documentParser, mappingLookup, false, operation); } else { diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java index 4170d06c4d6ea..d8b51648cb586 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java @@ -38,7 +38,8 @@ public boolean assertSameIndexOperation(Translog.Index o1, Translog.Index o2) th if (super.assertSameIndexOperation(o1, o2)) { return true; } - if (engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled()) { + if (engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled() + || engineConfig.getMapperService().mappingLookup().inferenceFields().isEmpty() == false) { return super.assertSameIndexOperation(synthesizeSource(engineConfig, o1), o2) || super.assertSameIndexOperation(o1, synthesizeSource(engineConfig, o2)); } @@ -51,7 +52,7 @@ static Translog.Index synthesizeSource(EngineConfig engineConfig, Translog.Index final ShardId shardId = engineConfig.getShardId(); final MappingLookup mappingLookup = engineConfig.getMapperService().mappingLookup(); final DocumentParser documentParser = engineConfig.getMapperService().documentParser(); - try (var reader = TranslogDirectoryReader.create(shardId, op, mappingLookup, documentParser, engineConfig, () -> {})) { + try (var reader = TranslogDirectoryReader.create(shardId, op, mappingLookup, documentParser, engineConfig, () -> {}, true)) { final Engine.Searcher searcher = new Engine.Searcher( "assert_translog", reader, @@ -60,19 +61,7 @@ static Translog.Index synthesizeSource(EngineConfig engineConfig, Translog.Index TrivialQueryCachingPolicy.NEVER, () -> {} ); - try ( - LuceneSyntheticSourceChangesSnapshot snapshot = new LuceneSyntheticSourceChangesSnapshot( - engineConfig.getMapperService(), - searcher, - LuceneSyntheticSourceChangesSnapshot.DEFAULT_BATCH_SIZE, - Integer.MAX_VALUE, - op.seqNo(), - op.seqNo(), - true, - false, - engineConfig.getIndexSettings().getIndexVersionCreated() - ) - ) { + try (var snapshot = newSnapshot(engineConfig, op, searcher);) { final Translog.Operation normalized = snapshot.next(); assert normalized != null : "expected one operation; got zero"; return (Translog.Index) normalized; @@ -80,6 +69,34 @@ static Translog.Index synthesizeSource(EngineConfig engineConfig, Translog.Index } } + static Translog.Snapshot newSnapshot(EngineConfig engineConfig, Translog.Index op, Engine.Searcher searcher) throws IOException { + if (engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled()) { + return new LuceneSyntheticSourceChangesSnapshot( + engineConfig.getMapperService(), + searcher, + LuceneSyntheticSourceChangesSnapshot.DEFAULT_BATCH_SIZE, + Integer.MAX_VALUE, + op.seqNo(), + op.seqNo(), + true, + false, + engineConfig.getIndexSettings().getIndexVersionCreated() + ); + } else { + return new LuceneChangesSnapshot( + engineConfig.getMapperService(), + searcher, + LuceneSyntheticSourceChangesSnapshot.DEFAULT_BATCH_SIZE, + op.seqNo(), + op.seqNo(), + true, + false, + false, + engineConfig.getIndexSettings().getIndexVersionCreated() + ); + } + } + public boolean assertSameIndexOperation(Translog.Index o1, Translog.Index o2) throws IOException { 
diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java
index 9ed9fc6dabf3d..f0f56333c7529 100644
--- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java
+++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java
@@ -21,7 +21,6 @@
 import org.elasticsearch.common.metrics.MeanMetric;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.engine.Engine;
@@ -93,7 +92,7 @@ public GetResult get(
         FetchSourceContext fetchSourceContext,
         boolean forceSyntheticSource
     ) throws IOException {
-        return get(
+        return doGet(
             id,
             gFields,
             realtime,
@@ -107,7 +106,7 @@ public GetResult get(
         );
     }
 
-    public GetResult get(
+    public GetResult mget(
         String id,
         String[] gFields,
         boolean realtime,
@@ -117,7 +116,7 @@ public GetResult get(
         boolean forceSyntheticSource,
         MultiEngineGet mget
     ) throws IOException {
-        return get(
+        return doGet(
             id,
             gFields,
             realtime,
@@ -131,7 +130,7 @@ public GetResult get(
         );
     }
 
-    private GetResult get(
+    private GetResult doGet(
         String id,
         String[] gFields,
         boolean realtime,
@@ -144,21 +143,40 @@ private GetResult get(
         Function<Engine.Get, Engine.GetResult> engineGetOperator
     ) throws IOException {
         currentMetric.inc();
+        final long now = System.nanoTime();
         try {
-            long now = System.nanoTime();
-            GetResult getResult = innerGet(
-                id,
-                gFields,
-                realtime,
-                version,
-                versionType,
-                ifSeqNo,
-                ifPrimaryTerm,
-                fetchSourceContext,
-                forceSyntheticSource,
-                engineGetOperator
-            );
+            var engineGet = new Engine.Get(realtime, realtime, id).version(version)
+                .versionType(versionType)
+                .setIfSeqNo(ifSeqNo)
+                .setIfPrimaryTerm(ifPrimaryTerm);
+            final GetResult getResult;
+            try (Engine.GetResult get = engineGetOperator.apply(engineGet)) {
+                if (get == null) {
+                    getResult = null;
+                } else if (get.exists() == false) {
+                    getResult = new GetResult(
+                        shardId.getIndexName(),
+                        id,
+                        UNASSIGNED_SEQ_NO,
+                        UNASSIGNED_PRIMARY_TERM,
+                        -1,
+                        false,
+                        null,
+                        null,
+                        null
+                    );
+                } else {
+                    // break between having loaded it from translog (so we only have _source), and having a document to load
+                    getResult = innerGetFetch(
+                        id,
+                        gFields,
+                        normalizeFetchSourceContent(fetchSourceContext, gFields),
+                        get,
+                        forceSyntheticSource
+                    );
+                }
+            }
             if (getResult != null && getResult.isExists()) {
                 existsMetric.inc(System.nanoTime() - now);
             } else {
@@ -179,7 +197,7 @@ public GetResult getFromTranslog(
         FetchSourceContext fetchSourceContext,
         boolean forceSyntheticSource
     ) throws IOException {
-        return get(
+        return doGet(
             id,
             gFields,
             realtime,
@@ -193,12 +211,8 @@ public GetResult getFromTranslog(
         );
     }
 
-    public GetResult getForUpdate(String id, long ifSeqNo, long ifPrimaryTerm) throws IOException {
-        return getForUpdate(id, ifSeqNo, ifPrimaryTerm, new String[] { RoutingFieldMapper.NAME });
-    }
-
     public GetResult getForUpdate(String id, long ifSeqNo, long ifPrimaryTerm, String[] gFields) throws IOException {
-        return get(
+        return doGet(
             id,
             gFields,
             true,
@@ -259,35 +273,6 @@ private static FetchSourceContext normalizeFetchSourceContent(@Nullable FetchSou
         return FetchSourceContext.DO_NOT_FETCH_SOURCE;
     }
 
-    private GetResult innerGet(
-        String id,
-        String[] gFields,
-        boolean realtime,
-        long version,
-        VersionType versionType,
-        long ifSeqNo,
-        long ifPrimaryTerm,
-        FetchSourceContext fetchSourceContext,
-        boolean forceSyntheticSource,
-        Function<Engine.Get, Engine.GetResult> engineGetOperator
-    ) throws IOException {
-        fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, gFields);
-        var engineGet = new Engine.Get(realtime, realtime, id).version(version)
-            .versionType(versionType)
-            .setIfSeqNo(ifSeqNo)
-            .setIfPrimaryTerm(ifPrimaryTerm);
-        try (Engine.GetResult get = engineGetOperator.apply(engineGet)) {
-            if (get == null) {
-                return null;
-            }
-            if (get.exists() == false) {
-                return new GetResult(shardId.getIndexName(), id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null, null);
-            }
-            // break between having loaded it from translog (so we only have _source), and having a document to load
-            return innerGetFetch(id, gFields, fetchSourceContext, get, forceSyntheticSource);
-        }
-    }
-
     private GetResult innerGetFetch(
         String id,
         String[] storedFields,
@@ -298,7 +283,6 @@ private GetResult innerGetFetch(
         assert get.exists() : "method should only be called if document could be retrieved";
         // check first if stored fields to be loaded don't contain an object field
         MappingLookup mappingLookup = mapperService.mappingLookup();
-        final IndexVersion indexVersion = indexSettings.getIndexVersionCreated();
         final Set<String> storedFieldSet = new HashSet<>();
         boolean hasInferenceMetadataFields = false;
         if (storedFields != null) {
@@ -338,6 +322,9 @@ private GetResult innerGetFetch(
             throw new ElasticsearchException("Failed to get id [" + id + "]", e);
         }
 
+        final boolean supportDocValuesForIgnoredMetaField = indexSettings.getIndexVersionCreated()
+            .onOrAfter(IndexVersions.DOC_VALUES_FOR_IGNORED_META_FIELD);
+
         // put stored fields into result objects
         if (leafStoredFieldLoader.storedFields().isEmpty() == false) {
             Set<String> needed = new HashSet<>();
@@ -351,8 +338,7 @@ private GetResult innerGetFetch(
                 if (false == needed.contains(entry.getKey())) {
                     continue;
                 }
-                if (IgnoredFieldMapper.NAME.equals(entry.getKey())
-                    && indexVersion.onOrAfter(IndexVersions.DOC_VALUES_FOR_IGNORED_META_FIELD)) {
+                if (IgnoredFieldMapper.NAME.equals(entry.getKey()) && supportDocValuesForIgnoredMetaField) {
                     continue;
                 }
                 MappedFieldType ft = mapperService.fieldType(entry.getKey());
@@ -371,9 +357,7 @@ private GetResult innerGetFetch(
         // NOTE: when _ignored is requested via `stored_fields` we need to load it from doc values instead of loading it from stored fields.
         // The _ignored field used to be stored, but as a result of supporting aggregations on it, it moved from using a stored field to
         // using doc values.
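Since `_ignored` now lives in doc values, loading it for one document looks roughly like the following; a hedged sketch against the Lucene doc-values API (the helper name is illustrative, not `loadIgnoredMetadataField` itself):

    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.SortedSetDocValues;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    // Hedged sketch: read every _ignored entry for one document from
    // SortedSetDocValues instead of stored fields (Lucene 10 iteration style).
    static List<String> ignoredValues(LeafReader leafReader, int docId) throws IOException {
        SortedSetDocValues dv = leafReader.getSortedSetDocValues(IgnoredFieldMapper.NAME);
        List<String> values = new ArrayList<>();
        if (dv != null && dv.advanceExact(docId)) {
            for (int i = 0; i < dv.docValueCount(); i++) {
                values.add(dv.lookupOrd(dv.nextOrd()).utf8ToString());
            }
        }
        return values;
    }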
-        if (indexVersion.onOrAfter(IndexVersions.DOC_VALUES_FOR_IGNORED_META_FIELD)
-            && storedFields != null
-            && Arrays.asList(storedFields).contains(IgnoredFieldMapper.NAME)) {
+        if (supportDocValuesForIgnoredMetaField && storedFields != null && Arrays.asList(storedFields).contains(IgnoredFieldMapper.NAME)) {
             final DocumentField ignoredDocumentField = loadIgnoredMetadataField(docIdAndVersion);
             if (ignoredDocumentField != null) {
                 if (metadataFields == null) {
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
index 7b4ceb67f04d7..451da5bfdbaf0 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
@@ -417,6 +417,8 @@ interface BlockFactory {
         SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count);
 
         // TODO support non-singleton ords
+
+        AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count);
     }
 
     /**
@@ -501,4 +503,16 @@ interface SingletonOrdinalsBuilder extends Builder {
          */
         SingletonOrdinalsBuilder appendOrd(int value);
     }
+
+    interface AggregateMetricDoubleBuilder extends Builder {
+
+        DoubleBuilder min();
+
+        DoubleBuilder max();
+
+        DoubleBuilder sum();
+
+        IntBuilder count();
+
+    }
 }
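The new builder exposes one sub-builder per metric so a loader can fill all four columns positionally. A hedged sketch of how a reader might append one decoded aggregate_metric_double value (the helper and its caller are illustrative, not part of this change):

    // Illustrative only: push a single (min, max, sum, count) row into the
    // aggregate metric builder; each sub-builder receives exactly one value
    // per position so the four columns stay aligned.
    static void appendRow(BlockLoader.AggregateMetricDoubleBuilder builder,
                          double min, double max, double sum, int count) {
        builder.min().appendDouble(min);
        builder.max().appendDouble(max);
        builder.sum().appendDouble(sum);
        builder.count().appendInt(count);
    }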
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java
index f0c679d4f4994..af691c61abe2e 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java
@@ -392,7 +392,7 @@ public void parse(DocumentParserContext context) throws IOException {
         // parse
         XContentParser parser = context.parser();
         Token token = parser.currentToken();
-        Map<String, CompletionInputMetadata> inputMap = Maps.newMapWithExpectedSize(1);
+        Map<String, CompletionInputMetadataContainer> inputMap = Maps.newMapWithExpectedSize(1);
 
         if (token == Token.VALUE_NULL) { // ignore null values
             return;
@@ -405,7 +405,7 @@ public void parse(DocumentParserContext context) throws IOException {
         }
 
         // index
-        for (Map.Entry<String, CompletionInputMetadata> completionInput : inputMap.entrySet()) {
+        for (Map.Entry<String, CompletionInputMetadataContainer> completionInput : inputMap.entrySet()) {
             String input = completionInput.getKey();
             if (input.trim().isEmpty()) {
                 context.addIgnoredField(mappedFieldType.name());
@@ -420,21 +420,33 @@ public void parse(DocumentParserContext context) throws IOException {
                 }
                 input = input.substring(0, len);
             }
-            CompletionInputMetadata metadata = completionInput.getValue();
+            CompletionInputMetadataContainer cmc = completionInput.getValue();
             if (fieldType().hasContextMappings()) {
-                fieldType().getContextMappings().addField(context.doc(), fieldType().name(), input, metadata.weight, metadata.contexts);
+                for (CompletionInputMetadata metadata : cmc.getValues()) {
+                    fieldType().getContextMappings().addField(context.doc(), fieldType().name(), input, metadata.weight, metadata.contexts);
+                }
             } else {
-                context.doc().add(new SuggestField(fieldType().name(), input, metadata.weight));
+                context.doc().add(new SuggestField(fieldType().name(), input, cmc.getWeight()));
             }
         }
 
         context.addToFieldNames(fieldType().name());
-        for (CompletionInputMetadata metadata : inputMap.values()) {
-            multiFields().parse(
-                this,
-                context,
-                () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation()))
-            );
+        for (CompletionInputMetadataContainer cmc : inputMap.values()) {
+            if (fieldType().hasContextMappings()) {
+                for (CompletionInputMetadata metadata : cmc.getValues()) {
+                    multiFields().parse(
+                        this,
+                        context,
+                        () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation()))
+                    );
+                }
+            } else {
+                CompletionInputMetadata metadata = cmc.getValue();
+                multiFields().parse(
+                    this,
+                    context,
+                    () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation()))
+                );
+            }
         }
     }
 
@@ -447,11 +459,13 @@ private void parse(
         DocumentParserContext documentParserContext,
         Token token,
         XContentParser parser,
-        Map<String, CompletionInputMetadata> inputMap
+        Map<String, CompletionInputMetadataContainer> inputMap
     ) throws IOException {
         String currentFieldName = null;
         if (token == Token.VALUE_STRING) {
-            inputMap.put(parser.text(), new CompletionInputMetadata(parser.text(), Collections.<String, Set<String>>emptyMap(), 1));
+            CompletionInputMetadataContainer cmc = new CompletionInputMetadataContainer(fieldType().hasContextMappings());
+            cmc.add(new CompletionInputMetadata(parser.text(), Collections.emptyMap(), 1));
+            inputMap.put(parser.text(), cmc);
         } else if (token == Token.START_OBJECT) {
             Set<String> inputs = new HashSet<>();
             int weight = 1;
@@ -531,8 +545,14 @@ private void parse(
                 }
             }
             for (String input : inputs) {
-                if (inputMap.containsKey(input) == false || inputMap.get(input).weight < weight) {
-                    inputMap.put(input, new CompletionInputMetadata(input, contextsMap, weight));
+                CompletionInputMetadata cm = new CompletionInputMetadata(input, contextsMap, weight);
+                CompletionInputMetadataContainer cmc = inputMap.get(input);
+                if (cmc != null) {
+                    cmc.add(cm);
+                } else {
+                    cmc = new CompletionInputMetadataContainer(fieldType().hasContextMappings());
+                    cmc.add(cm);
+                    inputMap.put(input, cmc);
                 }
             }
         } else {
@@ -543,10 +563,46 @@ private void parse(
         }
     }
 
+    static class CompletionInputMetadataContainer {
+        private final boolean hasContexts;
+        private final List<CompletionInputMetadata> list;
+        private CompletionInputMetadata single;
+
+        CompletionInputMetadataContainer(boolean hasContexts) {
+            this.hasContexts = hasContexts;
+            this.list = hasContexts ? new ArrayList<>() : null;
+        }
+
+        void add(CompletionInputMetadata cm) {
+            if (hasContexts) {
+                list.add(cm);
+            } else {
+                if (single == null || single.weight < cm.weight) {
+                    single = cm;
+                }
+            }
+        }
+
+        List<CompletionInputMetadata> getValues() {
+            assert hasContexts;
+            return list;
+        }
+
+        CompletionInputMetadata getValue() {
+            assert hasContexts == false;
+            return single;
+        }
+
+        int getWeight() {
+            assert hasContexts == false;
+            return single.weight;
+        }
+    }
+
     static class CompletionInputMetadata {
-        public final String input;
-        public final Map<String, Set<String>> contexts;
-        public final int weight;
+        private final String input;
+        private final Map<String, Set<String>> contexts;
+        private final int weight;
 
         CompletionInputMetadata(String input, Map<String, Set<String>> contexts, int weight) {
             this.input = input;
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
index cf0c355a22e65..0d488e47c2e4f 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
@@ -164,12 +164,12 @@ public void validate(IndexSettings settings, boolean checkLimits) {
                 throw new IllegalArgumentException("cannot have nested fields when index sort is activated");
             }
             for (String field : settings.getValue(IndexSortConfig.INDEX_SORT_FIELD_SETTING)) {
-                for (NestedObjectMapper nestedObjectMapper : mappers().nestedLookup().getNestedMappers().values()) {
-                    if (field.startsWith(nestedObjectMapper.fullPath())) {
-                        throw new IllegalArgumentException(
-                            "cannot apply index sort to field [" + field + "] under nested object [" + nestedObjectMapper.fullPath() + "]"
-                        );
-                    }
+                NestedObjectMapper nestedMapper = mappers().nestedLookup().getNestedMappers().get(field);
+                String nestedParent = nestedMapper != null ? nestedMapper.fullPath() : mappers().nestedLookup().getNestedParent(field);
+                if (nestedParent != null) {
+                    throw new IllegalArgumentException(
+                        "cannot apply index sort to field [" + field + "] under nested object [" + nestedParent + "]"
+                    );
                 }
             }
         }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
index 51e4e9f4c1b5e..7b5e28dc1fbef 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
@@ -50,6 +50,13 @@ private Wrapper(ObjectMapper parent, DocumentParserContext in) {
             this.in = in;
         }
 
+        // Used to create a copy_to context.
+        // It is important to reset `dynamic` here since it is possible that we copy into a completely different object.
+        private Wrapper(RootObjectMapper root, DocumentParserContext in) {
+            super(root, ObjectMapper.Dynamic.getRootDynamic(in.mappingLookup()), in);
+            this.in = in;
+        }
+
         @Override
         public Iterable<LuceneDocument> nonRootDocuments() {
             return in.nonRootDocuments();
@@ -367,19 +374,6 @@ public final DocumentParserContext maybeCloneForArray(Mapper mapper) throws IOEx
         return this;
     }
 
-    /**
-     * Creates a sub-context from the current {@link DocumentParserContext} to indicate that the source for the sub-context has been
-     * recorded and avoid duplicate recording for parts of the sub-context. Applies to synthetic source only.
-     */
-    public final DocumentParserContext cloneWithRecordedSource() throws IOException {
-        if (canAddIgnoredField()) {
-            DocumentParserContext subcontext = createChildContext(parent());
-            subcontext.setRecordedSource(); // Avoids double-storing parts of the source for the same parser subtree.
-            return subcontext;
-        }
-        return this;
-    }
-
     /**
      * Add the given {@code field} to the _field_names field
      *
@@ -466,10 +460,6 @@ public boolean isCopyToDestinationField(String name) {
         return copyToFields.contains(name);
     }
 
-    public Set<String> getCopyToFields() {
-        return copyToFields;
-    }
-
     /**
     * Add a new mapper dynamically created while parsing.
     *
@@ -675,8 +665,14 @@ public final DocumentParserContext createNestedContext(NestedObjectMapper nested
         if (idField != null) {
             // We just need to store the id as indexed field, so that IndexWriter#deleteDocuments(term) can then
             // delete it when the root document is deleted too.
-            // NOTE: we don't support nested fields in tsdb so it's safe to assume the standard id mapper.
             doc.add(new StringField(IdFieldMapper.NAME, idField.binaryValue(), Field.Store.NO));
+        } else if (indexSettings().getMode() == IndexMode.TIME_SERIES) {
+            // For time series indices, the _id is generated from the _tsid, which in turn is generated from the values of the configured
+            // routing fields. At this point in document parsing, we can't guarantee that we've parsed all the routing fields yet, so the
+            // parent document's _id is not yet available.
+            // So we just add the child document without the parent _id, then in TimeSeriesIdFieldMapper#postParse we set the _id on all
+            // child documents once we've calculated it.
+            assert getRoutingFields().equals(RoutingFields.Noop.INSTANCE) == false;
         } else {
             throw new IllegalStateException("The root document of a nested document should have an _id field");
         }
@@ -706,8 +702,29 @@ public LuceneDocument doc() {
     * @param doc the document to target
     */
    public final DocumentParserContext createCopyToContext(String copyToField, LuceneDocument doc) throws IOException {
+        /*
+        Mark field as containing copied data meaning it should not be present
+        in synthetic _source (to be consistent with stored _source).
+        Ignored source values take precedence over standard synthetic source implementation
+        so by adding the `XContentDataHelper.voidValue()` entry we disable the field in synthetic source.
+        Otherwise, it would be constructed e.g. from doc_values which leads to duplicate values
+        in copied field after reindexing.
+        */
+        if (mappingLookup.isSourceSynthetic() && indexSettings().getSkipIgnoredSourceWrite() == false) {
+            ObjectMapper parent = root().findParentMapper(copyToField);
+            // There are scenarios when this is false:
+            // 1. all values of the field that is the source of copy_to are null
+            // 2. copy_to points at a field inside a disabled object
+            // 3. copy_to points at dynamic field which is not yet applied to mapping, we will process it properly after the dynamic update
+            if (parent != null) {
+                int offset = parent.isRoot() ? 0 : parent.fullPath().length() + 1;
+                ignoredFieldValues.add(new IgnoredSourceFieldMapper.NameValue(copyToField, offset, XContentDataHelper.voidValue(), doc));
+            }
+        }
+
         ContentPath path = new ContentPath();
         XContentParser parser = DotExpandingXContentParser.expandDots(new CopyToParser(copyToField, parser()), path);
+
         return new Wrapper(root(), this) {
             @Override
             public ContentPath path() {
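The void-value marker above prevents a subtle duplication: under synthetic source, a copy_to destination would otherwise be rebuilt from its own doc_values, so reindexing a synthesized document would run copy_to again and double the values. A hypothetical mapping of the shape that triggers this guard (illustrative only, not taken from the PR):

    // Hypothetical mapping: "message" copies into "indexed_text". With synthetic
    // source enabled, "indexed_text" must be suppressed in the synthesized source
    // (via the voidValue() marker) or a reindex would copy the value a second time.
    String mapping = """
        {
          "_source": { "mode": "synthetic" },
          "properties": {
            "message":      { "type": "keyword", "copy_to": "indexed_text" },
            "indexed_text": { "type": "keyword" }
          }
        }
        """;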
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java
new file mode 100644
index 0000000000000..28ea37ef73e33
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java
@@ -0,0 +1,270 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.elasticsearch.search.fetch.StoredFieldsSpec;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+/**
+ * Block loader for fields that use fallback synthetic source implementation.
+ * <p>
+ * Usually fields have doc_values or stored fields and block loaders use them directly. In some cases neither is available
+ * and we would fall back to (potentially synthetic) _source. However, in case of synthetic source, there is actually no need to
+ * construct the entire _source. We know that there is no doc_values and stored fields, and therefore we will be using fallback synthetic
+ * source. That is equivalent to just reading _ignored_source stored field directly and doing an in-place synthetic source just
+ * for this field.
+ * <p>
+ * See {@link IgnoredSourceFieldMapper}.
+ */
+public abstract class FallbackSyntheticSourceBlockLoader implements BlockLoader {
+    private final Reader<?> reader;
+    private final String fieldName;
+
+    protected FallbackSyntheticSourceBlockLoader(Reader<?> reader, String fieldName) {
+        this.reader = reader;
+        this.fieldName = fieldName;
+    }
+
+    @Override
+    public ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) throws IOException {
+        return null;
+    }
+
+    @Override
+    public RowStrideReader rowStrideReader(LeafReaderContext context) throws IOException {
+        return new IgnoredSourceRowStrideReader<>(fieldName, reader);
+    }
+
+    @Override
+    public StoredFieldsSpec rowStrideStoredFieldSpec() {
+        return new StoredFieldsSpec(false, false, Set.of(IgnoredSourceFieldMapper.NAME));
+    }
+
+    @Override
+    public boolean supportsOrdinals() {
+        return false;
+    }
+
+    @Override
+    public SortedSetDocValues ordinals(LeafReaderContext context) throws IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    private record IgnoredSourceRowStrideReader<T>(String fieldName, Reader<T> reader) implements RowStrideReader {
+        @Override
+        public void read(int docId, StoredFields storedFields, Builder builder) throws IOException {
+            var ignoredSource = storedFields.storedFields().get(IgnoredSourceFieldMapper.NAME);
+            if (ignoredSource == null) {
+                return;
+            }
+
+            Map<String, List<IgnoredSourceFieldMapper.NameValue>> valuesForFieldAndParents = new HashMap<>();
+
+            // Contains name of the field and all its parents
+            Set<String> fieldNames = new HashSet<>() {
+                {
+                    add("_doc");
+                }
+            };
+
+            var current = new StringBuilder();
+            for (String part : fieldName.split("\\.")) {
+                if (current.isEmpty() == false) {
+                    current.append('.');
+                }
+                current.append(part);
+                fieldNames.add(current.toString());
+            }
+
+            for (Object value : ignoredSource) {
+                IgnoredSourceFieldMapper.NameValue nameValue = IgnoredSourceFieldMapper.decode(value);
+                if (fieldNames.contains(nameValue.name())) {
+                    valuesForFieldAndParents.computeIfAbsent(nameValue.name(), k -> new ArrayList<>()).add(nameValue);
+                }
+            }
+
+            // TODO figure out how to handle XContentDataHelper#voidValue()
+
+            var blockValues = new ArrayList<T>();
+
+            var leafFieldValue = valuesForFieldAndParents.get(fieldName);
+            if (leafFieldValue != null) {
+                readFromFieldValue(leafFieldValue, blockValues);
+            } else {
+                readFromParentValue(valuesForFieldAndParents, blockValues);
+            }
+
+            if (blockValues.isEmpty() == false) {
+                if (blockValues.size() > 1) {
+                    builder.beginPositionEntry();
+                }
+
+                reader.writeToBlock(blockValues, builder);
+
+                if (blockValues.size() > 1) {
+                    builder.endPositionEntry();
+                }
+            } else {
+                builder.appendNull();
+            }
+        }
+
+        private void readFromFieldValue(List<IgnoredSourceFieldMapper.NameValue> nameValues, List<T> blockValues) throws IOException {
+            if (nameValues.isEmpty()) {
+                return;
+            }
+
+            for (var nameValue : nameValues) {
+                // Leaf field is stored directly (not as a part of a parent object), let's try to decode it.
+                Optional<Object> singleValue = XContentDataHelper.decode(nameValue.value());
+                if (singleValue.isPresent()) {
+                    reader.convertValue(singleValue.get(), blockValues);
+                    continue;
+                }
+
+                // We have a value for this field but it's an array or an object
+                var type = XContentDataHelper.decodeType(nameValue.value());
+                assert type.isPresent();
+
+                try (
+                    XContentParser parser = type.get()
+                        .xContent()
+                        .createParser(
+                            XContentParserConfiguration.EMPTY,
+                            nameValue.value().bytes,
+                            nameValue.value().offset + 1,
+                            nameValue.value().length - 1
+                        )
+                ) {
+                    parser.nextToken();
+                    parseWithReader(parser, blockValues);
+                }
+            }
+        }
+
+        private void readFromParentValue(
+            Map<String, List<IgnoredSourceFieldMapper.NameValue>> valuesForFieldAndParents,
+            List<T> blockValues
+        ) throws IOException {
+            if (valuesForFieldAndParents.isEmpty()) {
+                return;
+            }
+
+            // If a parent object is stored at a particular level its children won't be stored.
+            // So we should only ever have one parent here.
+            assert valuesForFieldAndParents.size() == 1 : "_ignored_source field contains multiple levels of the same object";
+            var parentValues = valuesForFieldAndParents.values().iterator().next();
+
+            for (var nameValue : parentValues) {
+                parseFieldFromParent(nameValue, blockValues);
+            }
+        }
+
+        private void parseFieldFromParent(IgnoredSourceFieldMapper.NameValue nameValue, List<T> blockValues) throws IOException {
+            var type = XContentDataHelper.decodeType(nameValue.value());
+            assert type.isPresent();
+
+            String nameAtThisLevel = fieldName.substring(nameValue.name().length() + 1);
+            var filterParserConfig = XContentParserConfiguration.EMPTY.withFiltering(null, Set.of(nameAtThisLevel), Set.of(), true);
+            try (
+                XContentParser parser = type.get()
+                    .xContent()
+                    .createParser(filterParserConfig, nameValue.value().bytes, nameValue.value().offset + 1, nameValue.value().length - 1)
+            ) {
+                parser.nextToken();
+                var fieldNameInParser = new StringBuilder(nameValue.name());
+                while (true) {
+                    if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
+                        fieldNameInParser.append('.').append(parser.currentName());
+                        if (fieldNameInParser.toString().equals(fieldName)) {
+                            parser.nextToken();
+                            break;
+                        }
+                    }
+                    parser.nextToken();
+                }
+                parseWithReader(parser, blockValues);
+            }
+        }
+
+        private void parseWithReader(XContentParser parser, List<T> blockValues) throws IOException {
+            if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
+                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
+                    reader.parse(parser, blockValues);
+                }
+                return;
+            }
+
+            reader.parse(parser, blockValues);
+        }
+
+        @Override
+        public boolean canReuse(int startingDocID) {
+            return true;
+        }
+    }
+
+    /**
+     * Field-specific implementation that converts data stored in _ignored_source field to block loader values.
+     * @param <T> type of the values this reader produces
+     */
+    public interface Reader<T> {
+        /**
+         * Converts a raw stored value for this field to a value in a format suitable for block loader and adds it to the provided
+         * accumulator.
+         * @param value raw decoded value from _ignored_source field (synthetic _source value)
+         * @param accumulator list containing the result of conversion
+         */
+        void convertValue(Object value, List<T> accumulator);
+
+        /**
+         * Parses one or more complex values using a provided parser and adds them to the provided accumulator.
+         * @param parser parser of a value from _ignored_source field (synthetic _source value)
+         * @param accumulator list containing the results of parsing
+         */
+        void parse(XContentParser parser, List<T> accumulator) throws IOException;
+
+        void writeToBlock(List<T> values, Builder blockBuilder);
+    }
+
+    public abstract static class ReaderWithNullValueSupport<T> implements Reader<T> {
+        private final T nullValue;
+
+        public ReaderWithNullValueSupport(T nullValue) {
+            this.nullValue = nullValue;
+        }
+
+        @Override
+        public void parse(XContentParser parser, List<T> accumulator) throws IOException {
+            if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
+                if (nullValue != null) {
+                    convertValue(nullValue, accumulator);
+                }
+                return;
+            }
+
+            parseNonNullValue(parser, accumulator);
+        }
+
+        abstract void parseNonNullValue(XContentParser parser, List<T> accumulator) throws IOException;
+    }
+}
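For a concrete feel of the Reader contract, here is a hedged sketch of a minimal implementation for a boolean-valued field; this particular reader is illustrative only (the keyword and number readers later in this patch are the real in-tree examples), and it assumes a `BlockLoader.BooleanBuilder` with `appendBoolean`:

    // Illustrative Reader: accumulate booleans from _ignored_source entries and
    // copy them into a boolean block builder.
    class BooleanSyntheticReader extends FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<Boolean> {
        BooleanSyntheticReader(Boolean nullValue) {
            super(nullValue);
        }

        @Override
        public void convertValue(Object value, List<Boolean> accumulator) {
            accumulator.add((Boolean) value); // value already decoded by XContentDataHelper#decode
        }

        @Override
        void parseNonNullValue(XContentParser parser, List<Boolean> accumulator) throws IOException {
            accumulator.add(parser.booleanValue());
        }

        @Override
        public void writeToBlock(List<Boolean> values, BlockLoader.Builder blockBuilder) {
            var builder = (BlockLoader.BooleanBuilder) blockBuilder; // assumed builder type
            for (var v : values) {
                builder.appendBoolean(v);
            }
        }
    }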
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
index 7c1f3678a5dc9..11db4d3d5aa64 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
@@ -62,7 +62,6 @@ public abstract class FieldMapper extends Mapper {
     private static final Logger logger = LogManager.getLogger(FieldMapper.class);
 
-
     public static final Setting<Boolean> IGNORE_MALFORMED_SETTING = Setting.boolSetting("index.mapping.ignore_malformed", settings -> {
         if (IndexSettings.MODE.get(settings) == IndexMode.LOGSDB
             && IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings).onOrAfter(IndexVersions.ENABLE_IGNORE_MALFORMED_LOGSDB)) {
@@ -851,6 +850,10 @@ public boolean isConfigured() {
             return isSet && Objects.equals(value, getDefaultValue()) == false;
         }
 
+        public boolean isSet() {
+            return isSet;
+        }
+
         /**
         * Allows the parameter to accept a {@code null} value
         */
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java
index 5f553ac8d2252..d8d8200baac31 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java
@@ -25,8 +25,6 @@
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
@@ -161,33 +159,7 @@ public void postParse(DocumentParserContext context) {
             return;
         }
 
-        Collection<NameValue> ignoredValuesToWrite = context.getIgnoredFieldValues();
-        if (context.getCopyToFields().isEmpty() == false && indexSettings.getSkipIgnoredSourceWrite() == false) {
-            /*
-            Mark fields as containing copied data meaning they should not be present
-            in synthetic _source (to be consistent with stored _source).
-            Ignored source values take precedence over standard synthetic source implementation
-            so by adding the `XContentDataHelper.voidValue()` entry we disable the field in synthetic source.
-            Otherwise, it would be constructed e.g. from doc_values which leads to duplicate values
-            in copied field after reindexing.
-            */
-            var mutableList = new ArrayList<>(ignoredValuesToWrite);
-            for (String copyToField : context.getCopyToFields()) {
-                ObjectMapper parent = context.parent().findParentMapper(copyToField);
-                if (parent == null) {
-                    // There are scenarios when this can happen:
-                    // 1. all values of the field that is the source of copy_to are null
-                    // 2. copy_to points at a field inside a disabled object
-                    // 3. copy_to points at dynamic field which is not yet applied to mapping, we will process it properly on re-parse.
-                    continue;
-                }
-                int offset = parent.isRoot() ? 0 : parent.fullPath().length() + 1;
-                mutableList.add(new IgnoredSourceFieldMapper.NameValue(copyToField, offset, XContentDataHelper.voidValue(), context.doc()));
-            }
-            ignoredValuesToWrite = mutableList;
-        }
-
-        for (NameValue nameValue : ignoredValuesToWrite) {
+        for (NameValue nameValue : context.getIgnoredFieldValues()) {
             nameValue.doc().add(new StoredField(NAME, encode(nameValue)));
         }
     }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java
index be4237fec3303..4039bccf1fd18 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.query.SearchExecutionContext;
 
@@ -41,6 +42,10 @@ public abstract class InferenceMetadataFieldsMapper extends MetadataFieldMapper
         Setting.Property.InternalIndex
     );
 
+    // Check index version SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP because that index version was added in the same serverless promotion
+    // where the new format was enabled by default
+    public static final IndexVersion USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT = IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP;
+
     public static final String NAME = "_inference_fields";
     public static final String CONTENT_TYPE = "_inference_fields";
 
@@ -86,10 +91,12 @@ public abstract ValueFetcher valueFetcher(
     */
     public static boolean isEnabled(Settings settings) {
         var version = IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings);
-        if (version.before(IndexVersions.INFERENCE_METADATA_FIELDS)
-            && version.between(IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) == false) {
+        if ((version.before(IndexVersions.INFERENCE_METADATA_FIELDS)
+            && version.between(IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) == false)
+            || (version.before(USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT) && USE_LEGACY_SEMANTIC_TEXT_FORMAT.exists(settings) == false)) {
             return false;
         }
+
         return USE_LEGACY_SEMANTIC_TEXT_FORMAT.get(settings) == false;
     }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java
index 7b14739d36246..df308a4bf983c 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java
@@ -18,6 +18,7 @@
 import org.apache.lucene.document.InvertableType;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.DocValuesSkipIndexType;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
@@ -38,7 +39,10 @@
 import org.elasticsearch.common.lucene.search.AutomatonQueries;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.core.Nullable;
+import org.elasticsearch.index.IndexMode;
+import org.elasticsearch.index.IndexSortConfig;
 import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.FieldData;
@@ -63,6 +67,7 @@
 import org.elasticsearch.search.runtime.StringScriptFieldTermQuery;
 import org.elasticsearch.search.runtime.StringScriptFieldWildcardQuery;
 import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
@@ -70,6 +75,7 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
@@ -78,6 +84,7 @@
 import static org.apache.lucene.index.IndexWriter.MAX_TERM_LENGTH;
 import static org.elasticsearch.core.Strings.format;
 import static org.elasticsearch.index.IndexSettings.IGNORE_ABOVE_SETTING;
+import static org.elasticsearch.index.IndexSettings.USE_DOC_VALUES_SKIPPER;
 
 /**
 * A field mapper for keywords. This mapper accepts strings and indexes them as-is.
@@ -87,9 +94,11 @@ public final class KeywordFieldMapper extends FieldMapper {
     private static final Logger logger = LogManager.getLogger(KeywordFieldMapper.class);
 
     public static final String CONTENT_TYPE = "keyword";
+    private static final String HOST_NAME = "host.name";
 
     public static class Defaults {
         public static final FieldType FIELD_TYPE;
+        public static final FieldType FIELD_TYPE_WITH_SKIP_DOC_VALUES;
 
         static {
             FieldType ft = new FieldType();
@@ -100,6 +109,16 @@ public static class Defaults {
             FIELD_TYPE = freezeAndDeduplicateFieldType(ft);
         }
 
+        static {
+            FieldType ft = new FieldType();
+            ft.setTokenized(false);
+            ft.setOmitNorms(true);
+            ft.setIndexOptions(IndexOptions.NONE);
+            ft.setDocValuesType(DocValuesType.SORTED_SET);
+            ft.setDocValuesSkipIndexType(DocValuesSkipIndexType.RANGE);
+            FIELD_TYPE_WITH_SKIP_DOC_VALUES = freezeAndDeduplicateFieldType(ft);
+        }
+
         public static final TextSearchInfo TEXT_SEARCH_INFO = new TextSearchInfo(
             FIELD_TYPE,
             null,
@@ -154,7 +173,8 @@ public static final class Builder extends FieldMapper.DimensionBuilder {
         );
         private final Parameter<Integer> ignoreAbove;
         private final int ignoreAboveDefault;
-
+        private final IndexSortConfig indexSortConfig;
+        private final IndexMode indexMode;
         private final Parameter<String> indexOptions = TextParams.keywordIndexOptions(m -> toType(m).indexOptions);
         private final Parameter<Boolean> hasNorms = TextParams.norms(false, m -> toType(m).fieldType.omitNorms() == false);
         private final Parameter<String> similarity = TextParams.similarity(
@@ -182,6 +202,7 @@ public static final class Builder extends FieldMapper.DimensionBuilder {
         private final IndexAnalyzers indexAnalyzers;
         private final ScriptCompiler scriptCompiler;
         private final IndexVersion indexCreatedVersion;
+        private final boolean useDocValuesSkipper;
 
         public Builder(final String name, final MappingParserContext mappingParserContext) {
             this(
@@ -189,7 +210,10 @@ public Builder(final String name, final MappingParserContex
                 mappingParserContext.getIndexAnalyzers(),
                 mappingParserContext.scriptCompiler(),
                 IGNORE_ABOVE_SETTING.get(mappingParserContext.getSettings()),
-                mappingParserContext.getIndexSettings().getIndexVersionCreated()
+                mappingParserContext.getIndexSettings().getIndexVersionCreated(),
+                mappingParserContext.getIndexSettings().getMode(),
+                mappingParserContext.getIndexSettings().getIndexSortConfig(),
+                USE_DOC_VALUES_SKIPPER.get(mappingParserContext.getSettings())
             );
         }
 
@@ -199,6 +223,19 @@ public Builder(final String name, final MappingParserContex
             ScriptCompiler scriptCompiler,
             int ignoreAboveDefault,
             IndexVersion indexCreatedVersion
+        ) {
+            this(name, indexAnalyzers, scriptCompiler, ignoreAboveDefault, indexCreatedVersion, IndexMode.STANDARD, null, false);
+        }
+
+        private Builder(
+            String name,
+            IndexAnalyzers indexAnalyzers,
+            ScriptCompiler scriptCompiler,
+            int ignoreAboveDefault,
+            IndexVersion indexCreatedVersion,
+            IndexMode indexMode,
+            IndexSortConfig indexSortConfig,
+            boolean useDocValuesSkipper
         ) {
             super(name);
             this.indexAnalyzers = indexAnalyzers;
@@ -233,6 +270,9 @@ public Builder(final String name, final MappingParserContex
                     throw new IllegalArgumentException("[ignore_above] must be positive, got [" + v + "]");
                 }
             });
+            this.indexSortConfig = indexSortConfig;
+            this.indexMode = indexMode;
+            this.useDocValuesSkipper = useDocValuesSkipper;
         }
 
         public Builder(String name, IndexVersion indexCreatedVersion) {
@@ -359,15 +399,27 @@ private KeywordFieldType buildFieldType(MapperBuilderContext context, FieldType
 
         @Override
         public KeywordFieldMapper build(MapperBuilderContext context) {
-            FieldType fieldtype = new FieldType(Defaults.FIELD_TYPE);
+            FieldType fieldtype = resolveFieldType(
+                useDocValuesSkipper,
+                indexCreatedVersion,
+                indexSortConfig,
+                indexMode,
+                context.buildFullName(leafName())
+            );
             fieldtype.setOmitNorms(this.hasNorms.getValue() == false);
-            fieldtype.setIndexOptions(TextParams.toIndexOptions(this.indexed.getValue(), this.indexOptions.getValue()));
             fieldtype.setStored(this.stored.getValue());
             fieldtype.setDocValuesType(this.hasDocValues.getValue() ? DocValuesType.SORTED_SET : DocValuesType.NONE);
+            if (fieldtype.equals(Defaults.FIELD_TYPE_WITH_SKIP_DOC_VALUES) == false) {
+                // NOTE: override index options only if we are not using a sparse doc values index (and we use an inverted index)
+                fieldtype.setIndexOptions(TextParams.toIndexOptions(this.indexed.getValue(), this.indexOptions.getValue()));
+            }
             if (fieldtype.equals(Defaults.FIELD_TYPE)) {
                 // deduplicate in the common default case to save some memory
                 fieldtype = Defaults.FIELD_TYPE;
             }
+            if (fieldtype.equals(Defaults.FIELD_TYPE_WITH_SKIP_DOC_VALUES)) {
+                fieldtype = Defaults.FIELD_TYPE_WITH_SKIP_DOC_VALUES;
+            }
             super.hasScript = script.get() != null;
             super.onScriptError = onScriptError.getValue();
             return new KeywordFieldMapper(
@@ -376,9 +428,41 @@ public KeywordFieldMapper build(MapperBuilderContext context) {
                 buildFieldType(context, fieldtype),
                 builderParams(this, context),
                 context.isSourceSynthetic(),
+                useDocValuesSkipper,
                 this
             );
         }
+
+        private FieldType resolveFieldType(
+            final boolean useDocValuesSkipper,
+            final IndexVersion indexCreatedVersion,
+            final IndexSortConfig indexSortConfig,
+            final IndexMode indexMode,
+            final String fullFieldName
+        ) {
+            if (useDocValuesSkipper
+                && indexCreatedVersion.onOrAfter(IndexVersions.HOSTNAME_DOC_VALUES_SPARSE_INDEX)
+                && shouldUseDocValuesSkipper(hasDocValues.getValue(), indexSortConfig, indexMode, fullFieldName)) {
+                return new FieldType(Defaults.FIELD_TYPE_WITH_SKIP_DOC_VALUES);
+            }
+            return new FieldType(Defaults.FIELD_TYPE);
+        }
+
+        private static boolean shouldUseDocValuesSkipper(
+            final boolean hasDocValues,
+            final IndexSortConfig indexSortConfig,
+            final IndexMode indexMode,
+            final String fullFieldName
+        ) {
+            return hasDocValues
+                && IndexMode.LOGSDB.equals(indexMode)
+                && HOST_NAME.equals(fullFieldName)
+                && indexSortConfigByHostName(indexSortConfig);
+        }
+
+        private static boolean indexSortConfigByHostName(final IndexSortConfig indexSortConfig) {
+            return indexSortConfig != null && indexSortConfig.hasIndexSort() && indexSortConfig.hasSortOnField(HOST_NAME);
+        }
     }
 
     public static final TypeParser PARSER = createTypeParserWithLegacySupport(Builder::new);
@@ -392,6 +476,9 @@ public static final class KeywordFieldType extends StringFieldType {
         private final FieldValues<String> scriptValues;
         private final boolean isDimension;
         private final boolean isSyntheticSource;
+        private final IndexMode indexMode;
+        private final IndexSortConfig indexSortConfig;
+        private final boolean hasDocValuesSkipper;
 
         public KeywordFieldType(
             String name,
@@ -417,6 +504,9 @@ public KeywordFieldType(
             this.scriptValues = builder.scriptValues();
             this.isDimension = builder.dimension.getValue();
             this.isSyntheticSource = isSyntheticSource;
+            this.indexMode = builder.indexMode;
+            this.indexSortConfig = builder.indexSortConfig;
+            this.hasDocValuesSkipper = DocValuesSkipIndexType.NONE.equals(fieldType.docValuesSkipIndexType()) == false;
         }
 
         public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Map<String, String> meta) {
@@ -428,6 +518,9 @@ public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Ma
             this.scriptValues = null;
             this.isDimension = false;
             this.isSyntheticSource = false;
+            this.indexMode = IndexMode.STANDARD;
+            this.indexSortConfig = null;
+            this.hasDocValuesSkipper = false;
         }
 
         public KeywordFieldType(String name) {
@@ -450,6 +543,9 @@ public KeywordFieldType(String name, FieldType fieldType) {
             this.scriptValues = null;
             this.isDimension = false;
             this.isSyntheticSource = false;
+            this.indexMode = IndexMode.STANDARD;
+            this.indexSortConfig = null;
+            this.hasDocValuesSkipper = DocValuesSkipIndexType.NONE.equals(fieldType.docValuesSkipIndexType()) == false;
         }
 
         public KeywordFieldType(String name, NamedAnalyzer analyzer) {
@@ -461,6 +557,9 @@ public KeywordFieldType(String name, NamedAnalyzer analyzer) {
             this.scriptValues = null;
             this.isDimension = false;
             this.isSyntheticSource = false;
+            this.indexMode = IndexMode.STANDARD;
+            this.indexSortConfig = null;
+            this.hasDocValuesSkipper = false;
         }
 
         @Override
@@ -627,10 +726,54 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) {
             if (isStored()) {
                 return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(name());
             }
+
+            if (isSyntheticSource) {
+                return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) {
+                    @Override
+                    public Builder builder(BlockFactory factory, int expectedCount) {
+                        return factory.bytesRefs(expectedCount);
+                    }
+                };
+            }
+
             SourceValueFetcher fetcher = sourceValueFetcher(blContext.sourcePaths(name()));
             return new BlockSourceReader.BytesRefsBlockLoader(fetcher, sourceBlockLoaderLookup(blContext));
         }
 
+        private FallbackSyntheticSourceBlockLoader.Reader<?> fallbackSyntheticSourceBlockLoaderReader() {
+            var nullValueBytes = nullValue != null ? new BytesRef(nullValue) : null;
+            return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<>(nullValueBytes) {
+                @Override
+                public void convertValue(Object value, List<BytesRef> accumulator) {
+                    String stringValue = ((BytesRef) value).utf8ToString();
+                    String adjusted = applyIgnoreAboveAndNormalizer(stringValue);
+                    if (adjusted != null) {
+                        // TODO what if the value didn't change?
+                        accumulator.add(new BytesRef(adjusted));
+                    }
+                }
+
+                @Override
+                public void parseNonNullValue(XContentParser parser, List<BytesRef> accumulator) throws IOException {
+                    assert parser.currentToken() == XContentParser.Token.VALUE_STRING : "Unexpected token " + parser.currentToken();
+
+                    var value = applyIgnoreAboveAndNormalizer(parser.text());
+                    if (value != null) {
+                        accumulator.add(new BytesRef(value));
+                    }
+                }
+
+                @Override
+                public void writeToBlock(List<BytesRef> values, BlockLoader.Builder blockBuilder) {
+                    var bytesRefBuilder = (BlockLoader.BytesRefBuilder) blockBuilder;
+
+                    for (var value : values) {
+                        bytesRefBuilder.appendBytesRef(value);
+                    }
+                }
+            };
+        }
+
         private BlockSourceReader.LeafIteratorLookup sourceBlockLoaderLookup(BlockLoaderContext blContext) {
             if (getTextSearchInfo().hasNorms()) {
                 return BlockSourceReader.lookupFromNorms(name());
@@ -710,15 +853,19 @@ private SourceValueFetcher sourceValueFetcher(Set<String> sourcePaths) {
                 @Override
                 protected String parseSourceValue(Object value) {
                     String keywordValue = value.toString();
-                    if (keywordValue.length() > ignoreAbove) {
-                        return null;
-                    }
-
-                    return normalizeValue(normalizer(), name(), keywordValue);
+                    return applyIgnoreAboveAndNormalizer(keywordValue);
                 }
             };
         }
 
+        private String applyIgnoreAboveAndNormalizer(String value) {
+            if (value.length() > ignoreAbove) {
+                return null;
+            }
+
+            return normalizeValue(normalizer(), name(), value);
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             if (value == null) {
@@ -851,6 +998,18 @@ public boolean hasScriptValues() {
         public boolean hasNormalizer() {
             return normalizer != Lucene.KEYWORD_ANALYZER;
         }
+
+        public IndexMode getIndexMode() {
+            return indexMode;
+        }
+
+        public IndexSortConfig getIndexSortConfig() {
+            return indexSortConfig;
+        }
+
+        public boolean hasDocValuesSkipper() {
+            return hasDocValuesSkipper;
+        }
     }
 
     private final boolean indexed;
@@ -866,7 +1025,9 @@ public boolean hasNormalizer() {
     private final IndexAnalyzers indexAnalyzers;
 
     private final int ignoreAboveDefault;
-    private final int ignoreAbove;
+    private final IndexMode indexMode;
+    private final IndexSortConfig indexSortConfig;
+    private final boolean useDocValuesSkipper;
 
     private KeywordFieldMapper(
         String simpleName,
@@ -874,6 +1035,7 @@ private KeywordFieldMapper(
         KeywordFieldType mappedFieldType,
         BuilderParams builderParams,
         boolean isSyntheticSource,
+        boolean useDocValuesSkipper,
         Builder builder
     ) {
         super(simpleName, mappedFieldType, builderParams);
@@ -890,7 +1052,9 @@ private KeywordFieldMapper(
         this.indexCreatedVersion = builder.indexCreatedVersion;
         this.isSyntheticSource = isSyntheticSource;
         this.ignoreAboveDefault = builder.ignoreAboveDefault;
-        this.ignoreAbove = builder.ignoreAbove.getValue();
+        this.indexMode = builder.indexMode;
+        this.indexSortConfig = builder.indexSortConfig;
+        this.useDocValuesSkipper = useDocValuesSkipper;
     }
 
     @Override
@@ -1008,9 +1172,16 @@ public Map<String, NamedAnalyzer> indexAnalyzers() {
 
     @Override
     public FieldMapper.Builder getMergeBuilder() {
-        return new Builder(leafName(), indexAnalyzers, scriptCompiler, ignoreAboveDefault, indexCreatedVersion).dimension(
-            fieldType().isDimension()
-        ).init(this);
+        return new Builder(
+            leafName(),
+            indexAnalyzers,
+            scriptCompiler,
+            ignoreAboveDefault,
+            indexCreatedVersion,
+            indexMode,
+            indexSortConfig,
+            useDocValuesSkipper
+        ).dimension(fieldType().isDimension()).init(this);
     }
 
     @Override
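Only one very specific setup flips a keyword field to the sparse doc-values index. A hedged restatement of the conditions checked by resolveFieldType/shouldUseDocValuesSkipper above, with illustrative local variable names:

    // Illustrative predicate: host.name in a logsdb index that is sorted on
    // host.name, with doc_values on and a new-enough index version, gets
    // FIELD_TYPE_WITH_SKIP_DOC_VALUES; everything else keeps the inverted index.
    boolean sparseDocValuesIndex = useDocValuesSkipper
        && indexCreatedVersion.onOrAfter(IndexVersions.HOSTNAME_DOC_VALUES_SPARSE_INDEX)
        && hasDocValues
        && indexMode == IndexMode.LOGSDB
        && "host.name".equals(fullFieldName)
        && indexSortConfig != null
        && indexSortConfig.hasIndexSort()
        && indexSortConfig.hasSortOnField("host.name");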
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
index 4b68e20673572..d65a8cbd8411b 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
@@ -41,6 +41,7 @@
 import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase;
+import org.elasticsearch.search.fetch.subphase.highlight.DefaultHighlighter;
 import org.elasticsearch.search.lookup.SearchLookup;
 
 import java.io.IOException;
@@ -217,6 +218,13 @@ public TimeSeriesParams.MetricType getMetricType() {
         return null;
     }
 
+    /**
+     * Returns the default highlighter type to use when highlighting the field.
+     */
+    public String getDefaultHighlighter() {
+        return DefaultHighlighter.NAME;
+    }
+
     /** Generates a query that will only match documents that contain the given value.
     * The default implementation returns a {@link TermQuery} over the value bytes
     * @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type or if the field is not searchable
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java
index 9b1abb1a4d533..42d35cb2ef09f 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java
@@ -27,8 +27,11 @@ public class MapperFeatures implements FeatureSpecification {
         "mapper.counted_keyword.synthetic_source_native_support"
     );
 
+    public static final NodeFeature TSDB_NESTED_FIELD_SUPPORT = new NodeFeature("mapper.tsdb_nested_field_support");
     public static final NodeFeature META_FETCH_FIELDS_ERROR_CODE_CHANGED = new NodeFeature("meta_fetch_fields_error_code_changed");
     public static final NodeFeature SPARSE_VECTOR_STORE_SUPPORT = new NodeFeature("mapper.sparse_vector.store_support");
+    public static final NodeFeature SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX = new NodeFeature("mapper.nested.sorting_fields_check_fix");
+    public static final NodeFeature DYNAMIC_HANDLING_IN_COPY_TO = new NodeFeature("mapper.copy_to.dynamic_handling");
 
     @Override
     public Set<NodeFeature> getTestFeatures() {
@@ -45,6 +48,9 @@ public Set<NodeFeature> getTestFeatures() {
             META_FETCH_FIELDS_ERROR_CODE_CHANGED,
             SPARSE_VECTOR_STORE_SUPPORT,
             COUNTED_KEYWORD_SYNTHETIC_SOURCE_NATIVE_SUPPORT,
+            SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX,
+            DYNAMIC_HANDLING_IN_COPY_TO,
+            TSDB_NESTED_FIELD_SUPPORT,
             SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE,
             ObjectMapper.SUBOBJECTS_FALSE_MAPPING_UPDATE_FIX
         );
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index b181225d80ddf..7958fd8e51525 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -801,7 +801,8 @@ public static boolean isMetadataFieldStatic(String fieldName) {
     * this method considers all mapper plugins
     */
     public boolean isMetadataField(String field) {
-        return mapperRegistry.getMetadataMapperParsers(indexVersionCreated).containsKey(field);
+        var mapper = mappingLookup().getMapper(field);
+        return mapper instanceof MetadataFieldMapper;
     }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
index 76528ccf0667e..078faa25938f5 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
@@ -269,7 +269,7 @@ public NumberFieldMapper build(MapperBuilderContext context) {
             dimension.setValue(true);
         }
 
-        MappedFieldType ft = new NumberFieldType(context.buildFullName(leafName()), this);
+        MappedFieldType ft = new NumberFieldType(context.buildFullName(leafName()), this, context.isSourceSynthetic());
         hasScript = script.get() != null;
         onScriptError = onScriptErrorParam.getValue();
         return new NumberFieldMapper(leafName(), ft, builderParams(this, context), context.isSourceSynthetic(), this);
@@ -463,6 +463,11 @@ BlockLoader blockLoaderFromDocValues(String fieldName) {
         BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) {
             return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup);
         }
+
+        @Override
+        BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
+            return floatingPointBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
+        }
     },
     FLOAT("float", NumericType.FLOAT) {
         @Override
@@ -647,6 +652,11 @@ BlockLoader blockLoaderFromDocValues(String fieldName) {
         BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) {
             return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup);
         }
+
+        @Override
+        BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
+            return floatingPointBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
+        }
     },
     DOUBLE("double", NumericType.DOUBLE) {
         @Override
@@ -797,6 +807,11 @@ BlockLoader blockLoaderFromDocValues(String fieldName) {
         BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) {
             return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup);
         }
+
+        @Override
+        BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
+            return floatingPointBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
+        }
     },
     BYTE("byte", NumericType.BYTE) {
         @Override
@@ -911,6 +926,11 @@ BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSo
             return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup);
         }
 
+        @Override
+        BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
+            return integerBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
+        }
+
         private boolean isOutOfRange(Object value) {
             double doubleValue = objectToDouble(value);
             return doubleValue < Byte.MIN_VALUE || doubleValue > Byte.MAX_VALUE;
@@ -1024,6 +1044,11 @@ BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSo
             return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup);
         }
 
+        @Override
+        BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
+            return integerBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
+        }
+
         private boolean isOutOfRange(Object value) {
             double doubleValue = objectToDouble(value);
             return doubleValue < Short.MIN_VALUE || doubleValue > Short.MAX_VALUE;
@@ -1210,6 +1235,11 @@ BlockLoader blockLoaderFromDocValues(String fieldName) {
         BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) {
             return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup);
         }
+
+        @Override
+        BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
+            return integerBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
+        }
     },
     LONG("long", NumericType.LONG) {
         @Override
@@ -1358,6 +1388,26 @@ BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSo
             return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup);
         }
 
+        @Override
+        BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
+            var reader = new NumberFallbackSyntheticSourceReader(this, nullValue, coerce) {
+                @Override
+                public void writeToBlock(List<Number> values, BlockLoader.Builder blockBuilder) {
+                    var builder = (BlockLoader.LongBuilder) blockBuilder;
+                    for (var value : values) {
+                        builder.appendLong(value.longValue());
+                    }
+                }
+            };
+
+            return new FallbackSyntheticSourceBlockLoader(reader, fieldName) {
+                @Override
+                public Builder builder(BlockFactory factory, int expectedCount) {
+                    return factory.longs(expectedCount);
+                }
+            };
+        }
+
         private boolean isOutOfRange(Object value) {
             if (value instanceof Long) {
                 return false;
@@ -1626,6 +1676,106 @@ protected void writeValue(XContentBuilder b, long value) throws IOException {
     abstract BlockLoader blockLoaderFromDocValues(String fieldName);
 
     abstract BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup);
+
+    abstract BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce);
+
+    // All values that fit into integer are returned as integers
+    private static BlockLoader integerBlockLoaderFromFallbackSyntheticSource(
+        NumberType type,
+        String fieldName,
+        Number nullValue,
+        boolean coerce
+    ) {
+        var reader = new NumberFallbackSyntheticSourceReader(type, nullValue, coerce) {
+            @Override
+            public void writeToBlock(List<Number> values, BlockLoader.Builder blockBuilder) {
+                var builder = (BlockLoader.IntBuilder) blockBuilder;
+                for (var value : values) {
+                    builder.appendInt(value.intValue());
+                }
+            }
+        };
+
+        return new FallbackSyntheticSourceBlockLoader(reader, fieldName) {
+            @Override
+            public Builder builder(BlockFactory factory, int expectedCount) {
+                return factory.ints(expectedCount);
+            }
+        };
+    }
+
+    // All floating point values are returned as doubles
+    private static BlockLoader floatingPointBlockLoaderFromFallbackSyntheticSource(
+        NumberType type,
+        String fieldName,
+        Number nullValue,
+        boolean coerce
+    ) {
+        var reader = new NumberFallbackSyntheticSourceReader(type, nullValue, coerce) {
+            @Override
+            public void writeToBlock(List<Number> values, BlockLoader.Builder blockBuilder) {
+                var builder = (BlockLoader.DoubleBuilder) blockBuilder;
+                for (var value : values) {
+                    builder.appendDouble(value.doubleValue());
+                }
+            }
+        };
+
+        return new FallbackSyntheticSourceBlockLoader(reader, fieldName) {
+            @Override
+            public Builder builder(BlockFactory factory, int expectedCount) {
+                return factory.doubles(expectedCount);
+            }
+        };
+    }
+
+    abstract static class NumberFallbackSyntheticSourceReader extends FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<
+        Number> {
+        private final NumberType type;
+        private final Number nullValue;
+        private final boolean coerce;
+
+        NumberFallbackSyntheticSourceReader(NumberType type, Number nullValue, boolean coerce) {
+            super(nullValue);
+            this.type = type;
+            this.nullValue = nullValue;
+            this.coerce = coerce;
+        }
+
+        @Override
+        public void convertValue(Object value, List<Number> accumulator) {
+            if (coerce && value.equals("")) {
+                if (nullValue != null) {
+                    accumulator.add(nullValue);
+                }
+            }
+
+            try {
+                var converted = type.parse(value, coerce);
+                accumulator.add(converted);
+            } catch (Exception e) {
+                // Malformed value, skip it
+            }
+        }
+
+        @Override
+        public void parseNonNullValue(XContentParser parser, List<Number> accumulator) throws IOException {
+            // Aligned with implementation of `value(XContentParser)`
+            if (coerce && parser.currentToken() == Token.VALUE_STRING && parser.textLength() == 0) {
+                if (nullValue != null) {
+                    accumulator.add(nullValue);
+                }
+            }
+
+            try {
+                Number rawValue = type.parse(parser, coerce);
+                // Transform number to correct type (e.g. reduce precision)
+                accumulator.add(type.parse(rawValue, coerce));
+            } catch (Exception e) {
+                // Malformed value, skip it
+            }
+        }
+    }
 
     public static class NumberFieldType extends SimpleMappedFieldType {
@@ -1637,6 +1787,7 @@ public static class NumberFieldType extends SimpleMappedFieldType {
         private final boolean isDimension;
         private final MetricType metricType;
         private final IndexMode indexMode;
+        private final boolean isSyntheticSource;
 
         public NumberFieldType(
             String name,
@@ -1650,7 +1801,8 @@ public NumberFieldType(
             FieldValues<Number> script,
             boolean isDimension,
             MetricType metricType,
-            IndexMode indexMode
+            IndexMode indexMode,
+            boolean isSyntheticSource
         ) {
             super(name, isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta);
             this.type = Objects.requireNonNull(type);
@@ -1660,9 +1812,10 @@ public NumberFieldType(
             this.isDimension = isDimension;
             this.metricType = metricType;
             this.indexMode = indexMode;
+            this.isSyntheticSource = isSyntheticSource;
         }
 
-        NumberFieldType(String name, Builder builder) {
+        NumberFieldType(String name, Builder builder, boolean isSyntheticSource) {
             this(
                 name,
                 builder.type,
@@ -1675,7 +1828,8 @@ public NumberFieldType(
                 builder.scriptValues(),
                 builder.dimension.getValue(),
                 builder.metric.getValue(),
-                builder.indexMode
+                builder.indexMode,
+                isSyntheticSource
             );
         }
 
@@ -1684,7 +1838,7 @@ public NumberFieldType(String name, NumberType type) {
         }
 
         public NumberFieldType(String name, NumberType type, boolean isIndexed) {
-            this(name, type, isIndexed, false, true, true, null, Collections.emptyMap(), null, false, null, null);
+            this(name, type, isIndexed, false, true, true, null, Collections.emptyMap(), null, false, null, null, false);
         }
 
         @Override
@@ -1761,6 +1915,11 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) {
             if (hasDocValues()) {
                 return type.blockLoaderFromDocValues(name());
             }
+
+            if (isSyntheticSource) {
+                return type.blockLoaderFromFallbackSyntheticSource(name(), nullValue, coerce);
+            }
+
             BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed()
                 ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name())
                 : BlockSourceReader.lookupMatchingAll();
@@ -1876,7 +2035,7 @@ public MetricType getMetricType() {
     private final MetricType metricType;
     private boolean allowMultipleValues;
     private final IndexVersion indexCreatedVersion;
-    private final boolean storeMalformedFields;
+    private final boolean isSyntheticSource;
 
     private final IndexMode indexMode;
 
@@ -1884,7 +2043,7 @@ private NumberFieldMapper(
         String simpleName,
         MappedFieldType mappedFieldType,
         BuilderParams builderParams,
-        boolean storeMalformedFields,
+        boolean isSyntheticSource,
         Builder builder
     ) {
         super(simpleName, mappedFieldType, builderParams);
@@ -1904,7 +2063,7 @@ private NumberFieldMapper(
         this.metricType = builder.metric.getValue();
         this.allowMultipleValues = builder.allowMultipleValues;
         this.indexCreatedVersion = builder.indexCreatedVersion;
-        this.storeMalformedFields = storeMalformedFields;
+        this.isSyntheticSource = isSyntheticSource;
         this.indexMode = builder.indexMode;
     }
 
@@ -1939,7 +2098,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
         } catch (IllegalArgumentException e) {
             if (ignoreMalformed.value() && context.parser().currentToken().isValue()) {
                 context.addIgnoredField(mappedFieldType.name());
-                if (storeMalformedFields) {
+                if (isSyntheticSource) {
                     // Save a copy of the field so synthetic source can load it
                     context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
                 }
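The number reader deliberately mirrors the mapper's lenient parsing: with coerce enabled an empty string maps to the configured null_value, and malformed values are dropped instead of failing the whole block load. A self-contained sketch of that lenient-accumulate pattern (names and the `Long.parseLong` stand-in are illustrative, not the PR's parsing code):

    import java.util.List;

    // Sketch: coerce empty input to a default, swallow malformed values
    // instead of failing, matching the reader's catch-and-ignore behavior.
    static void accumulate(String raw, Long nullValue, List<Number> acc) {
        if (raw.isEmpty()) {
            if (nullValue != null) {
                acc.add(nullValue); // mirrors coerce: "" -> null_value
            }
            return;
        }
        try {
            acc.add(Long.parseLong(raw));
        } catch (NumberFormatException e) {
            // Malformed value, skip it
        }
    }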
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); @@ -1876,7 +2035,7 @@ public MetricType getMetricType() { private final MetricType metricType; private boolean allowMultipleValues; private final IndexVersion indexCreatedVersion; - private final boolean storeMalformedFields; + private final boolean isSyntheticSource; private final IndexMode indexMode; @@ -1884,7 +2043,7 @@ private NumberFieldMapper( String simpleName, MappedFieldType mappedFieldType, BuilderParams builderParams, - boolean storeMalformedFields, + boolean isSyntheticSource, Builder builder ) { super(simpleName, mappedFieldType, builderParams); @@ -1904,7 +2063,7 @@ private NumberFieldMapper( this.metricType = builder.metric.getValue(); this.allowMultipleValues = builder.allowMultipleValues; this.indexCreatedVersion = builder.indexCreatedVersion; - this.storeMalformedFields = storeMalformedFields; + this.isSyntheticSource = isSyntheticSource; this.indexMode = builder.indexMode; } @@ -1939,7 +2098,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } catch (IllegalArgumentException e) { if (ignoreMalformed.value() && context.parser().currentToken().isValue()) { context.addIgnoredField(mappedFieldType.name()); - if (storeMalformedFields) { + if (isSyntheticSource) { // Save a copy of the field so synthetic source can load it context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser())); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index abca8e057f3b8..4c9214015fba6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -1127,7 +1127,7 @@ public boolean setIgnoredValues(Map decode(BytesRef r) { + return switch ((char) r.bytes[r.offset]) { + case BINARY_ENCODING -> Optional.of(TypeUtils.EMBEDDED_OBJECT.decode(r)); + case CBOR_OBJECT_ENCODING, JSON_OBJECT_ENCODING, YAML_OBJECT_ENCODING, SMILE_OBJECT_ENCODING -> Optional.empty(); + case BIG_DECIMAL_ENCODING -> Optional.of(TypeUtils.BIG_DECIMAL.decode(r)); + case FALSE_ENCODING, TRUE_ENCODING -> Optional.of(TypeUtils.BOOLEAN.decode(r)); + case BIG_INTEGER_ENCODING -> Optional.of(TypeUtils.BIG_INTEGER.decode(r)); + case STRING_ENCODING -> Optional.of(TypeUtils.STRING.decode(r)); + case INTEGER_ENCODING -> Optional.of(TypeUtils.INTEGER.decode(r)); + case LONG_ENCODING -> Optional.of(TypeUtils.LONG.decode(r)); + case DOUBLE_ENCODING -> Optional.of(TypeUtils.DOUBLE.decode(r)); + case FLOAT_ENCODING -> Optional.of(TypeUtils.FLOAT.decode(r)); + case NULL_ENCODING -> Optional.ofNullable(TypeUtils.NULL.decode(r)); + case VOID_ENCODING -> Optional.of(TypeUtils.VOID.decode(r)); + default -> throw new IllegalArgumentException("Can't decode " + r); + }; + } + /** * Determines if the given {@link BytesRef}, encoded with {@link XContentDataHelper#encodeToken(XContentParser)}, * is an encoded object. 
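
The decode method added above dispatches on a single type-discriminator byte stored at the front of each encoded value, returning Optional.empty() for the object encodings that cannot be decoded to a single value. A minimal standalone sketch of the same tag-dispatch idea; the tag characters and payload layout here are illustrative stand-ins, not the actual XContentDataHelper encodings:

    import java.nio.charset.StandardCharsets;
    import java.util.Optional;

    final class TaggedDecodeSketch {
        // Illustrative discriminator bytes; the real constants live in XContentDataHelper.
        static final char STRING_TAG = 'S';
        static final char TRUE_TAG = 't';
        static final char FALSE_TAG = 'f';
        static final char NULL_TAG = 'n';

        // The first byte names the type; the remaining bytes are the payload.
        static Optional<Object> decode(byte[] encoded) {
            return switch ((char) encoded[0]) {
                case STRING_TAG -> Optional.of(new String(encoded, 1, encoded.length - 1, StandardCharsets.UTF_8));
                case TRUE_TAG -> Optional.of(Boolean.TRUE);
                case FALSE_TAG -> Optional.of(Boolean.FALSE);
                case NULL_TAG -> Optional.empty(); // an encoded null decodes to "no value"
                default -> throw new IllegalArgumentException("unknown tag: " + (char) encoded[0]);
            };
        }

        public static void main(String[] args) {
            System.out.println(decode("Shello".getBytes(StandardCharsets.UTF_8))); // Optional[hello]
            System.out.println(decode(new byte[] { 't' }));                        // Optional[true]
        }
    }
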
@@ -339,6 +361,11 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + return new BytesRef(r.bytes, r.offset + 1, r.length - 1); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(new BytesRef(r.bytes, r.offset + 1, r.length - 1).utf8ToString()); @@ -359,6 +386,11 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + return ByteUtils.readIntLE(r.bytes, 1 + r.offset); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(ByteUtils.readIntLE(r.bytes, 1 + r.offset)); @@ -379,6 +411,11 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + return ByteUtils.readLongLE(r.bytes, 1 + r.offset); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(ByteUtils.readLongLE(r.bytes, 1 + r.offset)); @@ -399,6 +436,11 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + return ByteUtils.readDoubleLE(r.bytes, 1 + r.offset); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(ByteUtils.readDoubleLE(r.bytes, 1 + r.offset)); @@ -419,6 +461,11 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + return ByteUtils.readFloatLE(r.bytes, 1 + r.offset); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(ByteUtils.readFloatLE(r.bytes, 1 + r.offset)); @@ -437,6 +484,11 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + return new BigInteger(r.bytes, r.offset + 1, r.length - 1); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(new BigInteger(r.bytes, r.offset + 1, r.length - 1)); @@ -455,6 +507,15 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + if (r.length < 5) { + throw new IllegalArgumentException("Can't decode " + r); + } + int scale = ByteUtils.readIntLE(r.bytes, r.offset + 1); + return new BigDecimal(new BigInteger(r.bytes, r.offset + 5, r.length - 5), scale); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { if (r.length < 5) { @@ -477,6 +538,15 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + if (r.length != 1) { + throw new IllegalArgumentException("Can't decode " + r); + } + assert r.bytes[r.offset] == 't' || r.bytes[r.offset] == 'f' : r.bytes[r.offset]; + return r.bytes[r.offset] == 't'; + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { if (r.length != 1) { @@ -499,6 +569,11 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + return null; + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.nullValue(); @@ -517,6 +592,11 @@ byte[] encode(XContentParser parser) throws IOException { return bytes; } + @Override + Object decode(BytesRef r) { + return new BytesRef(r.bytes, r.offset + 1, r.length - 1); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { b.value(r.bytes, r.offset + 1, r.length - 1); 
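
The BigDecimal decoder above reads a little-endian scale from the four bytes after the discriminator and builds the unscaled value from the remainder. A self-contained round trip of that layout, with java.nio standing in for the ByteUtils helpers and an assumed tag byte:

    import java.math.BigDecimal;
    import java.math.BigInteger;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    final class BigDecimalCodecSketch {
        static final byte TAG = 'D'; // illustrative discriminator, not the real encoding byte

        // Layout: [tag: 1 byte][scale: 4 bytes, little-endian][unscaled BigInteger bytes]
        static byte[] encode(BigDecimal value) {
            byte[] unscaled = value.unscaledValue().toByteArray();
            return ByteBuffer.allocate(5 + unscaled.length)
                .order(ByteOrder.LITTLE_ENDIAN)
                .put(TAG)
                .putInt(value.scale())
                .put(unscaled)
                .array();
        }

        static BigDecimal decode(byte[] r) {
            if (r.length < 5) {
                throw new IllegalArgumentException("too short to hold a tag and a scale");
            }
            int scale = ByteBuffer.wrap(r, 1, 4).order(ByteOrder.LITTLE_ENDIAN).getInt();
            return new BigDecimal(new BigInteger(r, 5, r.length - 5), scale);
        }

        public static void main(String[] args) {
            BigDecimal original = new BigDecimal("123.456789");
            System.out.println(original.equals(decode(encode(original)))); // true
        }
    }
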
@@ -538,6 +618,11 @@ byte[] encode(XContentParser parser) throws IOException { } } + @Override + Object decode(BytesRef r) { + throw new UnsupportedOperationException(); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException { switch ((char) r.bytes[r.offset]) { @@ -562,6 +647,11 @@ byte[] encode(XContentParser parser) { return bytes; } + @Override + Object decode(BytesRef r) { + throw new UnsupportedOperationException(); + } + @Override void decodeAndWrite(XContentBuilder b, BytesRef r) { // NOOP @@ -591,6 +681,8 @@ void assertValidEncoding(byte[] encodedValue) { abstract byte[] encode(XContentParser parser) throws IOException; + abstract Object decode(BytesRef r); + abstract void decodeAndWrite(XContentBuilder b, BytesRef r) throws IOException; static byte[] encode(BigInteger n, Byte encoding) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 0d514408c912f..ce41c2164e205 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -2404,6 +2404,12 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf } KnnVectorValues.DocIndexIterator iterator = values.iterator(); return docId -> { + if (iterator.docID() > docId) { + return hasValue = false; + } + if (iterator.docID() == docId) { + return hasValue = true; + } hasValue = docId == iterator.advance(docId); hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); ord = iterator.index(); @@ -2414,6 +2420,12 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf if (byteVectorValues != null) { KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator(); return docId -> { + if (iterator.docID() > docId) { + return hasValue = false; + } + if (iterator.docID() == docId) { + return hasValue = true; + } hasValue = docId == iterator.advance(docId); ord = iterator.index(); return hasValue; @@ -2476,6 +2488,12 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf return null; } return docId -> { + if (values.docID() > docId) { + return hasValue = false; + } + if (values.docID() == docId) { + return hasValue = true; + } hasValue = docId == values.advance(docId); return hasValue; }; diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java index b2b37ad834178..96c39ed356f90 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java @@ -126,7 +126,7 @@ protected static IntervalsSource combineSources(List sources, i if (maxGaps == 0 && ordered) { return Intervals.phrase(sourcesArray); } - IntervalsSource inner = ordered ? XIntervals.ordered(sourcesArray) : XIntervals.unordered(sourcesArray); + IntervalsSource inner = ordered ? 
Intervals.ordered(sourcesArray) : Intervals.unordered(sourcesArray); if (maxGaps == -1) { return inner; } diff --git a/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java index 889fa40b79aa1..524310c547597 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java @@ -70,7 +70,9 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws changed |= newQueryBuilders[i] != queryBuilders[i]; } if (changed) { - return new RankDocsQueryBuilder(rankDocs, newQueryBuilders, onlyRankDocs); + RankDocsQueryBuilder clone = new RankDocsQueryBuilder(rankDocs, newQueryBuilders, onlyRankDocs); + clone.queryName(queryName()); + return clone; } } return super.doRewrite(queryRewriteContext); diff --git a/server/src/main/java/org/elasticsearch/index/query/XIntervals.java b/server/src/main/java/org/elasticsearch/index/query/XIntervals.java deleted file mode 100644 index 7d8552e18f790..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/query/XIntervals.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.queries.intervals.IntervalIterator; -import org.apache.lucene.queries.intervals.IntervalMatchesIterator; -import org.apache.lucene.queries.intervals.Intervals; -import org.apache.lucene.queries.intervals.IntervalsSource; -import org.apache.lucene.search.QueryVisitor; - -import java.io.IOException; -import java.util.Collection; -import java.util.Objects; - -/** - * Copy of {@link Intervals} that exposes versions of {@link Intervals#ordered} and {@link Intervals#unordered} - * that preserve their inner gaps. - * NOTE: Remove this hack when a version of Lucene with https://github.com/apache/lucene/pull/13819 is used (10.1.0). - */ -public final class XIntervals { - - /** - * Create an ordered {@link IntervalsSource} - * - *
<p>
Returns intervals in which the subsources all appear in the given order - * - * @param subSources an ordered set of {@link IntervalsSource} objects - */ - public static IntervalsSource ordered(IntervalsSource... subSources) { - return new DelegateIntervalsSource(Intervals.ordered(subSources)); - } - - /** - * Create an ordered {@link IntervalsSource} - * - *
<p>
Returns intervals in which the subsources all appear in the given order - * - * @param subSources an ordered set of {@link IntervalsSource} objects - */ - public static IntervalsSource unordered(IntervalsSource... subSources) { - return new DelegateIntervalsSource(Intervals.unordered(subSources)); - } - - /** - * Wraps a source to avoid aggressive flattening of the ordered and unordered sources. - * The flattening modifies the final gap and is removed in the latest unreleased version of Lucene (10.1). - */ - private static class DelegateIntervalsSource extends IntervalsSource { - private final IntervalsSource delegate; - - private DelegateIntervalsSource(IntervalsSource delegate) { - this.delegate = delegate; - } - - @Override - public IntervalIterator intervals(String field, LeafReaderContext ctx) throws IOException { - return delegate.intervals(field, ctx); - } - - @Override - public IntervalMatchesIterator matches(String field, LeafReaderContext ctx, int doc) throws IOException { - return delegate.matches(field, ctx, doc); - } - - @Override - public void visit(String field, QueryVisitor visitor) { - delegate.visit(field, visitor); - } - - @Override - public int minExtent() { - return delegate.minExtent(); - } - - @Override - public Collection pullUpDisjunctions() { - return delegate.pullUpDisjunctions(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DelegateIntervalsSource that = (DelegateIntervalsSource) o; - return Objects.equals(delegate, that.delegate); - } - - @Override - public int hashCode() { - return Objects.hash(delegate); - } - - @Override - public String toString() { - return delegate.toString(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java index e5104948cc426..3cd594e646f4e 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java @@ -17,8 +17,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; -import java.util.function.Supplier; - /** * An index event listener is the primary extension point for plugins and build-in services * to react / listen to per-index and per-shard events. These listeners are registered per-index @@ -88,6 +86,25 @@ default void indexShardStateChanged( @Nullable String reason ) {} + /** + * Invoked before a shard performs a mutable operation. Mutable operations include, but are not limited to: + *
<ul>
+ *     <li>Indexing operations</li>
+ *     <li>Force merges</li>
+ * </ul>
+ * + * This method ensures that the shard is ready to accept mutating operations. This is particularly useful in cases + * where the shard initializes its internal {@link org.elasticsearch.index.engine.Engine} lazily, which may take some time. + * The provided listener should be notified once the shard is prepared to proceed with the operation. + * This can be called from a transport thread and therefore the function should be lightweight and not block the thread. + * + * @param indexShard the shard where the mutable operation will be performed + * @param listener the listener to be notified when the shard is ready to proceed + */ + default void beforeIndexShardMutableOperation(IndexShard indexShard, ActionListener listener) { + listener.onResponse(null); + } + /** * Called before the index gets created. Note that this is also called * when the index is created on data nodes @@ -192,14 +209,4 @@ default void afterIndexShardRecovery(IndexShard indexShard, ActionListener * @param indexShard the shard that is recovering */ default void afterFilesRestoredFromRepository(IndexShard indexShard) {} - - /** - * Called when a single primary permit is acquired for the given shard (see - * {@link IndexShard#acquirePrimaryOperationPermit(ActionListener, java.util.concurrent.Executor)}). - * - * @param indexShard the shard of which a primary permit is requested - * @param onPermitAcquiredListenerSupplier call this immediately to get a listener when the permit is acquired. The listener must be - * completed in order for the permit to be given to the acquiring operation. - */ - default void onAcquirePrimaryOperationPermit(IndexShard indexShard, Supplier> onPermitAcquiredListenerSupplier) {} } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index bfa286858f8ba..d56d7471d498e 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -34,7 +34,6 @@ import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.replication.PendingReplicationActions; import org.elasticsearch.action.support.replication.ReplicationResponse; @@ -3206,7 +3205,12 @@ void checkIndex() throws IOException { try { doCheckIndex(); } catch (IOException e) { - store.markStoreCorrupted(e); + if (ExceptionsHelper.unwrap(e, AlreadyClosedException.class) != null) { + // Cache-based read operations on Lucene files can throw an AlreadyClosedException wrapped into an IOException in case + // of evictions. We don't want to mark the store as corrupted for this. 
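
The checkIndex change above only treats the IOException as corruption when no AlreadyClosedException hides in its cause chain. The unwrap-and-test pattern in isolation, with a plain getCause() walk standing in for ExceptionsHelper.unwrap:

    import java.io.IOException;

    final class UnwrapSketch {
        // Walk the cause chain looking for a particular exception type; null if absent.
        static <T extends Throwable> T unwrap(Throwable t, Class<T> type) {
            for (Throwable current = t; current != null; current = current.getCause()) {
                if (type.isInstance(current)) {
                    return type.cast(current);
                }
            }
            return null;
        }

        public static void main(String[] args) {
            // Lucene's AlreadyClosedException extends IllegalStateException; a local stand-in keeps this runnable.
            class AlreadyClosedException extends IllegalStateException {
                AlreadyClosedException(String message) {
                    super(message);
                }
            }
            Exception failure = new IOException("read failed", new AlreadyClosedException("cache entry evicted"));
            boolean benign = unwrap(failure, AlreadyClosedException.class) != null;
            System.out.println(benign ? "benign eviction, store left intact" : "would mark store corrupted");
        }
    }
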
+ } else { + store.markStoreCorrupted(e); + } throw e; } finally { store.decRef(); @@ -3585,19 +3589,7 @@ public void acquirePrimaryOperationPermit( ) { verifyNotClosed(); assert shardRouting.primary() : "acquirePrimaryOperationPermit should only be called on primary shard: " + shardRouting; - - ActionListener onPermitAcquiredWrapped = onPermitAcquired.delegateFailureAndWrap((delegate, releasable) -> { - final ActionListener wrappedListener = indexShardOperationPermits.wrapContextPreservingActionListener( - delegate, - executorOnDelay, - forceExecution - ); - try (var listeners = new RefCountingListener(wrappedListener.map(unused -> releasable))) { - indexEventListener.onAcquirePrimaryOperationPermit(this, () -> listeners.acquire()); - } - }); - - indexShardOperationPermits.acquire(wrapPrimaryOperationPermitListener(onPermitAcquiredWrapped), executorOnDelay, forceExecution); + indexShardOperationPermits.acquire(wrapPrimaryOperationPermitListener(onPermitAcquired), executorOnDelay, forceExecution); } public boolean isPrimaryMode() { @@ -4321,17 +4313,15 @@ public void resetEngine() { assert waitForEngineOrClosedShardListeners.isDone(); try { synchronized (engineMutex) { - final var currentEngine = getEngine(); - currentEngine.prepareForEngineReset(); - var engineConfig = newEngineConfig(replicationTracker); verifyNotClosed(); - IOUtils.close(currentEngine); - var newEngine = createEngine(engineConfig); - currentEngineReference.set(newEngine); + getEngine().prepareForEngineReset(); + var newEngine = createEngine(newEngineConfig(replicationTracker)); + IOUtils.close(currentEngineReference.getAndSet(newEngine)); onNewEngine(newEngine); } onSettingsChanged(); } catch (Exception e) { + // we want to fail the shard in the case prepareForEngineReset throws failShard("unable to reset engine", e); } } @@ -4511,4 +4501,19 @@ public void waitForPrimaryTermAndGeneration(long primaryTerm, long segmentGenera ); } + /** + * Ensures that the shard is ready to perform mutable operations. + * This method is particularly useful when the shard initializes its internal + * {@link org.elasticsearch.index.engine.Engine} lazily, as it may take some time before becoming mutable. + * + * The provided listener will be notified once the shard is ready for mutating operations. + * + * @param listener the listener to be notified when the shard is mutable + */ + public void ensureMutable(ActionListener listener) { + indexEventListener.beforeIndexShardMutableOperation(this, listener.delegateFailure((l, unused) -> { + // TODO ES-10826: Acquire ref to engine and retry if it's immutable again? 
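
The resetEngine rewrite above collapses the old read-close-create-set sequence into one getAndSet, so exactly the engine that was installed gets closed and prepareForEngineReset failures fall through to failShard. A toy version of the swap-and-close idiom, with Engine as a stand-in for the real interface:

    import java.util.concurrent.atomic.AtomicReference;

    final class SwapAndCloseSketch {
        interface Engine extends AutoCloseable {
            @Override
            void close(); // narrowed to drop the checked exception, for brevity
        }

        private final AtomicReference<Engine> current = new AtomicReference<>();

        // Install the replacement and close whatever was installed before, in one step.
        void reset(Engine replacement) {
            Engine previous = current.getAndSet(replacement);
            if (previous != null) {
                previous.close();
            }
        }

        public static void main(String[] args) {
            SwapAndCloseSketch shard = new SwapAndCloseSketch();
            shard.reset(() -> System.out.println("closing first engine"));  // nothing installed yet, closes nothing
            shard.reset(() -> System.out.println("closing second engine")); // prints "closing first engine"
        }
    }
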
+ l.onResponse(null); + })); + } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java index 79f5d054df30d..0427e9c99ea35 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java @@ -230,7 +230,7 @@ private void innerAcquire( onAcquired.onResponse(releasable); } - public ActionListener wrapContextPreservingActionListener( + private ActionListener wrapContextPreservingActionListener( ActionListener listener, @Nullable final Executor executorOnDelay, final boolean forceExecution diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 8cf631b660b1e..36b6709661017 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.engine.TranslogOperationAsserter; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.lookup.Source; import java.io.Closeable; import java.io.IOException; @@ -298,8 +299,10 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc + "], with different data. " + "prvOp [" + prvOp + + (prvOp instanceof Translog.Index index ? " source: " + Source.fromBytes(index.source()).source() : "") + "], newOp [" + newOp + + (newOp instanceof Translog.Index index ? " source: " + Source.fromBytes(index.source()).source() : "") + "]", previous.v2() ); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 1df5bddeff9e3..3212488cc3011 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -79,7 +79,6 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.env.ShardLockObtainFailedException; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.CloseUtils; @@ -236,7 +235,6 @@ public class IndicesService extends AbstractLifecycleComponent private final ScriptService scriptService; private final ClusterService clusterService; private final Client client; - private final FeatureService featureService; private volatile Map indices = Map.of(); private final Map> pendingDeletes = new HashMap<>(); private final AtomicInteger numUncompletedDeletes = new AtomicInteger(); @@ -309,7 +307,6 @@ protected void doStart() { this.scriptService = builder.scriptService; this.clusterService = builder.clusterService; this.client = builder.client; - this.featureService = builder.featureService; this.idFieldDataEnabled = INDICES_ID_FIELD_DATA_ENABLED_SETTING.get(clusterService.getSettings()); clusterService.getClusterSettings().addSettingsUpdateConsumer(INDICES_ID_FIELD_DATA_ENABLED_SETTING, this::setIdFieldDataEnabled); this.indicesFieldDataCache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() { diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java 
b/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java index 66e8f98f77fef..36c40b57d1aca 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.Nullable; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.SlowLogFieldProvider; @@ -67,7 +66,6 @@ public class IndicesServiceBuilder { ScriptService scriptService; ClusterService clusterService; Client client; - FeatureService featureService; MetaStateService metaStateService; Collection>> engineFactoryProviders = List.of(); Map directoryFactories = Map.of(); @@ -173,11 +171,6 @@ public IndicesServiceBuilder client(Client client) { return this; } - public IndicesServiceBuilder featureService(FeatureService featureService) { - this.featureService = featureService; - return this; - } - public IndicesServiceBuilder metaStateService(MetaStateService metaStateService) { this.metaStateService = metaStateService; return this; @@ -230,7 +223,6 @@ public IndicesService build() { Objects.requireNonNull(scriptService); Objects.requireNonNull(clusterService); Objects.requireNonNull(client); - Objects.requireNonNull(featureService); Objects.requireNonNull(metaStateService); Objects.requireNonNull(engineFactoryProviders); Objects.requireNonNull(directoryFactories); diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 886c5e4bf6d3a..bfe1cd9b28de1 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -122,7 +122,7 @@ public Dictionary getDictionary(String locale) { } private static Path resolveHunspellDirectory(Environment env) { - return env.configFile().resolve("hunspell"); + return env.configDir().resolve("hunspell"); } /** @@ -193,7 +193,7 @@ private Dictionary loadDictionary(String locale, Settings nodeSettings, Environm affixStream = Files.newInputStream(affixFiles[0]); - try (Directory tmp = new NIOFSDirectory(env.tmpFile())) { + try (Directory tmp = new NIOFSDirectory(env.tmpDir())) { return new Dictionary(tmp, "hunspell", affixStream, dicStreams, ignoreCase); } diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 0e9901bc05682..9e31bc1aef9a7 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -116,6 +116,18 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple Setting.Property.NodeScope ); + /** + * Maximum number of shards to try and close concurrently. Defaults to the smaller of {@code node.processors} and {@code 10}, but can be + * set to any positive integer. 
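
The indices.store.max_concurrent_closing_shards setting documented above defaults to the smaller of node.processors and 10. A sketch of the same bounded-concurrency idea, using a semaphore that blocks excess tasks where the real ThrottledTaskRunner queues them instead; all names here are illustrative:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Semaphore;

    final class ShardCloseThrottleSketch {
        private final Semaphore permits;
        private final ExecutorService delegate;

        ShardCloseThrottleSketch(int maxConcurrent, ExecutorService delegate) {
            this.permits = new Semaphore(maxConcurrent);
            this.delegate = delegate;
        }

        // Run the close task, but never more than maxConcurrent at once.
        void submit(Runnable closeTask) {
            delegate.execute(() -> {
                permits.acquireUninterruptibly();
                try {
                    closeTask.run();
                } finally {
                    permits.release();
                }
            });
        }

        public static void main(String[] args) {
            int limit = Math.min(10, Runtime.getRuntime().availableProcessors()); // the setting's default
            ExecutorService pool = Executors.newCachedThreadPool();
            ShardCloseThrottleSketch throttle = new ShardCloseThrottleSketch(limit, pool);
            for (int i = 0; i < 50; i++) {
                int shard = i;
                throttle.submit(() -> System.out.println("closing shard " + shard));
            }
            pool.shutdown();
        }
    }
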
+ */ + public static final Setting CONCURRENT_SHARD_CLOSE_LIMIT = Setting.intSetting( + "indices.store.max_concurrent_closing_shards", + settings -> Integer.toString(Math.min(10, EsExecutors.NODE_PROCESSORS_SETTING.get(settings).roundUp())), + 1, + Integer.MAX_VALUE, + Setting.Property.NodeScope + ); + final AllocatedIndices> indicesService; private final ClusterService clusterService; private final ThreadPool threadPool; @@ -1347,7 +1359,7 @@ enum IndexRemovalReason { } } - private static class ShardCloseExecutor implements Executor { + static class ShardCloseExecutor implements Executor { private final ThrottledTaskRunner throttledTaskRunner; @@ -1360,8 +1372,11 @@ private static class ShardCloseExecutor implements Executor { // can't close the old ones down fast enough. Maybe we could block or throttle new shards starting while old shards are still // shutting down, given that starting new shards is already async. Since this seems unlikely in practice, we opt for the simple // approach here. - final var maxThreads = Math.max(EsExecutors.NODE_PROCESSORS_SETTING.get(settings).roundUp(), 10); - throttledTaskRunner = new ThrottledTaskRunner(IndicesClusterStateService.class.getCanonicalName(), maxThreads, delegate); + throttledTaskRunner = new ThrottledTaskRunner( + IndicesClusterStateService.class.getCanonicalName(), + CONCURRENT_SHARD_CLOSE_LIMIT.get(settings), + delegate + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index eba96e15de50c..1528b11773a67 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -229,6 +229,11 @@ private void deleteShardIfExistElseWhere( long clusterStateVersion, IndexShardRoutingTable indexShardRoutingTable ) { + if (DiscoveryNode.isStateless(clusterService.getSettings())) { + deleteShardStoreOnApplierThread(indexShardRoutingTable.shardId(), clusterStateVersion); + return; + } + List> requests = new ArrayList<>(indexShardRoutingTable.size()); String indexUUID = indexShardRoutingTable.shardId().getIndex().getUUID(); for (int copy = 0; copy < indexShardRoutingTable.size(); copy++) { @@ -320,34 +325,37 @@ private void allNodesResponded() { return; } - clusterService.getClusterApplierService() - .runOnApplierThread("indices_store ([" + shardId + "] active fully on other nodes)", Priority.HIGH, currentState -> { - if (clusterStateVersion != currentState.getVersion()) { - logger.trace( - "not deleting shard {}, the update task state version[{}] is not equal to cluster state before " - + "shard active api call [{}]", - shardId, - currentState.getVersion(), - clusterStateVersion - ); - return; - } - try { - indicesService.deleteShardStore("no longer used", shardId, currentState); - } catch (Exception ex) { - logger.debug(() -> format("%s failed to delete unallocated shard, ignoring", shardId), ex); - } - }, new ActionListener<>() { - @Override - public void onResponse(Void unused) {} - - @Override - public void onFailure(Exception e) { - logger.error(() -> format("%s unexpected error during deletion of unallocated shard", shardId), e); - } - }); + deleteShardStoreOnApplierThread(shardId, clusterStateVersion); } + } + private void deleteShardStoreOnApplierThread(ShardId shardId, long clusterStateVersion) { + clusterService.getClusterApplierService() + .runOnApplierThread("indices_store ([" + shardId + "] active fully on other nodes)", 
Priority.HIGH, currentState -> { + if (clusterStateVersion != currentState.getVersion()) { + logger.trace( + "not deleting shard {}, the update task state version[{}] is not equal to cluster state before " + + "shard active api call [{}]", + shardId, + currentState.getVersion(), + clusterStateVersion + ); + return; + } + try { + indicesService.deleteShardStore("no longer used", shardId, currentState); + } catch (Exception ex) { + logger.debug(() -> format("%s failed to delete unallocated shard, ignoring", shardId), ex); + } + }, new ActionListener<>() { + @Override + public void onResponse(Void unused) {} + + @Override + public void onFailure(Exception e) { + logger.error(() -> format("%s unexpected error during deletion of unallocated shard", shardId), e); + } + }); } private class ShardActiveRequestHandler implements TransportRequestHandler { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 7982024911beb..3dfeb21dd6d9f 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.index.VersionType; @@ -190,8 +191,8 @@ public T getFieldValue(String path, Class clazz) { * or if the field that is found at the provided path is not of the expected type. */ public T getFieldValue(String path, Class clazz, boolean ignoreMissing) { - FieldPath fieldPath = new FieldPath(path); - Object context = fieldPath.initialContext; + final FieldPath fieldPath = FieldPath.of(path); + Object context = fieldPath.initialContext(this); for (String pathElement : fieldPath.pathElements) { ResolveResult result = resolve(pathElement, path, context); if (result.wasSuccessful) { @@ -261,8 +262,8 @@ public boolean hasField(String path) { * @throws IllegalArgumentException if the path is null, empty or invalid. */ public boolean hasField(String path, boolean failOutOfRange) { - FieldPath fieldPath = new FieldPath(path); - Object context = fieldPath.initialContext; + final FieldPath fieldPath = FieldPath.of(path); + Object context = fieldPath.initialContext(this); for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { String pathElement = fieldPath.pathElements[i]; if (context == null) { @@ -329,8 +330,8 @@ public boolean hasField(String path, boolean failOutOfRange) { * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. 
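
The IngestDocument accessors above all walk a dotted path element by element, and the FieldPath change that follows memoizes the parsed path with a clear-when-full cache. Both ideas in one small sketch; the split on "." is a simplification, since the real FieldPath also strips the _ingest and _source prefixes:

    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    final class FieldPathSketch {
        private static final int MAX_SIZE = 512;
        private static final Map<String, String[]> CACHE = new ConcurrentHashMap<>();

        // Parse "foo.bar.baz" once and memoize; wipe the cache rather than evict when it fills up.
        static String[] pathElements(String path) {
            String[] cached = CACHE.get(path);
            if (cached != null) {
                return cached;
            }
            String[] parsed = path.split("\\.");
            if (CACHE.size() > MAX_SIZE) {
                CACHE.clear();
            }
            CACHE.put(path, parsed);
            return parsed;
        }

        // Walk nested maps element by element, as the accessors above do.
        static Object resolve(Map<String, Object> context, String path) {
            Object current = context;
            for (String element : pathElements(path)) {
                if (current instanceof Map<?, ?> map) {
                    current = map.get(element);
                } else {
                    return null;
                }
            }
            return current;
        }

        public static void main(String[] args) {
            Map<String, Object> doc = Map.of("foo", Map.of("bar", List.of(1, 2, 3)));
            System.out.println(resolve(doc, "foo.bar")); // [1, 2, 3]
        }
    }
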
*/ public void removeField(String path) { - FieldPath fieldPath = new FieldPath(path); - Object context = fieldPath.initialContext; + final FieldPath fieldPath = FieldPath.of(path); + Object context = fieldPath.initialContext(this); for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { ResolveResult result = resolve(fieldPath.pathElements[i], path, context); if (result.wasSuccessful) { @@ -544,8 +545,8 @@ public void setFieldValue(String path, Object value, boolean ignoreEmptyValue) { } private void setFieldValue(String path, Object value, boolean append, boolean allowDuplicates) { - FieldPath fieldPath = new FieldPath(path); - Object context = fieldPath.initialContext; + final FieldPath fieldPath = FieldPath.of(path); + Object context = fieldPath.initialContext(this); for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { String pathElement = fieldPath.pathElements[i]; if (context == null) { @@ -998,21 +999,45 @@ public String getFieldName() { } } - private class FieldPath { + private static final class FieldPath { - private final String[] pathElements; - private final Object initialContext; + private static final int MAX_SIZE = 512; + private static final Map CACHE = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); - private FieldPath(String path) { + // constructing a new FieldPath requires that we parse a String (e.g. "foo.bar.baz") into an array + // of path elements (e.g. ["foo", "bar", "baz"]). Calling String#split results in the allocation + // of an ArrayList to hold the results, then a new String is created for each path element, and + // then finally a String[] is allocated to hold the actual result -- in addition to all that, we + // do some processing ourselves on the path and path elements to validate and prepare them. + // the above CACHE and the below 'FieldPath.of' method allow us to almost always avoid this work. + + static FieldPath of(String path) { if (Strings.isEmpty(path)) { throw new IllegalArgumentException("path cannot be null nor empty"); } + FieldPath res = CACHE.get(path); + if (res != null) { + return res; + } + res = new FieldPath(path); + if (CACHE.size() > MAX_SIZE) { + CACHE.clear(); + } + CACHE.put(path, res); + return res; + } + + private final String[] pathElements; + private final boolean useIngestContext; + + // you shouldn't call this directly, use the FieldPath.of method above instead! + private FieldPath(String path) { String newPath; if (path.startsWith(INGEST_KEY_PREFIX)) { - initialContext = ingestMetadata; + useIngestContext = true; newPath = path.substring(INGEST_KEY_PREFIX.length()); } else { - initialContext = ctxMap; + useIngestContext = false; if (path.startsWith(SOURCE_PREFIX)) { newPath = path.substring(SOURCE_PREFIX.length()); } else { @@ -1025,6 +1050,9 @@ private FieldPath(String path) { } } + public Object initialContext(IngestDocument document) { + return useIngestContext ? 
document.getIngestMetadata() : document.getCtxMap(); + } } private static class ResolveResult { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index b819a1686d23c..4c61a41f7cf8d 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.bulk.FailureStoreMetrics; +import org.elasticsearch.action.bulk.IndexDocFailureStoreStatus; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.DeletePipelineRequest; @@ -729,12 +730,34 @@ void validatePipeline(Map ingestInfos, String pipelin ExceptionsHelper.rethrowAndSuppress(exceptions); } - private record IngestPipelinesExecutionResult(boolean success, boolean shouldKeep, Exception exception, String failedIndex) { + private record IngestPipelinesExecutionResult( + boolean success, + boolean shouldKeep, + Exception exception, + String failedIndex, + IndexDocFailureStoreStatus failureStoreStatus + ) { - private static final IngestPipelinesExecutionResult SUCCESSFUL_RESULT = new IngestPipelinesExecutionResult(true, true, null, null); - private static final IngestPipelinesExecutionResult DISCARD_RESULT = new IngestPipelinesExecutionResult(true, false, null, null); + private static final IngestPipelinesExecutionResult SUCCESSFUL_RESULT = new IngestPipelinesExecutionResult( + true, + true, + null, + null, + IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN + ); + private static final IngestPipelinesExecutionResult DISCARD_RESULT = new IngestPipelinesExecutionResult( + true, + false, + null, + null, + IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN + ); private static IngestPipelinesExecutionResult failAndStoreFor(String index, Exception e) { - return new IngestPipelinesExecutionResult(false, true, e, index); + return new IngestPipelinesExecutionResult(false, true, e, index, IndexDocFailureStoreStatus.USED); + } + + private static IngestPipelinesExecutionResult failWithoutStoringIn(String index, Exception e) { + return new IngestPipelinesExecutionResult(false, true, e, index, IndexDocFailureStoreStatus.NOT_ENABLED); } } @@ -764,7 +787,7 @@ public void executeBulkRequest( final IntConsumer onDropped, final Function resolveFailureStore, final TriConsumer onStoreFailure, - final BiConsumer onFailure, + final TriConsumer onFailure, final BiConsumer onCompletion, final Executor executor ) { @@ -821,18 +844,26 @@ public void onResponse(IngestPipelinesExecutionResult result) { firstPipeline.getMetrics().postIngestBytes(indexRequest.ramBytesUsed()); } } else { - // We were given a failure result in the onResponse method, so we must store the failure - // Recover the original document state, track a failed ingest, and pass it along - updateIndexRequestMetadata(indexRequest, originalDocumentMetadata); totalMetrics.ingestFailed(); - onStoreFailure.apply(slot, result.failedIndex, result.exception); + if (IndexDocFailureStoreStatus.NOT_ENABLED.equals(result.failureStoreStatus)) { + // A failure result, but despite the target being a data stream, it does not have failure + // storage enabled currently. 
Capture the status in the onFailure call and skip any further + // processing + onFailure.apply(slot, result.exception, result.failureStoreStatus); + } else { + // We were given a failure result in the onResponse method, so we must store the failure + // Recover the original document state, track a failed ingest, and pass it along + updateIndexRequestMetadata(indexRequest, originalDocumentMetadata); + onStoreFailure.apply(slot, result.failedIndex, result.exception); + } } } @Override public void onFailure(Exception e) { + // The target of the request does not allow failure storage, or failed for unforeseen reason totalMetrics.ingestFailed(); - onFailure.accept(slot, e); + onFailure.apply(slot, e, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN); } }, () -> { @@ -954,15 +985,15 @@ private void executePipelines( if (failureStoreResolution != null && failureStoreResolution) { failureStoreMetrics.incrementFailureStore(originalIndex, errorType, FailureStoreMetrics.ErrorLocation.PIPELINE); listener.onResponse(IngestPipelinesExecutionResult.failAndStoreFor(originalIndex, e)); + } else if (failureStoreResolution != null) { + // If this document targeted a data stream that didn't have the failure store enabled, we increment + // the rejected counter. + // We also increment the total counter because this request will not reach the code that increments + // the total counter for non-rejected documents. + failureStoreMetrics.incrementTotal(originalIndex); + failureStoreMetrics.incrementRejected(originalIndex, errorType, FailureStoreMetrics.ErrorLocation.PIPELINE, false); + listener.onResponse(IngestPipelinesExecutionResult.failWithoutStoringIn(originalIndex, e)); } else { - if (failureStoreResolution != null) { - // If this document targeted a data stream that didn't have the failure store enabled, we increment - // the rejected counter. - // We also increment the total counter because this request will not reach the code that increments - // the total counter for non-rejected documents. - failureStoreMetrics.incrementTotal(originalIndex); - failureStoreMetrics.incrementRejected(originalIndex, errorType, FailureStoreMetrics.ErrorLocation.PIPELINE, false); - } listener.onFailure(e); } }; diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java index c75cd3a022cb8..da1b99f4f0759 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -57,14 +57,17 @@ public record IngestStats(Stats totalStats, List pipelineStats, Ma * Read from a stream. */ public static IngestStats read(StreamInput in) throws IOException { - var stats = new Stats(in); + var stats = readStats(in); var size = in.readVInt(); + if (stats == Stats.IDENTITY && size == 0) { + return IDENTITY; + } var pipelineStats = new ArrayList(size); var processorStats = Maps.>newMapWithExpectedSize(size); for (var i = 0; i < size; i++) { var pipelineId = in.readString(); - var pipelineStat = new Stats(in); + var pipelineStat = readStats(in); var byteStat = in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? 
new ByteStats(in) : new ByteStats(0, 0); pipelineStats.add(new PipelineStat(pipelineId, pipelineStat, byteStat)); int processorsSize = in.readVInt(); @@ -72,7 +75,7 @@ public static IngestStats read(StreamInput in) throws IOException { for (var j = 0; j < processorsSize; j++) { var processorName = in.readString(); var processorType = in.readString(); - var processorStat = new Stats(in); + var processorStat = readStats(in); processorStatsPerPipeline.add(new ProcessorStat(processorName, processorType, processorStat)); } processorStats.put(pipelineId, Collections.unmodifiableList(processorStatsPerPipeline)); @@ -167,6 +170,21 @@ static Map> merge(Map> f return totalsPerPipelineProcessor; } + /** + * Read {@link Stats} from a stream. + */ + private static Stats readStats(StreamInput in) throws IOException { + long ingestCount = in.readVLong(); + long ingestTimeInMillis = in.readVLong(); + long ingestCurrent = in.readVLong(); + long ingestFailedCount = in.readVLong(); + if (ingestCount == 0 && ingestTimeInMillis == 0 && ingestCurrent == 0 && ingestFailedCount == 0) { + return Stats.IDENTITY; + } else { + return new Stats(ingestCount, ingestTimeInMillis, ingestCurrent, ingestFailedCount); + } + } + public record Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount) implements Writeable, @@ -174,13 +192,6 @@ public record Stats(long ingestCount, long ingestTimeInMillis, long ingestCurren public static final Stats IDENTITY = new Stats(0, 0, 0, 0); - /** - * Read from a stream. - */ - public Stats(StreamInput in) throws IOException { - this(in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong()); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(ingestCount); diff --git a/server/src/main/java/org/elasticsearch/internal/VersionExtension.java b/server/src/main/java/org/elasticsearch/internal/VersionExtension.java index 5a6c7c1f3671d..fc947738c9e33 100644 --- a/server/src/main/java/org/elasticsearch/internal/VersionExtension.java +++ b/server/src/main/java/org/elasticsearch/internal/VersionExtension.java @@ -12,16 +12,16 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.index.IndexVersion; -import java.util.List; +import java.util.Collection; /** * Allows plugging in current version elements. */ public interface VersionExtension { /** - * Returns list of {@link TransportVersion} defined by extension + * Returns additional {@link TransportVersion} defined by extension */ - List getTransportVersions(); + Collection getTransportVersions(); /** * Returns the {@link IndexVersion} that Elasticsearch should use. 
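
The readStats helper above hands back the shared Stats.IDENTITY instead of allocating when every counter is zero, which is the common case across many idle pipelines and processors. The same trick in miniature, with DataInput standing in for StreamInput and fixed-width longs for the vlong encoding:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    final class IdentityStatsSketch {
        record Stats(long count, long timeInMillis, long current, long failedCount) {
            static final Stats IDENTITY = new Stats(0, 0, 0, 0);
        }

        // Return the shared all-zero instance rather than allocating a fresh one.
        static Stats readStats(DataInput in) throws IOException {
            long count = in.readLong();
            long time = in.readLong();
            long current = in.readLong();
            long failed = in.readLong();
            if (count == 0 && time == 0 && current == 0 && failed == 0) {
                return Stats.IDENTITY;
            }
            return new Stats(count, time, current, failed);
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(bytes)) {
                for (int i = 0; i < 4; i++) {
                    out.writeLong(0);
                }
            }
            Stats stats = readStats(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(stats == Stats.IDENTITY); // true: no allocation for the all-zero case
        }
    }
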
diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 80c9aafaa84b4..f4b390c9863f2 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -652,7 +652,7 @@ protected void validateNodeBeforeAcceptingRequests( * Writes a file to the logs dir containing the ports for the given transport type */ private void writePortsFile(String type, BoundTransportAddress boundAddress) { - Path tmpPortsFile = environment.logsFile().resolve(type + ".ports.tmp"); + Path tmpPortsFile = environment.logsDir().resolve(type + ".ports.tmp"); try (BufferedWriter writer = Files.newBufferedWriter(tmpPortsFile, Charset.forName("UTF-8"))) { for (TransportAddress address : boundAddress.boundAddresses()) { InetAddress inetAddress = InetAddress.getByName(address.getAddress()); @@ -661,7 +661,7 @@ private void writePortsFile(String type, BoundTransportAddress boundAddress) { } catch (IOException e) { throw new RuntimeException("Failed to write ports file", e); } - Path portsFile = environment.logsFile().resolve(type + ".ports"); + Path portsFile = environment.logsDir().resolve(type + ".ports"); try { Files.move(tmpPortsFile, portsFile, StandardCopyOption.ATOMIC_MOVE); } catch (IOException e) { diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 61ac8bbbfc69a..da568be30e0df 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -446,7 +446,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr ); } - if (initialEnvironment.dataFiles().length > 1) { + if (initialEnvironment.dataDirs().length > 1) { // NOTE: we use initialEnvironment here, but assertEquivalent below ensures the data paths do not change deprecationLogger.warn( DeprecationCategory.SETTINGS, @@ -467,10 +467,10 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr if (logger.isDebugEnabled()) { logger.debug( "using config [{}], data [{}], logs [{}], plugins [{}]", - initialEnvironment.configFile(), - Arrays.toString(initialEnvironment.dataFiles()), - initialEnvironment.logsFile(), - initialEnvironment.pluginsFile() + initialEnvironment.configDir(), + Arrays.toString(initialEnvironment.dataDirs()), + initialEnvironment.logsDir(), + initialEnvironment.pluginsDir() ); } @@ -487,7 +487,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr * Create the environment based on the finalized view of the settings. This is to ensure that components get the same setting * values, no matter they ask for them from. 
*/ - environment = new Environment(settings, initialEnvironment.configFile()); + environment = new Environment(settings, initialEnvironment.configDir()); Environment.assertEquivalent(initialEnvironment, environment); modules.bindToInstance(Environment.class, environment); @@ -847,7 +847,6 @@ public Map searchFields() { .scriptService(scriptService) .clusterService(clusterService) .client(client) - .featureService(featureService) .metaStateService(metaStateService) .valuesSourceRegistry(searchModule.getValuesSourceRegistry()) .requestCacheKeyDifferentiator(searchModule.getRequestCacheKeyDifferentiator()) @@ -1067,7 +1066,7 @@ public Map searchFields() { actionModule.getReservedClusterStateService().installStateHandler(new ReservedRepositoryAction(repositoriesService)); actionModule.getReservedClusterStateService().installStateHandler(new ReservedPipelineAction()); - FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService = new FileSettingsHealthIndicatorService(); + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService = new FileSettingsHealthIndicatorService(settings); FileSettingsService fileSettingsService = new FileSettingsService( clusterService, actionModule.getReservedClusterStateService(), @@ -1147,7 +1146,6 @@ public Map searchFields() { clusterService, threadPool, systemIndices, - featureService, clusterModule.getIndexNameExpressionResolver(), metadataUpdateSettingsService, metadataCreateIndexService @@ -1161,7 +1159,6 @@ public Map searchFields() { discoveryModule.getCoordinator(), clusterService, transportService, - featureService, threadPool, telemetryProvider, repositoriesService, @@ -1333,7 +1330,6 @@ private Module loadDiagnosticServices( Coordinator coordinator, ClusterService clusterService, TransportService transportService, - FeatureService featureService, ThreadPool threadPool, TelemetryProvider telemetryProvider, RepositoriesService repositoriesService, @@ -1351,7 +1347,7 @@ private Module loadDiagnosticServices( var serverHealthIndicatorServices = Stream.of( new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), new RepositoryIntegrityHealthIndicatorService(clusterService), - new DiskHealthIndicatorService(clusterService, featureService), + new DiskHealthIndicatorService(clusterService), new ShardsCapacityHealthIndicatorService(clusterService), fileSettingsHealthIndicatorService ); @@ -1369,7 +1365,7 @@ private Module loadDiagnosticServices( healthService, telemetryProvider ); - HealthMetadataService healthMetadataService = HealthMetadataService.create(clusterService, featureService, settings); + HealthMetadataService healthMetadataService = HealthMetadataService.create(clusterService, settings); List> healthTrackers = List.of( new DiskHealthTracker(nodeService, clusterService), @@ -1622,7 +1618,7 @@ private DiscoveryModule createDiscoveryModule( pluginsService.filterPlugins(DiscoveryPlugin.class).toList(), pluginsService.filterPlugins(ClusterCoordinationPlugin.class).toList(), allocationService, - environment.configFile(), + environment.configDir(), gatewayMetaState, rerouteService, fsHealthService, @@ -1644,7 +1640,6 @@ private Module loadPersistentTasksService( ClusterService clusterService, ThreadPool threadPool, SystemIndices systemIndices, - FeatureService featureService, IndexNameExpressionResolver indexNameExpressionResolver, MetadataUpdateSettingsService metadataUpdateSettingsService, MetadataCreateIndexService metadataCreateIndexService @@ -1661,7 +1656,6 @@ private Module 
loadPersistentTasksService( HealthNodeTaskExecutor healthNodeTaskExecutor = HealthNodeTaskExecutor.create( clusterService, persistentTasksService, - featureService, settingsModule.getSettings(), clusterService.getClusterSettings() ); diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index 4b7524a7ac011..b9e58863cad6c 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -53,7 +53,7 @@ class NodeServiceProvider { PluginsService newPluginService(Environment initialEnvironment, PluginsLoader pluginsLoader) { // this creates a PluginsService with an empty list of classpath plugins - return new PluginsService(initialEnvironment.settings(), initialEnvironment.configFile(), pluginsLoader); + return new PluginsService(initialEnvironment.settings(), initialEnvironment.configDir(), pluginsLoader); } ScriptService newScriptService( diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 003aaa20d6ac5..bfec7b3723231 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -113,7 +113,7 @@ public void createPersistentTask( Params taskParams, ActionListener> listener ) { - submitUnbatchedTask("create persistent task", new ClusterStateUpdateTask() { + submitUnbatchedTask("create persistent task " + taskName + " [" + taskId + "]", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { PersistentTasksCustomMetadata.Builder builder = builder(currentState); @@ -166,9 +166,9 @@ public void completePersistentTask(String id, long allocationId, Exception failu final String source; if (failure != null) { logger.warn("persistent task " + id + " failed", failure); - source = "finish persistent task (failed)"; + source = "finish persistent task [" + id + "] (failed)"; } else { - source = "finish persistent task (success)"; + source = "finish persistent task [" + id + "] (success)"; } submitUnbatchedTask(source, new ClusterStateUpdateTask() { @Override @@ -212,7 +212,7 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) * @param listener the listener that will be called when task is removed */ public void removePersistentTask(String id, ActionListener> listener) { - submitUnbatchedTask("remove persistent task", new ClusterStateUpdateTask() { + submitUnbatchedTask("remove persistent task [" + id + "]", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { PersistentTasksCustomMetadata.Builder tasksInProgress = builder(currentState); @@ -295,7 +295,7 @@ public void unassignPersistentTask( final String reason, final ActionListener> listener ) { - submitUnbatchedTask("unassign persistent task from any node", new ClusterStateUpdateTask() { + submitUnbatchedTask("unassign persistent task [" + taskId + "] from any node", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) throws Exception { PersistentTasksCustomMetadata.Builder tasksInProgress = builder(currentState); diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java 
b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java index 9eb9a93439bc1..93de387845567 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java @@ -8,6 +8,8 @@ */ package org.elasticsearch.persistent; +import org.elasticsearch.core.Strings; + import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -23,7 +25,17 @@ public class PersistentTasksExecutorRegistry { public PersistentTasksExecutorRegistry(Collection> taskExecutors) { Map> map = new HashMap<>(); for (PersistentTasksExecutor executor : taskExecutors) { - map.put(executor.getTaskName(), executor); + final var old = map.put(executor.getTaskName(), executor); + if (old != null) { + final var message = Strings.format( + "task [%s] is already registered with [%s], cannot re-register with [%s]", + executor.getTaskName(), + old, + executor + ); + assert false : message; + throw new IllegalStateException(message); + } } this.taskExecutors = Collections.unmodifiableMap(map); } diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index f5670ebd8a543..bb9f7ad4b7bf1 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -290,8 +290,7 @@ public RetrieverSpec(ParseField name, RetrieverParser parser) { /** * Specification of custom {@link RetrieverBuilder}. * - * @param name the name by which this retriever might be parsed or deserialized. Make sure that the retriever builder returns - * this name for {@link NamedWriteable#getWriteableName()}. + * @param name the name by which this retriever might be parsed or deserialized. * @param parser the parser the reads the retriever builder from xcontent */ public RetrieverSpec(String name, RetrieverParser parser) { diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 2ebbf24d65670..2c429954f5f49 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -605,6 +605,11 @@ public int hashCode() { return Objects.hash(snapshotIds, snapshotsDetails, indices, indexSnapshots, shardGenerations, indexMetaDataGenerations); } + @Override + public String toString() { + return Strings.format("RepositoryData[uuid=%s,gen=%s]", uuid, genId); + } + /** * Resolve the index name to the index id specific to the repository, * throwing an exception if the index could not be resolved. 
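
The registry constructor above turns a silent overwrite into a hard failure by checking the return value of Map.put. That duplicate-registration guard on its own:

    import java.util.HashMap;
    import java.util.Map;

    final class RegistryGuardSketch {
        private final Map<String, Runnable> executors = new HashMap<>();

        // Map.put returns the previous mapping, so non-null means the name was already taken.
        void register(String taskName, Runnable executor) {
            Runnable old = executors.put(taskName, executor);
            if (old != null) {
                throw new IllegalStateException(
                    "task [" + taskName + "] is already registered with [" + old + "], cannot re-register with [" + executor + "]"
                );
            }
        }

        public static void main(String[] args) {
            RegistryGuardSketch registry = new RegistryGuardSketch();
            registry.register("health-node", () -> {});
            try {
                registry.register("health-node", () -> {});
            } catch (IllegalStateException e) {
                System.out.println(e.getMessage());
            }
        }
    }
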
diff --git a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index b88524586abe0..f284faf8f304a 100644 --- a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -92,13 +92,13 @@ public FsRepository( ); throw new RepositoryException(metadata.name(), "missing location"); } - Path locationFile = environment.resolveRepoFile(location); + Path locationFile = environment.resolveRepoDir(location); if (locationFile == null) { - if (environment.repoFiles().length > 0) { + if (environment.repoDirs().length > 0) { logger.warn( "The specified location [{}] doesn't start with any " + "repository paths specified by the path.repo setting: [{}] ", location, - environment.repoFiles() + environment.repoDirs() ); throw new RepositoryException( metadata.name(), @@ -127,7 +127,7 @@ public FsRepository( @Override protected BlobStore createBlobStore() throws Exception { final String location = REPOSITORIES_LOCATION_SETTING.get(getMetadata().settings()); - final Path locationFile = environment.resolveRepoFile(location); + final Path locationFile = environment.resolveRepoDir(location); return new FsBlobStore(bufferSize, locationFile, isReadOnly()); } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index e36604f9a58c8..9d1f6b1d50676 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -21,6 +21,8 @@ import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.file.MasterNodeFileWatchingService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; @@ -80,7 +82,7 @@ public FileSettingsService( Environment environment, FileSettingsHealthIndicatorService healthIndicatorService ) { - super(clusterService, environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); + super(clusterService, environment.configDir().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); this.stateService = stateService; this.healthIndicatorService = healthIndicatorService; } @@ -212,7 +214,7 @@ protected void onProcessFileChangesException(Exception e) { } @Override - protected void processInitialFileMissing() throws ExecutionException, InterruptedException, IOException { + protected void processInitialFileMissing() throws ExecutionException, InterruptedException { PlainActionFuture completion = new PlainActionFuture<>(); logger.info("setting file [{}] not found, initializing [{}] as empty", watchedFile(), NAMESPACE); stateService.initEmpty(NAMESPACE, completion); @@ -236,11 +238,29 @@ public static class FileSettingsHealthIndicatorService implements HealthIndicato ) ); + /** + * We want a length limit so we don't blow past the indexing limit in the case of a long description string. 
+ * This is an {@code OperatorDynamic} setting so that if the truncation hampers troubleshooting efforts, + * the operator could override it and retry the operation without necessarily restarting the cluster. + */ + public static final String DESCRIPTION_LENGTH_LIMIT_KEY = "fileSettings.descriptionLengthLimit"; + static final Setting<Integer> DESCRIPTION_LENGTH_LIMIT = Setting.intSetting( + DESCRIPTION_LENGTH_LIMIT_KEY, + 100, + 1, // Need room for the ellipsis + Setting.Property.OperatorDynamic + ); + + private final Settings settings; private boolean isActive = false; private long changeCount = 0; private long failureStreak = 0; private String mostRecentFailure = null; + public FileSettingsHealthIndicatorService(Settings settings) { + this.settings = settings; + } + public synchronized void startOccurred() { isActive = true; failureStreak = 0; @@ -262,7 +282,16 @@ public synchronized void successOccurred() { public synchronized void failureOccurred(String description) { ++failureStreak; - mostRecentFailure = description; + mostRecentFailure = limitLength(description); + } + + private String limitLength(String description) { + int descriptionLengthLimit = DESCRIPTION_LENGTH_LIMIT.get(settings); + if (description.length() > descriptionLengthLimit) { + return description.substring(0, descriptionLengthLimit - 1) + "…"; + } else { + return description; + } } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java index da7a7d3379ee0..9ab7f275252ae 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestUpdateDesiredNodesAction.java @@ -12,8 +12,6 @@ import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.metadata.DesiredNode; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -27,10 +25,6 @@ public class RestUpdateDesiredNodesAction extends BaseRestHandler { - private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestUpdateDesiredNodesAction.class); - private static final String VERSION_DEPRECATION_MESSAGE = - "[version removal] Specifying node_version in desired nodes requests is deprecated."; - @Override public String getName() { return "update_desired_nodes"; } @@ -59,10 +53,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli ); } - if (updateDesiredNodesRequest.getNodes().stream().anyMatch(DesiredNode::hasVersion)) { - deprecationLogger.compatibleCritical("desired_nodes_version", VERSION_DEPRECATION_MESSAGE); - } - return restChannel -> client.execute( UpdateDesiredNodesAction.INSTANCE, updateDesiredNodesRequest,
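The FileSettingsService hunk above caps the stored failure description: substring(0, limit - 1) plus a one-character ellipsis keeps the truncated string exactly at the configured limit, which is why the setting's minimum is 1. A standalone sketch of the same rule (the hard-coded constant stands in for the dynamic fileSettings.descriptionLengthLimit setting):

    final class DescriptionTruncation {
        private static final int LIMIT = 100; // illustrative; the PR reads a dynamic cluster setting

        static String limitLength(String description) {
            if (description.length() > LIMIT) {
                // LIMIT - 1 original characters plus "…" == exactly LIMIT characters
                return description.substring(0, LIMIT - 1) + "…";
            }
            return description;
        }

        public static void main(String[] args) {
            System.out.println(limitLength("short enough"));            // unchanged
            System.out.println(limitLength("x".repeat(250)).length());  // prints 100
        }
    }
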
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java index 9083c781ae167..334e68648d853 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java @@ -26,5 +26,11 @@ public class CreateIndexCapabilities { */ private static final String LOOKUP_INDEX_MODE_CAPABILITY = "lookup_index_mode"; - public static final Set<String> CAPABILITIES = Set.of(LOGSDB_INDEX_MODE_CAPABILITY, LOOKUP_INDEX_MODE_CAPABILITY); + private static final String NESTED_DENSE_VECTOR_SYNTHETIC_TEST = "nested_dense_vector_synthetic_test"; + + public static final Set<String> CAPABILITIES = Set.of( + LOGSDB_INDEX_MODE_CAPABILITY, + LOOKUP_INDEX_MODE_CAPABILITY, + NESTED_DENSE_VECTOR_SYNTHETIC_TEST + ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index 06e9b02a92934..7659e096c115f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeCapabilities; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -19,6 +20,7 @@ import java.io.IOException; import java.util.List; +import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -49,4 +51,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } } + @Override + public Set<String> supportedCapabilities() { + return AnalyzeCapabilities.CAPABILITIES; + } + } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResolveClusterAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResolveClusterAction.java index 886c4da47d974..3dbbde65c6428 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResolveClusterAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResolveClusterAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestCancellableNodeClient; @@ -71,6 +72,12 @@ protected BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest request clusterInfoOnly, true ); + + String timeout = request.param("timeout"); + if (timeout != null) { + resolveRequest.setTimeout(TimeValue.parseTimeValue(timeout, "timeout")); + } + return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).admin() .indices() .execute(TransportResolveClusterAction.TYPE, resolveRequest, new RestToXContentListener<>(channel));
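The RestResolveClusterAction hunk above threads an optional timeout query parameter through TimeValue.parseTimeValue, so callers can bound how long the endpoint waits on remote clusters, e.g. GET _resolve/cluster/logs-*?timeout=5s (an assumed request shape, shown only for illustration). A small sketch of the parsing step itself:

    import org.elasticsearch.core.TimeValue;

    final class TimeoutParam {
        public static void main(String[] args) {
            // TimeValue.parseTimeValue understands the usual duration syntax ("500ms", "5s", "1m");
            // the second argument names the offending parameter in any parse-failure message.
            TimeValue timeout = TimeValue.parseTimeValue("5s", "timeout");
            assert timeout.millis() == 5000;
        }
    }
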
diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java index 94fa60762800d..f8dc26e9c468a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java @@ -9,11 +9,9 @@ package org.elasticsearch.rest.action.cat; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.core.TimeValue; @@ -99,24 +97,6 @@ protected Table getTableWithHeader(RestRequest request) { private Table buildTable(RestRequest req, GetSnapshotsResponse getSnapshotsResponse) { Table table = getTableWithHeader(req); - if (getSnapshotsResponse.isFailed()) { - ElasticsearchException causes = null; - - for (ElasticsearchException e : getSnapshotsResponse.getFailures().values()) { - if (causes == null) { - causes = e; - } else { - causes.addSuppressed(e); - } - } - throw new ElasticsearchException( - "Repositories [" - + Strings.collectionToCommaDelimitedString(getSnapshotsResponse.getFailures().keySet()) - + "] failed to retrieve snapshots", - causes - ); - } - for (SnapshotInfo snapshotStatus : getSnapshotsResponse.getSnapshots()) { table.startRow(); diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 47d3ed337af73..7095d3ec92c72 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -21,7 +21,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.routing.IndexRouting; import org.elasticsearch.common.lucene.search.Queries; @@ -77,6 +76,7 @@ import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; +import org.elasticsearch.tasks.CancellableTask; import java.io.IOException; import java.io.UncheckedIOException; @@ -131,7 +131,7 @@ final class DefaultSearchContext extends SearchContext { private CollapseContext collapse; // filter for sliced scroll private SliceBuilder sliceBuilder; - private SearchShardTask task; + private CancellableTask task; private QueryPhaseRankShardContext queryPhaseRankShardContext; /** @@ -433,7 +433,7 @@ public void preProcess() { this.query = buildFilteredQuery(query); if (lowLevelCancellation) { searcher().addQueryCancellation(() -> { - final SearchShardTask task = getTask(); + final CancellableTask task = getTask(); if (task != null) { task.ensureNotCancelled(); } @@ -907,12 +907,12 @@ public void setProfilers(Profilers profilers) { } @Override - public void setTask(SearchShardTask task) { + public void setTask(CancellableTask task) { this.task = task; } @Override - public SearchShardTask getTask() { + public CancellableTask getTask() { return task; } diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 98dd7f9388c1f..3970b6effe70c 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -17,16 +17,20 @@ public final class SearchFeatures implements FeatureSpecification { public static final NodeFeature
LUCENE_10_0_0_UPGRADE = new NodeFeature("lucene_10_upgrade"); + public static final NodeFeature LUCENE_10_1_0_UPGRADE = new NodeFeature("lucene_10_1_upgrade"); @Override public Set getFeatures() { - return Set.of(LUCENE_10_0_0_UPGRADE); + return Set.of(LUCENE_10_0_0_UPGRADE, LUCENE_10_1_0_UPGRADE); } public static final NodeFeature RETRIEVER_RESCORER_ENABLED = new NodeFeature("search.retriever.rescorer.enabled"); + public static final NodeFeature COMPLETION_FIELD_SUPPORTS_DUPLICATE_SUGGESTIONS = new NodeFeature( + "search.completion_field.duplicate.support" + ); @Override public Set getTestFeatures() { - return Set.of(RETRIEVER_RESCORER_ENABLED); + return Set.of(RETRIEVER_RESCORER_ENABLED, COMPLETION_FIELD_SUPPORTS_DUPLICATE_SUGGESTIONS); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 6716c03a3a935..2183ce5646293 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -913,7 +913,7 @@ private static Map setupHighlighters(Settings settings, Lis NamedRegistry highlighters = new NamedRegistry<>("highlighter"); highlighters.register("fvh", new FastVectorHighlighter(settings)); highlighters.register("plain", new PlainHighlighter()); - highlighters.register("unified", new DefaultHighlighter()); + highlighters.register(DefaultHighlighter.NAME, new DefaultHighlighter()); highlighters.extractAndRegister(plugins, SearchPlugin::getHighlighters); return unmodifiableMap(highlighters.getRegistry()); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index efa27b2f3448c..9a6860349a8b3 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -126,6 +126,7 @@ import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.Scheduler; @@ -581,33 +582,36 @@ private void loadOrExecuteQueryPhase(final ShardSearchRequest request, final Sea } } - public void executeQueryPhase(ShardSearchRequest request, SearchShardTask task, ActionListener listener) { - ActionListener finalListener = maybeWrapListenerForStackTrace(listener, request.getChannelVersion(), threadPool); + public void executeQueryPhase(ShardSearchRequest request, CancellableTask task, ActionListener listener) { assert request.canReturnNullResponseIfMatchNoDocs() == false || request.numberOfShards() > 1 : "empty responses require more than one shard"; final IndexShard shard = getShard(request); - rewriteAndFetchShardRequest(shard, request, finalListener.delegateFailure((l, orig) -> { - // check if we can shortcut the query phase entirely. 
- if (orig.canReturnNullResponseIfMatchNoDocs()) { - assert orig.scroll() == null; - ShardSearchRequest clone = new ShardSearchRequest(orig); - CanMatchContext canMatchContext = new CanMatchContext( - clone, - indicesService::indexServiceSafe, - this::findReaderContext, - defaultKeepAlive, - maxKeepAlive - ); - CanMatchShardResponse canMatchResp = canMatch(canMatchContext, false); - if (canMatchResp.canMatch() == false) { - finalListener.onResponse(QuerySearchResult.nullInstance()); - return; + rewriteAndFetchShardRequest( + shard, + request, + maybeWrapListenerForStackTrace(listener, request.getChannelVersion(), threadPool).delegateFailure((l, orig) -> { + // check if we can shortcut the query phase entirely. + if (orig.canReturnNullResponseIfMatchNoDocs()) { + assert orig.scroll() == null; + ShardSearchRequest clone = new ShardSearchRequest(orig); + CanMatchContext canMatchContext = new CanMatchContext( + clone, + indicesService::indexServiceSafe, + this::findReaderContext, + defaultKeepAlive, + maxKeepAlive + ); + CanMatchShardResponse canMatchResp = canMatch(canMatchContext, false); + if (canMatchResp.canMatch() == false) { + l.onResponse(QuerySearchResult.nullInstance()); + return; + } } - } - // TODO: i think it makes sense to always do a canMatch here and - // return an empty response (not null response) in case canMatch is false? - ensureAfterSeqNoRefreshed(shard, orig, () -> executeQueryPhase(orig, task), l); - })); + // TODO: i think it makes sense to always do a canMatch here and + // return an empty response (not null response) in case canMatch is false? + ensureAfterSeqNoRefreshed(shard, orig, () -> executeQueryPhase(orig, task), l); + }) + ); } private void ensureAfterSeqNoRefreshed( @@ -729,7 +733,7 @@ private static void runAsync( * It is the responsibility of the caller to ensure that the ref count is correctly decremented * when the object is no longer needed. 
*/ - private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchShardTask task) throws Exception { + private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, CancellableTask task) throws Exception { final ReaderContext readerContext = createOrGetReaderContext(request); try ( Releasable scope = tracer.withScope(task); @@ -953,7 +957,7 @@ public void executeFetchPhase( }, wrapFailureListener(listener, readerContext, markAsUsed)); } - public void executeFetchPhase(ShardFetchRequest request, SearchShardTask task, ActionListener listener) { + public void executeFetchPhase(ShardFetchRequest request, CancellableTask task, ActionListener listener) { final ReaderContext readerContext = findReaderContext(request.contextId(), request); final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.getShardSearchRequest()); final Releasable markAsUsed = readerContext.markAsUsed(getKeepAlive(shardSearchRequest)); @@ -991,7 +995,7 @@ public void executeFetchPhase(ShardFetchRequest request, SearchShardTask task, A })); } - protected void checkCancelled(SearchShardTask task) { + protected void checkCancelled(CancellableTask task) { // check cancellation as early as possible, as it avoids opening up a Lucene reader on FrozenEngine try { task.ensureNotCancelled(); @@ -1122,7 +1126,7 @@ public void openReaderContext(ShardId shardId, TimeValue keepAlive, ActionListen protected SearchContext createContext( ReaderContext readerContext, ShardSearchRequest request, - SearchShardTask task, + CancellableTask task, ResultsType resultsType, boolean includeAggregations ) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index ef21e4103fd88..f763ac8f795ff 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -211,9 +211,9 @@ private List reducePipelineBuckets(AggregationReduceContext reduceContext, Pi List reducedBuckets = new ArrayList<>(); for (B bucket : getBuckets()) { List aggs = new ArrayList<>(); - for (Aggregation agg : bucket.getAggregations()) { + for (InternalAggregation agg : bucket.getAggregations()) { PipelineTree subTree = pipelineTree.subTree(agg.getName()); - aggs.add(((InternalAggregation) agg).reducePipelines((InternalAggregation) agg, reduceContext, subTree)); + aggs.add(agg.reducePipelines(agg, reduceContext, subTree)); } reducedBuckets.add(createBucket(InternalAggregations.from(aggs), bucket)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java index afeeaa9bd6752..2bc00aeedb832 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java @@ -26,6 +26,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Objects; @@ -188,33 +189,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public Comparator> partiallyBuiltBucketComparator(Aggregator aggregator) { - List>> comparators = new 
ArrayList<>(orderElements.size()); - for (BucketOrder order : orderElements) { - comparators.add(order.partiallyBuiltBucketComparator(aggregator)); + Iterator iterator = orderElements.iterator(); + Comparator> comparator = iterator.next().partiallyBuiltBucketComparator(aggregator); + while (iterator.hasNext()) { + comparator = comparator.thenComparing(iterator.next().partiallyBuiltBucketComparator(aggregator)); } - return (lhs, rhs) -> { - for (Comparator> c : comparators) { - int result = c.compare(lhs, rhs); - if (result != 0) { - return result; - } - } - return 0; - }; + return comparator; } @Override public Comparator comparator() { - List> comparators = orderElements.stream().map(BucketOrder::comparator).toList(); - return (lhs, rhs) -> { - for (Comparator c : comparators) { - int result = c.compare(lhs, rhs); - if (result != 0) { - return result; - } - } - return 0; - }; + Iterator iterator = orderElements.iterator(); + Comparator comparator = iterator.next().comparator(); + while (iterator.hasNext()) { + comparator = comparator.thenComparing(iterator.next().comparator()); + } + return comparator; } @Override @@ -222,18 +212,12 @@ Comparator, AggregationReduceContext, B> reduce, AggregationReduceContext reduceContext ) { - List>> comparators = orderElements.stream() - .map(b -> b.delayedBucketComparator(reduce, reduceContext)) - .toList(); - return (lhs, rhs) -> { - for (Comparator> c : comparators) { - int result = c.compare(lhs, rhs); - if (result != 0) { - return result; - } - } - return 0; - }; + Iterator iterator = orderElements.iterator(); + Comparator> comparator = iterator.next().delayedBucketComparator(reduce, reduceContext); + while (iterator.hasNext()) { + comparator = comparator.thenComparing(iterator.next().delayedBucketComparator(reduce, reduceContext)); + } + return comparator; } @Override @@ -285,12 +269,13 @@ public Comparator comparator() { return comparator; } + @SuppressWarnings({ "rawtypes", "unchecked" }) @Override Comparator> delayedBucketComparator( BiFunction, AggregationReduceContext, B> reduce, AggregationReduceContext reduceContext ) { - return delayedBucketCompator::compare; + return (Comparator) delayedBucketCompator; } @Override @@ -453,16 +438,7 @@ public static boolean isKeyDesc(BucketOrder order) { * @return {@code true} if the order matches, {@code false} otherwise. */ private static boolean isOrder(BucketOrder order, BucketOrder expected) { - if (order == expected) { - return true; - } else if (order instanceof CompoundOrder) { - // check if its a compound order with the first element that matches - List orders = ((CompoundOrder) order).orderElements; - if (orders.size() >= 1) { - return isOrder(orders.get(0), expected); - } - } - return false; + return order == expected || (order instanceof CompoundOrder compoundOrder && compoundOrder.orderElements.getFirst() == expected); } /**
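The InternalOrder rewrite above replaces hand-rolled compare-loop lambdas with Comparator.thenComparing chains; the semantics are identical, since a later criterion is consulted only when every earlier one reports a tie. The same fold as a self-contained sketch (the types are illustrative):

    import java.util.Comparator;
    import java.util.Iterator;
    import java.util.List;

    final class CompoundComparators {
        // Fold a non-empty list of comparators into one, as the PR now does:
        // seed with the first comparator, then chain the rest via thenComparing.
        static <T> Comparator<T> fold(List<Comparator<T>> criteria) {
            Iterator<Comparator<T>> it = criteria.iterator();
            Comparator<T> result = it.next(); // safe only for non-empty input
            while (it.hasNext()) {
                result = result.thenComparing(it.next());
            }
            return result;
        }
    }

Like the PR's version, the sketch calls iterator.next() without a hasNext() guard, which is only safe because compound orders are constructed with at least one element.
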
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java b/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java index 739dc58743332..34fcf58e43bd3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InvalidAggregationPathException.java @@ -20,10 +20,6 @@ public InvalidAggregationPathException(String msg) { super(msg); } - public InvalidAggregationPathException(String msg, Throwable cause) { - super(msg, cause); - } - public InvalidAggregationPathException(StreamInput in) throws IOException { super(in); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 592f7b4887598..e85d01930807c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; import java.util.AbstractList; @@ -163,6 +164,10 @@ protected void prepareSubAggs(LongArray ordsToCollect) throws IOException {} * array of ordinals */ protected final IntFunction<InternalAggregations> buildSubAggsForBuckets(LongArray bucketOrdsToCollect) throws IOException { + if (context.isCancelled()) { + throw new TaskCancelledException("not building sub-aggregations due to task cancellation"); + } + prepareSubAggs(bucketOrdsToCollect); InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][]; for (int i = 0; i < subAggregators.length; i++) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 441b30f872a35..4d946bfb3bb99 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -461,7 +461,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt // Visit documents sorted by the leading source of the composite definition and terminates // when the leading source value is guaranteed to be greater than the lowest composite bucket // in the queue. - DocIdSet docIdSet = sortedDocsProducer.processLeaf(topLevelQuery(), queue, aggCtx.getLeafReaderContext(), fillDocIdSet); + DocIdSet docIdSet = sortedDocsProducer.processLeaf(queue, aggCtx.getLeafReaderContext(), fillDocIdSet); if (fillDocIdSet) { entries.add(new Entry(aggCtx, docIdSet)); }
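The BucketsAggregator hunk above inserts a cooperative-cancellation checkpoint at the top of buildSubAggsForBuckets, so a cancelled search stops before the expensive sub-aggregation build rather than after it. The general shape of that guard, reduced to a self-contained sketch (the interface and names are illustrative, not Elasticsearch's types):

    // Poll a cancellation flag at phase boundaries and abort with a dedicated
    // exception; the caller maps it to a "task cancelled" response.
    interface CancellableContext {
        boolean isCancelled();
    }

    final class ExpensivePhase {
        static void run(CancellableContext context, Runnable work) {
            if (context.isCancelled()) {
                // IllegalStateException stands in for Elasticsearch's TaskCancelledException here.
                throw new IllegalStateException("not running expensive phase due to task cancellation");
            }
            work.run();
        }
    }
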
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index 7c920abfe2451..e88c9724edba1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.Query; import org.apache.lucene.util.DocIdSetBuilder; import java.io.IOException; @@ -36,8 +35,7 @@ class PointsSortedDocsProducer extends SortedDocsProducer { } @Override - DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException { + DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException { final PointValues values = context.reader().getPointValues(field); if (values == null) { // no value for the field diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java index 4503758c55b04..2d1b628482d45 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Query; import org.apache.lucene.util.Bits; import org.apache.lucene.util.DocIdSetBuilder; import org.elasticsearch.core.Nullable; @@ -99,6 +98,5 @@ public void collect(int doc, long bucket) throws IOException { * Returns the {@link DocIdSet} of the documents that contain a top composite bucket in this leaf or * {@link DocIdSet#EMPTY} if fillDocIdSet is false.
*/ - abstract DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException; + abstract DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java index e2aaba06a19ec..3b62cb8f57d8b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java @@ -14,7 +14,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -29,8 +28,7 @@ class TermsSortedDocsProducer extends SortedDocsProducer { } @Override - DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) - throws IOException { + DocIdSet processLeaf(CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) throws IOException { final Terms terms = context.reader().terms(field); if (terms == null) { // no value for the field diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java index a9ec0ba878ec0..bf6fb39d43c4b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java @@ -462,7 +462,7 @@ private static class FilterMatchingDisiWrapper extends DisiWrapper { final int filterOrd; FilterMatchingDisiWrapper(Scorer scorer, int ord) { - super(scorer); + super(scorer, false); this.filterOrd = ord; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java index 7291a099dd7f7..9994a2bca08bf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java @@ -25,15 +25,7 @@ public class InternalDateRange extends InternalRange aggregations, - boolean keyed, - DocValueFormat formatter - ) { + public Bucket(String key, double from, double to, long docCount, List aggregations, DocValueFormat formatter) { super(key, from, to, docCount, InternalAggregations.from(aggregations), formatter); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java index 17982043e8e20..b65b0e1ec010a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java @@ -75,24 +75,6 @@ public interface BucketOrdsEnum { * Read the current value. 
*/ void readValue(BytesRef dest); - - /** - * An {@linkplain BucketOrdsEnum} that is empty. - */ - BucketOrdsEnum EMPTY = new BucketOrdsEnum() { - @Override - public boolean next() { - return false; - } - - @Override - public long ord() { - return 0; - } - - @Override - public void readValue(BytesRef dest) {} - }; } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 439b61cc43ddf..0e0b7f3f5f5ec 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -290,7 +290,6 @@ static class LowCardinality extends GlobalOrdinalsStringTermsAggregator { BucketCountThresholds bucketCountThresholds, AggregationContext context, Aggregator parent, - boolean remapGlobalOrds, SubAggCollectionMode collectionMode, boolean showTermDocCountError, Map<String, Object> metadata, @@ -308,7 +307,7 @@ static class LowCardinality extends GlobalOrdinalsStringTermsAggregator { ALWAYS_TRUE, context, parent, - remapGlobalOrds, + false, collectionMode, showTermDocCountError, CardinalityUpperBound.ONE, @@ -385,7 +384,7 @@ protected void doClose() { Releasables.close(resultStrategy, segmentDocCounts, collectionStrategy); } - private void mapSegmentCountsToGlobalCounts(LongUnaryOperator mapping) throws IOException { + private void mapSegmentCountsToGlobalCounts(LongUnaryOperator mapping) { for (long i = 1; i < segmentDocCounts.size(); i++) { // We use set(...) here, because we need to reset the slot to 0. // segmentDocCounts get reused over the segments and otherwise counts would be too high. @@ -395,7 +394,7 @@ private void mapSegmentCountsToGlobalCounts(LongUnaryOperator mapping) throws IO } long ord = i - 1; // remember we do +1 when counting long globalOrd = mapping.applyAsLong(ord); - incrementBucketDocCount(collectionStrategy.globalOrdToBucketOrd(0, globalOrd), inc); + incrementBucketDocCount(collectionStrategy.globalOrdToBucketOrd(globalOrd), inc); } } } @@ -440,7 +439,7 @@ abstract static class CollectionStrategy implements Releasable { /** * Convert a global ordinal into a bucket ordinal. */ - abstract long globalOrdToBucketOrd(long owningBucketOrd, long globalOrd); + abstract long globalOrdToBucketOrd(long globalOrd); /** * Create the aggregation result @@ -491,8 +490,7 @@ void collectGlobalOrd(long owningBucketOrd, int doc, long globalOrd, LeafBucketC } @Override - long globalOrdToBucketOrd(long owningBucketOrd, long globalOrd) { - assert owningBucketOrd == 0; + long globalOrdToBucketOrd(long globalOrd) { return globalOrd; } @@ -659,8 +657,8 @@ void collectGlobalOrd(long owningBucketOrd, int doc, long globalOrd, LeafBucketC } @Override - long globalOrdToBucketOrd(long owningBucketOrd, long globalOrd) { - return bucketOrds.find(owningBucketOrd, globalOrd); + long globalOrdToBucketOrd(long globalOrd) { + return bucketOrds.find(0, globalOrd); } private void collectZeroDocEntriesIfNeeded(long owningBucketOrd) throws IOException { @@ -789,7 +787,7 @@ InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOExc /** * Strategy for building results.
*/ - abstract class ResultStrategy< + abstract static class ResultStrategy< R extends InternalAggregation, B extends InternalMultiBucketAggregation.InternalBucket, TB extends InternalMultiBucketAggregation.InternalBucket> implements Releasable { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 5108793b8a809..9db9a41016621 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -72,7 +72,7 @@ protected Bucket(long subsetDf, long supersetDf, InternalAggregations aggregatio /** * Read from a stream. */ - protected Bucket(StreamInput in, DocValueFormat format) { + protected Bucket(DocValueFormat format) { this.format = format; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java index a5534f75d4f6a..9bd0e7eac35a0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java @@ -89,8 +89,7 @@ interface BackgroundFrequencyForLong extends Releasable { this.backgroundFilter = null; } } else { - Query contextFiltered = context.filterQuery(backgroundQuery); - this.backgroundFilter = contextFiltered; + this.backgroundFilter = context.filterQuery(backgroundQuery); } /* * We need to use a superset size that includes deleted docs or we diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java index 17ea290b7aaaf..807514b30ab5b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java @@ -37,7 +37,7 @@ public Bucket(long subsetDf, long supersetDf, long term, InternalAggregations ag } Bucket(StreamInput in, DocValueFormat format) throws IOException { - super(in, format); + super(format); subsetDf = in.readVLong(); supersetDf = in.readVLong(); term = in.readLong(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java index b255f17d2843b..d55228304666f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java @@ -48,7 +48,7 @@ public Bucket( * Read from a stream. 
*/ public Bucket(StreamInput in, DocValueFormat format) throws IOException { - super(in, format); + super(format); termBytes = in.readBytesRef(); subsetDf = in.readVLong(); supersetDf = in.readVLong(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java index 080cac9cbfb85..1b68d55260fe0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.query.QueryBuilder; @@ -38,6 +37,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.ParseField; import java.io.IOException; @@ -128,7 +128,7 @@ private static SignificantTermsAggregatorSupplier bytesSupplier() { *

* Some searches that will never match can still fall through and we end up running a query that will produce no results. * However even in that case we sometimes do expensive things like loading global ordinals. This method should prevent this. - * Note that if {@link org.elasticsearch.search.SearchService#executeQueryPhase(ShardSearchRequest, SearchShardTask, ActionListener)} + * Note that if {@link org.elasticsearch.search.SearchService#executeQueryPhase(ShardSearchRequest, CancellableTask, ActionListener)} * always does a can match then we don't need this code here. */ static boolean matchNoDocs(AggregationContext context, Aggregator parent) { @@ -422,6 +422,10 @@ public static ExecutionMode fromString(String value, final DeprecationLogger dep if ("global_ordinals".equals(value)) { return GLOBAL_ORDINALS; } else if ("global_ordinals_hash".equals(value)) { + /* + * We have no plans to remove this so we don't break anyone, no matter + * how few people still use this or how long it's been deprecated. + */ deprecationLogger.warn( DeprecationCategory.AGGREGATIONS, "global_ordinals_hash", diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index da5ae37b08228..5a692b934a41c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -511,7 +511,6 @@ Aggregator create( bucketCountThresholds, context, parent, - false, subAggCollectMode, showTermDocCountError, metadata, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java index 6d1370f147f36..82011528fef71 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.search.aggregations.bucket.terms; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -40,7 +39,7 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms { - private Bucket(BytesRef term, long subsetDf, long supersetDf, InternalAggregations aggregations, DocValueFormat format) { + private Bucket(long subsetDf, long supersetDf, InternalAggregations aggregations, DocValueFormat format) { super(subsetDf, supersetDf, aggregations, format); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java index 72e1db245338e..8b0617e116cc1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java @@ -116,8 +116,7 @@ protected DoubleHistogram getState(long bucketOrd) { if (bucketOrd >= states.size()) { return null; } - final DoubleHistogram state = states.get(bucketOrd); - return state; + return
states.get(bucketOrd); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java index 00d7890e4710f..2605fc1c09361 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java @@ -44,9 +44,6 @@ public AbstractHyperLogLogPlusPlus(int precision) { /** Get HyperLogLog algorithm */ protected abstract AbstractHyperLogLog.RunLenIterator getHyperLogLog(long bucketOrd); - /** Get the number of data structures */ - public abstract long maxOrd(); - /** Collect a value in the given bucket */ public abstract void collect(long bucketOrd, long hash); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java index 1f1cbd0b34a67..908fb4bb0a2e3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractLinearCounting.java @@ -39,11 +39,6 @@ public AbstractLinearCounting(int precision) { */ protected abstract int size(long bucketOrd); - /** - * return the current values in the counter. - */ - protected abstract HashesIterator values(long bucketOrd); - public int collect(long bucketOrd, long hash) { final int k = encodeHash(hash, p); return addEncoded(bucketOrd, k); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java index 5edcf745c418d..08e9de383691b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java @@ -68,11 +68,6 @@ public interface ExtendedStats extends Stats { */ String getStdDeviationSamplingAsString(); - /** - * The upper or lower bounds of stdDev of the collected values as a String. - */ - String getStdDeviationBoundAsString(Bounds bound); - /** * The sum of the squares of the collected values as a String. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java index 21bfffa883f50..af47141730e60 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.Set; public class ExtendedStatsAggregationBuilder extends ValuesSourceAggregationBuilder.MetricsAggregationBuilder< @@ -87,6 +88,11 @@ public Set metricNames() { return InternalExtendedStats.METRIC_NAMES; } + @Override + public Optional> getOutputFieldNames() { + return Optional.of(InternalExtendedStats.Fields.OUTPUT_FORMAT); + } + @Override protected ValuesSourceType defaultValueSourceType() { return CoreValuesSourceType.NUMERIC; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java index 5af6a50a8c4a6..16dfbdada4b0a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java @@ -87,7 +87,6 @@ public HyperLogLogPlusPlus(int precision, BigArrays bigArrays, long initialBucke this.algorithm = algorithm; } - @Override public long maxOrd() { return hll.maxOrd(); } @@ -322,8 +321,7 @@ protected int size(long bucketOrd) { return size; } - @Override - protected HashesIterator values(long bucketOrd) { + private HashesIterator values(long bucketOrd) { // Make a fresh BytesRef for reading scratch work because this method can be called on many threads return new LinearCountingIterator(this, new BytesRef(), bucketOrd); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java index 1736b5ea7656d..8b1dcfb8a2f85 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java @@ -36,11 +36,6 @@ final class HyperLogLogPlusPlusSparse extends AbstractHyperLogLogPlusPlus implem this.lc = new LinearCounting(precision, bigArrays, initialBuckets); } - @Override - public long maxOrd() { - return lc.sizes.size(); - } - /** Needs to be called before adding elements into a bucket */ protected void ensureCapacity(long bucketOrd, long size) { lc.ensureCapacity(bucketOrd, size); @@ -135,8 +130,7 @@ protected int size(long bucketOrd) { return size; } - @Override - protected HashesIterator values(long bucketOrd) { + private HashesIterator values(long bucketOrd) { return new LinearCountingIterator(values.get(bucketOrd), size(bucketOrd)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java index c3a106bd9af41..8a128b77a7300 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalBounds.java @@ -73,9 +73,8 @@ public Object getProperty(List path) { }; } else if (path.size() == 2) { BoundingBox bbox = resolveBoundingBox(); - T cornerPoint = null; String cornerString = path.get(0); - cornerPoint = switch (cornerString) { + T cornerPoint = switch (cornerString) { case "top_left" -> bbox.topLeft(); case "bottom_right" -> bbox.bottomRight(); default -> throw new IllegalArgumentException("Found unknown path element [" + cornerString + "] in [" + getName() + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java index f74206c7af8b7..c6f4adc735c0c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -19,6 +20,7 @@ import java.io.IOException; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -243,8 +245,7 @@ public String getStdDeviationSamplingAsString() { return valueAsString(Metrics.std_deviation_sampling.name()); } - @Override - public String getStdDeviationBoundAsString(Bounds bound) { + private String getStdDeviationBoundAsString(Bounds bound) { return switch (bound) { case UPPER -> valueAsString(Metrics.std_upper.name()); case LOWER -> valueAsString(Metrics.std_lower.name()); @@ -337,6 +338,67 @@ static class Fields { public static final String LOWER_POPULATION = "lower_population"; public static final String UPPER_SAMPLING = "upper_sampling"; public static final String LOWER_SAMPLING = "lower_sampling"; + + static final Set OUTPUT_FORMAT = Set.of( + Metrics.count.name(), + Metrics.sum.name(), + Metrics.min.name(), + Metrics.max.name(), + Metrics.avg.name(), + SUM_OF_SQRS, + VARIANCE, + VARIANCE_POPULATION, + VARIANCE_SAMPLING, + STD_DEVIATION, + STD_DEVIATION_POPULATION, + STD_DEVIATION_SAMPLING, + STD_DEVIATION_BOUNDS + "." + UPPER, + STD_DEVIATION_BOUNDS + "." + LOWER, + STD_DEVIATION_BOUNDS + "." + UPPER_POPULATION, + STD_DEVIATION_BOUNDS + "." + LOWER_POPULATION, + STD_DEVIATION_BOUNDS + "." + UPPER_SAMPLING, + STD_DEVIATION_BOUNDS + "." 
+ LOWER_SAMPLING + ); + } + + public Map asIndexableMap() { + if (count != 0) { + // NumberFieldMapper will invalidate non-finite doubles + TriConsumer, String, Double> putIfValidDouble = (map, key, value) -> { + if (Double.isFinite(value)) { + map.put(key, value); + } + }; + var extendedStatsMap = new HashMap(13); + extendedStatsMap.put(Metrics.count.name(), getCount()); + putIfValidDouble.apply(extendedStatsMap, Metrics.sum.name(), getSum()); + putIfValidDouble.apply(extendedStatsMap, Metrics.min.name(), getMin()); + putIfValidDouble.apply(extendedStatsMap, Metrics.max.name(), getMax()); + putIfValidDouble.apply(extendedStatsMap, Metrics.avg.name(), getAvg()); + + putIfValidDouble.apply(extendedStatsMap, Fields.SUM_OF_SQRS, sumOfSqrs); + putIfValidDouble.apply(extendedStatsMap, Fields.VARIANCE, getVariance()); + putIfValidDouble.apply(extendedStatsMap, Fields.VARIANCE_POPULATION, getVariancePopulation()); + putIfValidDouble.apply(extendedStatsMap, Fields.VARIANCE_SAMPLING, getVarianceSampling()); + putIfValidDouble.apply(extendedStatsMap, Fields.STD_DEVIATION, getStdDeviation()); + putIfValidDouble.apply(extendedStatsMap, Fields.STD_DEVIATION_POPULATION, getStdDeviationPopulation()); + putIfValidDouble.apply(extendedStatsMap, Fields.STD_DEVIATION_SAMPLING, getStdDeviationSampling()); + + var stdDevBounds = new HashMap(6); + putIfValidDouble.apply(stdDevBounds, Fields.UPPER, getStdDeviationBound(Bounds.UPPER)); + putIfValidDouble.apply(stdDevBounds, Fields.LOWER, getStdDeviationBound(Bounds.LOWER)); + putIfValidDouble.apply(stdDevBounds, Fields.UPPER_POPULATION, getStdDeviationBound(Bounds.UPPER_POPULATION)); + putIfValidDouble.apply(stdDevBounds, Fields.LOWER_POPULATION, getStdDeviationBound(Bounds.LOWER_POPULATION)); + putIfValidDouble.apply(stdDevBounds, Fields.UPPER_SAMPLING, getStdDeviationBound(Bounds.UPPER_SAMPLING)); + putIfValidDouble.apply(stdDevBounds, Fields.LOWER_SAMPLING, getStdDeviationBound(Bounds.LOWER_SAMPLING)); + if (stdDevBounds.isEmpty() == false) { + extendedStatsMap.put(Fields.STD_DEVIATION_BOUNDS, stdDevBounds); + } + + return extendedStatsMap; + } else { + return Map.of(); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java index 48adad3cee618..e537c7348da6f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java @@ -90,15 +90,6 @@ protected MultiValue(StreamInput in) throws IOException { super(in); } - /** - * Read from a stream. 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java
index 48adad3cee618..e537c7348da6f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java
@@ -90,15 +90,6 @@ protected MultiValue(StreamInput in) throws IOException {
             super(in);
         }
 
-        /**
-         * Read from a stream.
-         *
-         * @param readFormat whether to read the "format" field
-         */
-        protected MultiValue(StreamInput in, boolean readFormat) throws IOException {
-            super(in, readFormat);
-        }
-
         public abstract double value(String name);
 
         public String valueAsString(String name) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java
index ac37b287736aa..2ec30b411928a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java
@@ -662,7 +662,7 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa
                     factory.fetchSource(FetchSourceContext.fromXContent(parser));
                 } else if (SearchSourceBuilder.SCRIPT_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                     List<ScriptField> scriptFields = new ArrayList<>();
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                         String scriptFieldName = parser.currentName();
                         token = parser.nextToken();
                         if (token == XContentParser.Token.START_OBJECT) {
@@ -740,12 +740,12 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa
                         parser
                     );
                 } else if (SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                         FieldAndFormat ff = FieldAndFormat.fromXContent(parser);
                         factory.docValueField(ff.field, ff.format);
                     }
                 } else if (SearchSourceBuilder.FETCH_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                         FieldAndFormat ff = FieldAndFormat.fromXContent(parser);
                         factory.fetchField(ff);
                     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java
index 32d0ae6596c85..3f7da293dfa14 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java
@@ -68,8 +68,7 @@ public void writeTo(StreamOutput out) throws IOException {
      */
     @Override
     public final PipelineAggregator create() {
-        PipelineAggregator aggregator = createInternal(this.metadata);
-        return aggregator;
+        return createInternal(this.metadata);
     }
 
     @SuppressWarnings("unchecked")
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java
index e372e328ec88e..8765657e8a4d3 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java
@@ -56,7 +56,7 @@ public final BucketMetricsPipelineAggregationBuilder parse(String pipelineAgg
             } else if (token == XContentParser.Token.START_ARRAY) {
                 if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) {
                    List<String> paths = new ArrayList<>();
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                         String path = parser.text();
                         paths.add(path);
                     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java
index 802aef5be68f3..1213b1a71761d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java
@@ -28,8 +28,8 @@ public class InternalBucketMetricValue extends InternalNumericMetricsAggregation
     public static final String NAME = "bucket_metric_value";
     static final ParseField KEYS_FIELD = new ParseField("keys");
 
-    private double value;
-    private String[] keys;
+    private final double value;
+    private final String[] keys;
 
     public InternalBucketMetricValue(String name, String[] keys, double value, DocValueFormat formatter, Map<String, Object> metadata) {
         super(name, formatter, metadata);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java
index 667e34d85b791..beb125608cbe4 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java
@@ -26,9 +26,9 @@
 import java.util.Objects;
 
 public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket {
-    private double[] percentiles;
-    private double[] percents;
-    private boolean keyed = true;
+    private final double[] percentiles;
+    private final double[] percents;
+    private final boolean keyed;
     private final transient Map<Double, Double> percentileLookups = new HashMap<>();
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java
index 8337d644c9a9b..86807e9772a2b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java
@@ -21,7 +21,7 @@
 public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator {
 
     private final double[] percents;
-    private boolean keyed = true;
+    private final boolean keyed;
     private List<Double> data;
 
     PercentilesBucketPipelineAggregator(
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java
index 2537d79a40bf5..03b4867f6036b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.search.DocValueFormat;
@@ -169,11 +170,11 @@ public static SerialDiffPipelineAggregationBuilder parse(String reducerName, XCo
             } else if (token == XContentParser.Token.START_ARRAY) {
                 if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) {
                     List<String> paths = new ArrayList<>();
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                         String path = parser.text();
                         paths.add(path);
                     }
-                    bucketsPaths = paths.toArray(new String[paths.size()]);
+                    bucketsPaths = paths.toArray(Strings.EMPTY_ARRAY);
                 } else {
                     throw new ParsingException(
                         parser.getTokenLocation(),
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java
index c720f3d9465a3..02f300df48385 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java
@@ -30,7 +30,6 @@
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.mapper.DocCountFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MappingLookup;
 import org.elasticsearch.index.mapper.NestedLookup;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.Rewriteable;
@@ -309,14 +308,6 @@ public final AggregationUsageService getUsageService() {
 
     public abstract Set<String> sourcePath(String fullName);
 
-    /**
-     * Returns the MappingLookup for the index, if one is initialized.
-     */
-    @Nullable
-    public MappingLookup getMappingLookup() {
-        return null;
-    }
-
     /**
      * Does this index have a {@code _doc_count} field in any segment?
     */
@@ -621,11 +612,6 @@ public Set<String> sourcePath(String fullName) {
             return context.sourcePath(fullName);
         }
 
-        @Override
-        public MappingLookup getMappingLookup() {
-            return context.getMappingLookup();
-        }
-
         @Override
         public void close() {
             /*
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java
index 31adf423d74c9..23ccf1d940849 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java
@@ -19,8 +19,6 @@ public class AggregationUsageService implements ReportingService<AggregationInfo> {
     private static final String ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT = "es.search.query.aggregations.total";
 
-    private final String AGGREGATION_NAME_KEY = "aggregation_name";
-    private final String VALUES_SOURCE_KEY = "values_source";
     private final LongCounter aggregationsUsageCounter;
     private final Map<String, Map<String, LongAdder>> aggs;
     private final AggregationInfo info;
@@ -83,6 +81,8 @@ public void incAggregationUsage(String aggregationName, String valuesSourceType)
         }
         assert valuesSourceMap != null : "Unknown aggregation [" + aggregationName + "][" + valuesSourceType + "]"; // tests will have a no-op implementation here
+        String VALUES_SOURCE_KEY = "values_source";
+        String AGGREGATION_NAME_KEY = "aggregation_name";
         aggregationsUsageCounter.incrementBy(1, Map.of(AGGREGATION_NAME_KEY, aggregationName, VALUES_SOURCE_KEY, valuesSourceType));
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java
index ffdbfffbce9e9..3e74d163b0d9f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java
@@ -57,10 +57,9 @@ public static void declareCommon(
      * @param timezoneAware - allows specifying timezone
      * @param filterable - allows specifying filters on the values
      * @param heterogeneous - allows specifying value-source specific format and user value type hint
-     * @param <VS> - values source type
      * @param <T> - parser context
      */
-    public static <VS extends ValuesSource, T> void declareField(
+    public static <T> void declareField(
         String fieldName,
         AbstractObjectParser<? extends MultiValuesSourceAggregationBuilder<?>, T> objectParser,
         boolean scriptable,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java
index 472619da78622..313f8e43014d0 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java
@@ -263,11 +263,7 @@ private boolean isInvalidDoc(int docId) throws IOException {
 
         // true if the TSID ord has changed since the last time we checked
         boolean shouldPop() throws IOException {
-            if (tsidOrd != tsids.ordValue()) {
-                return true;
-            } else {
-                return false;
-            }
+            return tsidOrd != tsids.ordValue();
         }
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java
index c47f815c18639..9f888c1f08baa 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java
@@ -50,6 +50,8 @@
 
 public class DefaultHighlighter implements Highlighter {
 
+    public static final String NAME = "unified";
+
     @Override
     public boolean canHighlight(MappedFieldType fieldType) {
         return true;
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java
index c356c383d103a..54c265deb948d 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java
@@ -66,7 +66,7 @@ public void process(HitContext hitContext) throws IOException {
                 Map<String, Function<HitContext, FieldHighlightContext>> contextBuilders = fieldContext.builders;
                 for (String field : contextBuilders.keySet()) {
                     FieldHighlightContext fieldContext = contextBuilders.get(field).apply(hitContext);
-                    Highlighter highlighter = getHighlighter(fieldContext.field);
+                    Highlighter highlighter = getHighlighter(fieldContext.field, fieldContext.fieldType);
                     HighlightField highlightField = highlighter.highlight(fieldContext);
                     if (highlightField != null) {
                         // Note that we make sure to use the original field name in the response. This is because the
@@ -80,10 +80,10 @@ public void process(HitContext hitContext) throws IOException {
         };
     }
 
-    private Highlighter getHighlighter(SearchHighlightContext.Field field) {
+    private Highlighter getHighlighter(SearchHighlightContext.Field field, MappedFieldType fieldType) {
         String highlighterType = field.fieldOptions().highlighterType();
         if (highlighterType == null) {
-            highlighterType = "unified";
+            highlighterType = fieldType.getDefaultHighlighter();
         }
         Highlighter highlighter = highlighters.get(highlighterType);
         if (highlighter == null) {
@@ -103,8 +103,6 @@ private FieldContext contextBuilders(
         Map<String, Function<HitContext, FieldHighlightContext>> builders = new LinkedHashMap<>();
         StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS;
         for (SearchHighlightContext.Field field : highlightContext.fields()) {
-            Highlighter highlighter = getHighlighter(field);
-
             Collection<String> fieldNamesToHighlight = context.getSearchExecutionContext().getMatchingFieldNames(field.field());
 
             boolean fieldNameContainsWildcards = field.field().contains("*");
@@ -112,6 +110,7 @@ private FieldContext contextBuilders(
             boolean sourceRequired = false;
             for (String fieldName : fieldNamesToHighlight) {
                 MappedFieldType fieldType = context.getSearchExecutionContext().getFieldType(fieldName);
+                Highlighter highlighter = getHighlighter(field, fieldType);
                 // We should prevent highlighting if a field is anything but a text, match_only_text,
                 // or keyword field.
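The HighlightPhase change above swaps a hard-coded "unified" default for a per-field-type default. A toy lookup with hypothetical names, showing the shape of that fallback (the real code resolves a Highlighter instance from its registered highlighters map):

```java
import java.util.Map;

// Hypothetical sketch: when the field options name no highlighter, fall back
// to the field type's default instead of a global "unified" constant.
public class HighlighterLookupDemo {
    record FieldOptions(String highlighterType) {}

    static String resolveHighlighter(FieldOptions options, String fieldTypeDefault, Map<String, Object> registry) {
        String type = options.highlighterType() != null ? options.highlighterType() : fieldTypeDefault;
        if (registry.containsKey(type) == false) {
            throw new IllegalArgumentException("unknown highlighter type [" + type + "]");
        }
        return type;
    }

    public static void main(String[] args) {
        Map<String, Object> registry = Map.of("unified", new Object(), "fvh", new Object());
        System.out.println(resolveHighlighter(new FieldOptions(null), "unified", registry));  // unified
        System.out.println(resolveHighlighter(new FieldOptions("fvh"), "unified", registry)); // fvh
    }
}
```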
diff --git a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
index 8c4f912c5988c..5bad06d08f96b 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
@@ -12,7 +12,6 @@
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TotalHits;
-import org.elasticsearch.action.search.SearchShardTask;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@@ -40,6 +39,7 @@
 import org.elasticsearch.search.rescore.RescoreContext;
 import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;
+import org.elasticsearch.tasks.CancellableTask;
 
 import java.util.List;
 
@@ -422,12 +422,12 @@ public SearchExecutionContext getSearchExecutionContext() {
     }
 
     @Override
-    public void setTask(SearchShardTask task) {
+    public void setTask(CancellableTask task) {
         in.setTask(task);
     }
 
     @Override
-    public SearchShardTask getTask() {
+    public CancellableTask getTask() {
         return in.getTask();
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java
index 7da71b77c6a6f..5a8c280e12f85 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java
@@ -11,7 +11,6 @@
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TotalHits;
-import org.elasticsearch.action.search.SearchShardTask;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.core.Assertions;
 import org.elasticsearch.core.Nullable;
@@ -48,6 +47,7 @@
 import org.elasticsearch.search.rescore.RescoreContext;
 import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;
+import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.transport.LeakTracker;
 
 import java.io.IOException;
@@ -90,7 +90,7 @@ public final List<Runnable> getCancellationChecks() {
         if (lowLevelCancellation()) {
             // This searching doesn't live beyond this phase, so we don't need to remove query cancellation
             Runnable c = () -> {
-                final SearchShardTask task = getTask();
+                final CancellableTask task = getTask();
                 if (task != null) {
                     task.ensureNotCancelled();
                 }
@@ -100,9 +100,9 @@
         return timeoutRunnable == null ? List.of() : List.of(timeoutRunnable);
     }
 
-    public abstract void setTask(SearchShardTask task);
+    public abstract void setTask(CancellableTask task);
 
-    public abstract SearchShardTask getTask();
+    public abstract CancellableTask getTask();
 
     public abstract boolean isCancelled();
 
diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java
index 3036a295d459a..8ad52c4f9bb59 100644
--- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java
@@ -126,7 +126,15 @@ static void executeRank(SearchContext searchContext) throws QueryPhaseExecutionE
 
     static void executeQuery(SearchContext searchContext) throws QueryPhaseExecutionException {
         if (searchContext.hasOnlySuggest()) {
-            SuggestPhase.execute(searchContext);
+            try {
+                SuggestPhase.execute(searchContext);
+            } catch (ContextIndexSearcher.TimeExceededException timeExceededException) {
+                SearchTimeoutException.handleTimeout(
+                    searchContext.request().allowPartialSearchResults(),
+                    searchContext.shardTarget(),
+                    searchContext.queryResult()
+                );
+            }
             searchContext.queryResult().topDocs(new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN), new DocValueFormat[0]);
             return;
         }
@@ -142,11 +150,18 @@ static void executeQuery(SearchContext searchContext) throws QueryPhaseExecution
 
         addCollectorsAndSearch(searchContext);
 
-        RescorePhase.execute(searchContext);
-        SuggestPhase.execute(searchContext);
-
-        if (searchContext.getProfilers() != null) {
-            searchContext.queryResult().profileResults(searchContext.getProfilers().buildQueryPhaseResults());
+        try {
+            RescorePhase.execute(searchContext);
+            SuggestPhase.execute(searchContext);
+            if (searchContext.getProfilers() != null) {
+                searchContext.queryResult().profileResults(searchContext.getProfilers().buildQueryPhaseResults());
+            }
+        } catch (ContextIndexSearcher.TimeExceededException timeExceededException) {
+            SearchTimeoutException.handleTimeout(
+                searchContext.request().allowPartialSearchResults(),
+                searchContext.shardTarget(),
+                searchContext.queryResult()
+            );
         }
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java b/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java
index e006f176ff91a..e5caa00537c67 100644
--- a/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java
+++ b/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java
@@ -31,7 +31,7 @@ public SearchTimeoutException(StreamInput in) throws IOException {
 
     @Override
     public RestStatus status() {
-        return RestStatus.GATEWAY_TIMEOUT;
+        return RestStatus.TOO_MANY_REQUESTS;
    }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java
index ad70e7d39aff8..951a9b0cf3520 100644
--- a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java
@@ -12,7 +12,6 @@
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TotalHits;
-import org.elasticsearch.action.search.SearchShardTask;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@@ -48,6 +47,7 @@
 import org.elasticsearch.search.rescore.RescoreContext;
 import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;
+import org.elasticsearch.tasks.CancellableTask;
 
 import java.util.List;
 
@@ -211,12 +211,12 @@ public long getRelativeTimeInMillis() {
 
     /* ---- ALL METHODS ARE UNSUPPORTED BEYOND HERE ---- */
 
     @Override
-    public void setTask(SearchShardTask task) {
+    public void setTask(CancellableTask task) {
         throw new UnsupportedOperationException();
     }
 
     @Override
-    public SearchShardTask getTask() {
+    public CancellableTask getTask() {
         throw new UnsupportedOperationException();
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java
index f8b348b383f01..7223da3c6101b 100644
--- a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java
+++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java
@@ -18,9 +18,7 @@
 import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
 import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.lucene.grouping.TopFieldGroups;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.query.SearchTimeoutException;
 import org.elasticsearch.search.sort.ShardDocSortField;
 import org.elasticsearch.search.sort.SortAndFormats;
 
@@ -72,7 +70,7 @@ public static void execute(SearchContext context) {
             assert topDocsSortedByScore(topDocs) : "topdocs should be sorted after rescore";
             ctx.setCancellationChecker(null);
         }
-        /**
+        /*
         * Since rescorers are building top docs with score only, we must reconstruct the {@link TopFieldGroups}
        * or {@link TopFieldDocs} using their original version before rescoring.
        */
@@ -86,12 +84,6 @@ public static void execute(SearchContext context) {
                 .topDocs(new TopDocsAndMaxScore(topDocs, topDocs.scoreDocs[0].score), context.queryResult().sortValueFormats());
         } catch (IOException e) {
             throw new ElasticsearchException("Rescore Phase Failed", e);
-        } catch (ContextIndexSearcher.TimeExceededException e) {
-            SearchTimeoutException.handleTimeout(
-                context.request().allowPartialSearchResults(),
-                context.shardTarget(),
-                context.queryResult()
-            );
         }
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java
index 8403031bc65f5..0bb5fd849bbcf 100644
--- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java
@@ -192,8 +192,13 @@ public void onFailure(Exception e) {
                 }
             });
         });
-
-        return new RankDocsRetrieverBuilder(rankWindowSize, newRetrievers.stream().map(s -> s.retriever).toList(), results::get);
+        RankDocsRetrieverBuilder rankDocsRetrieverBuilder = new RankDocsRetrieverBuilder(
+            rankWindowSize,
+            newRetrievers.stream().map(s -> s.retriever).toList(),
+            results::get
+        );
+        rankDocsRetrieverBuilder.retrieverName(retrieverName());
+        return rankDocsRetrieverBuilder;
     }
 
     @Override
@@ -219,7 +224,8 @@ public ActionRequestValidationException validate(
         boolean allowPartialSearchResults
     ) {
         validationException = super.validate(source, validationException, isScroll, allowPartialSearchResults);
-        if (source.size() > rankWindowSize) {
+        final int size = source.size();
+        if (size > rankWindowSize) {
             validationException = addValidationError(
                 String.format(
                     Locale.ROOT,
@@ -227,7 +233,7 @@ public ActionRequestValidationException validate(
                     getName(),
                     getRankWindowSizeField().getPreferredName(),
                     rankWindowSize,
-                    source.size()
+                    size
                 ),
                 validationException
             );
diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java
index 4d3f3fefd4462..a77f5327fbc26 100644
--- a/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java
@@ -90,11 +90,13 @@ public QueryBuilder topDocsQuery() {
 
     @Override
     public QueryBuilder explainQuery() {
-        return new RankDocsQueryBuilder(
+        var explainQuery = new RankDocsQueryBuilder(
             rankDocs.get(),
             sources.stream().map(RetrieverBuilder::explainQuery).toArray(QueryBuilder[]::new),
             true
         );
+        explainQuery.queryName(retrieverName());
+        return explainQuery;
     }
 
     @Override
@@ -123,8 +125,12 @@ public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder
         } else {
             rankQuery = new RankDocsQueryBuilder(rankDocResults, null, false);
         }
+        rankQuery.queryName(retrieverName());
         // ignore prefilters of this level, they were already propagated to children
         searchSourceBuilder.query(rankQuery);
+        if (searchSourceBuilder.size() < 0) {
+            searchSourceBuilder.size(rankWindowSize);
+        }
         if (sourceHasMinScore()) {
             searchSourceBuilder.minScore(this.minScore() == null ? Float.MIN_VALUE : this.minScore());
         }
diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java
index 4531beef7125d..83a331fd81b54 100644
--- a/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java
@@ -144,6 +144,7 @@ public void doToXContent(XContentBuilder builder, Params params) throws IOExcept
     protected RescorerRetrieverBuilder clone(List<RetrieverBuilder> newChildRetrievers, List<QueryBuilder> newPreFilterQueryBuilders) {
         var newInstance = new RescorerRetrieverBuilder(newChildRetrievers.get(0), rescorers);
         newInstance.preFilterQueryBuilders = newPreFilterQueryBuilders;
+        newInstance.retrieverName = retrieverName;
         return newInstance;
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java b/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java
index d63e0717ca7ac..272855bacd544 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java
@@ -56,5 +56,4 @@ public static void execute(SearchContext context) {
             throw new ElasticsearchException("I/O exception during suggest phase", e);
         }
     }
-
 }
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
index 008c75ed13473..5d8ef51af8d51 100644
--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
+++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
@@ -3885,6 +3885,11 @@ public void onFailure(Exception e) {
             logSnapshotFailure("create", snapshot, e);
             listener.onFailure(e);
         }
+
+        @Override
+        public String toString() {
+            return "CreateSnapshotTask{repository=" + repository.getMetadata().name() + ", snapshot=" + snapshot + '}';
+        }
     }
 
     private static void logSnapshotFailure(String operation, Snapshot snapshot, Exception e) {
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
index 8399f5dd72f7d..95e507f70d7a9 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
@@ -157,15 +157,15 @@ protected Map<String, List<String>> groupClusterIndices(Set<String> remoteCluste
                 if (indexName.equals("*") == false) {
                     throw new IllegalArgumentException(
                         Strings.format(
-                            "To exclude a cluster you must specify the '*' wildcard for " + "the index expression, but found: [%s]",
+                            "To exclude a cluster you must specify the '*' wildcard for the index expression, but found: [%s]",
                             indexName
                         )
                     );
                 }
-                if (selectorString != null && selectorString.equals("*") == false) {
+                if (selectorString != null) {
                     throw new IllegalArgumentException(
                         Strings.format(
-                            "To exclude a cluster you must specify the '::*' selector or leave it off, but found: [%s]",
+                            "To exclude a cluster you must not specify a selector, but found selector: [%s]",
                             selectorString
                         )
                     );
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java
index 97520e8b939a6..8bc5771485f6d 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java
@@ -13,6 +13,8 @@
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.logging.DeprecationCategory;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.core.Nullable;
@@ -27,10 +29,15 @@
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
 
+import static org.elasticsearch.transport.RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE;
 import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME;
 
 public class RemoteConnectionManager implements ConnectionManager {
 
+    private static final Logger logger = LogManager.getLogger(RemoteConnectionManager.class);
+
+    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RemoteConnectionManager.class);
+
     private final String clusterAlias;
     private final RemoteClusterCredentialsManager credentialsManager;
     private final ConnectionManager delegate;
@@ -45,6 +52,12 @@ public class RemoteConnectionManager implements ConnectionManager {
             @Override
             public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) {
                 addConnectedNode(node);
+                try {
+                    // called when a node is successfully connected through a proxy connection
+                    maybeLogDeprecationWarning(wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, credentialsManager));
+                } catch (Exception e) {
+                    logger.warn("Failed to log deprecation warning.", e);
+                }
             }
 
             @Override
@@ -102,11 +115,28 @@ public void openConnection(DiscoveryNode node, @Nullable ConnectionProfile profi
             node,
             profile,
             listener.delegateFailureAndWrap(
-                (l, connection) -> l.onResponse(wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, credentialsManager))
+                (l, connection) -> l.onResponse(
+                    maybeLogDeprecationWarning(wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, credentialsManager))
+                )
             )
         );
     }
 
+    private InternalRemoteConnection maybeLogDeprecationWarning(InternalRemoteConnection connection) {
+        if (connection.getClusterCredentials() == null
+            && (false == REMOTE_CLUSTER_PROFILE.equals(this.getConnectionProfile().getTransportProfile()))) {
+            deprecationLogger.warn(
+                DeprecationCategory.SECURITY,
+                "remote_cluster_certificate_access-" + connection.getClusterAlias(),
+                "The remote cluster connection to [{}] is using the certificate-based security model. "
+                    + "The certificate-based security model is deprecated and will be removed in a future major version. "
+                    + "Migrate the remote cluster from the certificate-based to the API key-based security model.",
+                connection.getClusterAlias()
+            );
+        }
+        return connection;
+    }
+
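maybeLogDeprecationWarning relies on DeprecationLogger's per-key deduplication, keyed by "remote_cluster_certificate_access-" plus the cluster alias. A rough standalone approximation of that warn-once behaviour (hypothetical class, not the ES DeprecationLogger API):

```java
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical sketch: one warning per deduplication key, however many
// connections are opened to the same remote cluster alias.
public class WarnOncePerAlias {
    private static final Set<String> SEEN_KEYS = ConcurrentHashMap.newKeySet();

    static void warnCertificateBasedModel(String clusterAlias) {
        if (SEEN_KEYS.add("remote_cluster_certificate_access-" + clusterAlias)) {
            System.err.printf(
                "The remote cluster connection to [%s] is using the deprecated certificate-based security model.%n",
                clusterAlias
            );
        }
    }

    public static void main(String[] args) {
        warnCertificateBasedModel("leader");
        warnCertificateBasedModel("leader"); // suppressed: key already seen
    }
}
```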
" + + "Migrate the remote cluster from the certificate-based to the API key-based security model.", + connection.getClusterAlias() + ); + } + return connection; + } + @Override public Transport.Connection getConnection(DiscoveryNode node) { try { diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index 1a9043d093feb..eb4e0394bb5a2 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java @@ -11,7 +11,6 @@ import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; @@ -19,8 +18,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.threadpool.ThreadPool; import java.io.EOFException; @@ -44,49 +46,17 @@ final class TransportHandshaker { * ignores the body of the request. After the handshake, the OutboundHandler uses the min(local,remote) protocol version for all later * messages. * - * This version supports three handshake protocols, v6080099, v7170099 and v8800000, which respectively have the same message structure - * as the transport protocols of v6.8.0, v7.17.0, and v8.18.0. This node only sends v7170099 requests, but it can send a valid response - * to any v6080099 or v8800000 requests that it receives. + * This version supports two handshake protocols, v7170099 and v8800000, which respectively have the same message structure as the + * transport protocols of v7.17.0, and v8.18.0. This node only sends v8800000 requests, but it can send a valid response to any v7170099 + * requests that it receives. * * Note that these are not really TransportVersion constants as used elsewhere in ES, they're independent things that just happen to be - * stored in the same location in the message header and which roughly match the same ID numbering scheme. Older versions of ES did - * rely on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer + * stored in the same location in the message header and which roughly match the same ID numbering scheme. Older versions of ES did rely + * on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer * true. * * Here are some example messages, broken down to show their structure. See TransportHandshakerRawMessageTests for supporting tests. 
     *
-     * ## v6080099 Request:
-     *
-     * 45 53 -- 'ES' marker
-     * 00 00 00 34 -- total message length
-     * 00 00 00 00 00 00 00 01 -- request ID
-     * 08 -- status flags (0b1000 == handshake request)
-     * 00 5c c6 63 -- handshake protocol version (0x5cc663 == 6080099)
-     * 00 -- no request headers [1]
-     * 00 -- no response headers [1]
-     * 01 -- one feature [2]
-     * 06 -- feature name length
-     * 78 2d 70 61 63 6b -- feature name 'x-pack'
-     * 16 -- action string size
-     * 69 6e 74 65 72 6e 61 6c }
-     * 3a 74 63 70 2f 68 61 6e }- ASCII representation of HANDSHAKE_ACTION_NAME
-     * 64 73 68 61 6b 65 }
-     * 00 -- no parent task ID [3]
-     * 04 -- payload length
-     * 8b d5 b5 03 -- max acceptable protocol version (vInt: 00000011 10110101 11010101 10001011 == 7170699)
-     *
-     * ## v6080099 Response:
-     *
-     * 45 53 -- 'ES' marker
-     * 00 00 00 13 -- total message length
-     * 00 00 00 00 00 00 00 01 -- request ID (copied from request)
-     * 09 -- status flags (0b1001 == handshake response)
-     * 00 5c c6 63 -- handshake protocol version (0x5cc663 == 6080099, copied from request)
-     * 00 -- no request headers [1]
-     * 00 -- no response headers [1]
-     * c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099)
-     *
-     *
     * ## v7170099 Requests:
     *
     * 45 53 -- 'ES' marker
@@ -158,14 +128,11 @@ final class TransportHandshaker {
      * [3] Parent task ID should be empty; see org.elasticsearch.tasks.TaskId.writeTo for its structure.
      */
 
-    static final TransportVersion V7_HANDSHAKE_VERSION = TransportVersion.fromId(6_08_00_99);
+    private static final Logger logger = LogManager.getLogger(TransportHandshaker.class);
+
     static final TransportVersion V8_HANDSHAKE_VERSION = TransportVersion.fromId(7_17_00_99);
     static final TransportVersion V9_HANDSHAKE_VERSION = TransportVersion.fromId(8_800_00_0);
-    static final Set<TransportVersion> ALLOWED_HANDSHAKE_VERSIONS = Set.of(
-        V7_HANDSHAKE_VERSION,
-        V8_HANDSHAKE_VERSION,
-        V9_HANDSHAKE_VERSION
-    );
+    static final Set<TransportVersion> ALLOWED_HANDSHAKE_VERSIONS = Set.of(V8_HANDSHAKE_VERSION, V9_HANDSHAKE_VERSION);
 
     static final String HANDSHAKE_ACTION_NAME = "internal:tcp/handshake";
     private final ConcurrentMap<Long, HandshakeResponseHandler> pendingHandshakes = new ConcurrentHashMap<>();
@@ -196,14 +163,14 @@ void sendHandshake(
         ActionListener<TransportVersion> listener
     ) {
         numHandshakes.inc();
-        final HandshakeResponseHandler handler = new HandshakeResponseHandler(requestId, listener);
+        final HandshakeResponseHandler handler = new HandshakeResponseHandler(requestId, channel, listener);
         pendingHandshakes.put(requestId, handler);
         channel.addCloseListener(
             ActionListener.running(() -> handler.handleLocalException(new TransportException("handshake failed because connection reset")))
         );
         boolean success = false;
         try {
-            handshakeRequestSender.sendRequest(node, channel, requestId, V8_HANDSHAKE_VERSION);
+            handshakeRequestSender.sendRequest(node, channel, requestId, V9_HANDSHAKE_VERSION);
 
             threadPool.schedule(
                 () -> handler.handleLocalException(new ConnectTransportException(node, "handshake_timeout[" + timeout + "]")),
@@ -222,9 +189,9 @@ void sendHandshake(
     }
 
     void handleHandshake(TransportChannel channel, long requestId, StreamInput stream) throws IOException {
+        final HandshakeRequest handshakeRequest;
         try {
-            // Must read the handshake request to exhaust the stream
-            new HandshakeRequest(stream);
+            handshakeRequest = new HandshakeRequest(stream);
         } catch (Exception e) {
             assert ignoreDeserializationErrors : e;
             throw e;
         }
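Per the class comment above, both ends settle on min(local, remote) after the handshake, and a peer must reject the exchange when that minimum is a protocol it cannot speak. A toy model of that negotiation, with illustrative version ids:

```java
// Hypothetical sketch of the negotiation outcome, not the ES implementation.
public class HandshakeNegotiationDemo {
    static int negotiate(int localVersion, int remoteVersion, int minimumCompatible) {
        int result = Math.min(localVersion, remoteVersion);
        if (result < minimumCompatible) {
            throw new IllegalStateException("incompatible wire format: " + result);
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(negotiate(8_800_00_0, 7_17_00_99, 7_17_00_99)); // remote older: use its protocol
        System.out.println(negotiate(7_17_00_99, 8_800_00_0, 7_17_00_99)); // remote newer: it adapts to ours
    }
}
```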
@@ -243,9 +210,44 @@
             assert ignoreDeserializationErrors : exception;
             throw exception;
         }
+        ensureCompatibleVersion(version, handshakeRequest.transportVersion, handshakeRequest.releaseVersion, channel);
         channel.sendResponse(new HandshakeResponse(this.version, Build.current().version()));
     }
 
+    static void ensureCompatibleVersion(
+        TransportVersion localTransportVersion,
+        TransportVersion remoteTransportVersion,
+        String releaseVersion,
+        Object channel
+    ) {
+        if (TransportVersion.isCompatible(remoteTransportVersion)) {
+            if (remoteTransportVersion.onOrAfter(localTransportVersion)) {
+                // Remote is newer than us, so we will be using our transport protocol and it's up to the other end to decide whether it
+                // knows how to do that.
+                return;
+            }
+            if (remoteTransportVersion.isKnown()) {
+                // Remote is older than us, so we will be using its transport protocol, which we can do if and only if its protocol
+                // version is known to us.
+                return;
+            }
+        }
+
+        final var message = Strings.format(
+            """
+                Rejecting unreadable transport handshake from remote node with version [%s/%s] received on [%s] since this node has \
+                version [%s/%s] which has an incompatible wire format.""",
+            releaseVersion,
+            remoteTransportVersion,
+            channel,
+            Build.current().version(),
+            localTransportVersion
+        );
+        logger.warn(message);
+        throw new IllegalStateException(message);
+
+    }
+
     TransportResponseHandler<HandshakeResponse> removeHandlerForHandshake(long requestId) {
         return pendingHandshakes.remove(requestId);
     }
@@ -261,11 +263,13 @@ long getNumHandshakes() {
     private class HandshakeResponseHandler implements TransportResponseHandler<HandshakeResponse> {
 
         private final long requestId;
+        private final TcpChannel channel;
         private final ActionListener<TransportVersion> listener;
         private final AtomicBoolean isDone = new AtomicBoolean(false);
 
-        private HandshakeResponseHandler(long requestId, ActionListener<TransportVersion> listener) {
+        private HandshakeResponseHandler(long requestId, TcpChannel channel, ActionListener<TransportVersion> listener) {
             this.requestId = requestId;
+            this.channel = channel;
             this.listener = listener;
         }
 
@@ -282,20 +286,13 @@ public Executor executor() {
         @Override
         public void handleResponse(HandshakeResponse response) {
             if (isDone.compareAndSet(false, true)) {
-                TransportVersion responseVersion = response.transportVersion;
-                if (TransportVersion.isCompatible(responseVersion) == false) {
-                    listener.onFailure(
-                        new IllegalStateException(
-                            "Received message from unsupported version: ["
-                                + responseVersion
-                                + "] minimal compatible version is: ["
-                                + TransportVersions.MINIMUM_COMPATIBLE
-                                + "]"
-                        )
-                    );
-                } else {
-                    listener.onResponse(TransportVersion.min(TransportHandshaker.this.version, response.getTransportVersion()));
-                }
+                ActionListener.completeWith(listener, () -> {
+                    ensureCompatibleVersion(version, response.getTransportVersion(), response.getReleaseVersion(), channel);
+                    final var resultVersion = TransportVersion.min(TransportHandshaker.this.version, response.getTransportVersion());
+                    assert TransportVersion.current().before(version) // simulating a newer-version transport service for test purposes
+                        || resultVersion.isKnown() : "negotiated unknown version " + resultVersion;
+                    return resultVersion;
+                });
             }
         }
 
diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java
index 0fb767c5789f9..b546b8cdd0f5f 100644
--- a/server/src/main/java/org/elasticsearch/transport/TransportService.java
+++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java
@@ -83,7 +83,7 @@ public class TransportService extends AbstractLifecycleComponent
     /**
      * A feature flag enabling transport upgrades for serverless.
      */
-    private static final String SERVERLESS_TRANSPORT_SYSTEM_PROPERTY = "es.serverless_transport";
+    static final String SERVERLESS_TRANSPORT_SYSTEM_PROPERTY = "es.serverless_transport";
     private static final boolean SERVERLESS_TRANSPORT_FEATURE_FLAG = Booleans.parseBoolean(
         System.getProperty(SERVERLESS_TRANSPORT_SYSTEM_PROPERTY),
         false
diff --git a/server/src/main/java/org/elasticsearch/transport/TransportStats.java b/server/src/main/java/org/elasticsearch/transport/TransportStats.java
index 46b161b01e9f3..2c06cd759e3a3 100644
--- a/server/src/main/java/org/elasticsearch/transport/TransportStats.java
+++ b/server/src/main/java/org/elasticsearch/transport/TransportStats.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;
 
@@ -70,18 +69,17 @@ public TransportStats(StreamInput in) throws IOException {
         rxSize = in.readVLong();
         txCount = in.readVLong();
         txSize = in.readVLong();
-        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) && in.readBoolean()) {
-            inboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT];
-            for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) {
-                inboundHandlingTimeBucketFrequencies[i] = in.readVLong();
-            }
-            outboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT];
-            for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) {
-                outboundHandlingTimeBucketFrequencies[i] = in.readVLong();
-            }
-        } else {
-            inboundHandlingTimeBucketFrequencies = new long[0];
-            outboundHandlingTimeBucketFrequencies = new long[0];
+        if (in.getTransportVersion().before(TransportVersions.TRANSPORT_STATS_HANDLING_TIME_REQUIRED)
+            && in.getTransportVersion().isPatchFrom(TransportVersions.TRANSPORT_STATS_HANDLING_TIME_REQUIRED_90) == false) {
+            in.readBoolean();
+        }
+        inboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT];
+        for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) {
+            inboundHandlingTimeBucketFrequencies[i] = in.readVLong();
+        }
+        outboundHandlingTimeBucketFrequencies = new long[HandlingTimeTracker.BUCKET_COUNT];
+        for (int i = 0; i < inboundHandlingTimeBucketFrequencies.length; i++) {
+            outboundHandlingTimeBucketFrequencies[i] = in.readVLong();
         }
         if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
             transportActionStats = Collections.unmodifiableMap(in.readOrderedMap(StreamInput::readString, TransportActionStats::new));
@@ -99,15 +97,17 @@ public void writeTo(StreamOutput out) throws IOException {
         out.writeVLong(rxSize);
         out.writeVLong(txCount);
         out.writeVLong(txSize);
-        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
-            assert (inboundHandlingTimeBucketFrequencies.length > 0) == (outboundHandlingTimeBucketFrequencies.length > 0);
-            out.writeBoolean(inboundHandlingTimeBucketFrequencies.length > 0);
-            for (long handlingTimeBucketFrequency : inboundHandlingTimeBucketFrequencies) {
-                out.writeVLong(handlingTimeBucketFrequency);
-            }
-            for (long handlingTimeBucketFrequency : outboundHandlingTimeBucketFrequencies) {
-                out.writeVLong(handlingTimeBucketFrequency);
-            }
+        assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT;
+        assert outboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT;
+        if (out.getTransportVersion().before(TransportVersions.TRANSPORT_STATS_HANDLING_TIME_REQUIRED)
+            && out.getTransportVersion().isPatchFrom(TransportVersions.TRANSPORT_STATS_HANDLING_TIME_REQUIRED_90) == false) {
+            out.writeBoolean(true);
+        }
+        for (long handlingTimeBucketFrequency : inboundHandlingTimeBucketFrequencies) {
+            out.writeVLong(handlingTimeBucketFrequency);
+        }
+        for (long handlingTimeBucketFrequency : outboundHandlingTimeBucketFrequencies) {
+            out.writeVLong(handlingTimeBucketFrequency);
         }
         if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
             out.writeMap(transportActionStats, StreamOutput::writeWriteable);
@@ -166,24 +166,13 @@ public Map<String, TransportActionStats> getTransportActionStats() {
         return transportActionStats;
     }
 
-    @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION)
-    // Review and simplify the if-else blocks containing this symbol once v9 is released
-    private static final boolean IMPOSSIBLE_IN_V9 = true;
-
     private boolean assertHistogramsConsistent() {
         assert inboundHandlingTimeBucketFrequencies.length == outboundHandlingTimeBucketFrequencies.length;
-        if (inboundHandlingTimeBucketFrequencies.length == 0) {
-            // Stats came from before v8.1
-            assert IMPOSSIBLE_IN_V9;
-        } else {
-            assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT;
-        }
+        assert inboundHandlingTimeBucketFrequencies.length == HandlingTimeTracker.BUCKET_COUNT;
         return true;
     }
 
     @Override
-    @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION)
-    // review the "if" blocks checking for non-empty once we have
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerParams) {
         return Iterators.concat(Iterators.single((builder, params) -> {
             builder.startObject(Fields.TRANSPORT);
@@ -193,19 +182,10 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerP
             builder.humanReadableField(Fields.RX_SIZE_IN_BYTES, Fields.RX_SIZE, ByteSizeValue.ofBytes(rxSize));
             builder.field(Fields.TX_COUNT, txCount);
             builder.humanReadableField(Fields.TX_SIZE_IN_BYTES, Fields.TX_SIZE, ByteSizeValue.ofBytes(txSize));
-            if (inboundHandlingTimeBucketFrequencies.length > 0) {
-                histogramToXContent(builder, inboundHandlingTimeBucketFrequencies, Fields.INBOUND_HANDLING_TIME_HISTOGRAM);
-                histogramToXContent(builder, outboundHandlingTimeBucketFrequencies, Fields.OUTBOUND_HANDLING_TIME_HISTOGRAM);
-            } else {
-                // Stats came from before v8.1
-                assert IMPOSSIBLE_IN_V9;
-            }
-            if (transportActionStats.isEmpty() == false) {
-                builder.startObject(Fields.ACTIONS);
-            } else {
-                // Stats came from before v8.8
-                assert IMPOSSIBLE_IN_V9;
-            }
+            assert inboundHandlingTimeBucketFrequencies.length > 0;
+            histogramToXContent(builder, inboundHandlingTimeBucketFrequencies, Fields.INBOUND_HANDLING_TIME_HISTOGRAM);
+            histogramToXContent(builder, outboundHandlingTimeBucketFrequencies, Fields.OUTBOUND_HANDLING_TIME_HISTOGRAM);
+            builder.startObject(Fields.ACTIONS);
             return builder;
         }),
@@ -215,12 +195,7 @@
                 return builder;
             }),
-            Iterators.single((builder, params) -> {
-                if (transportActionStats.isEmpty() == false) {
-                    builder.endObject();
-                }
-                return builder.endObject();
-            })
+            Iterators.single((builder, params) -> { return builder.endObject().endObject(); })
         );
     }
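The TransportStats change keeps writing the legacy presence flag only to older wire versions while treating the histograms themselves as mandatory. A self-contained sketch of the same version-gated pattern, with a made-up version constant and plain writeLong instead of the variable-length encoding:

```java
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hypothetical sketch of version-gated serialization: the boolean flag is
// emitted only for peers below the version where histograms became required.
public class VersionGatedWriteDemo {
    static final int HANDLING_TIME_REQUIRED = 9_000_000; // illustrative id

    static void writeStats(DataOutputStream out, int wireVersion, long[] histogram) throws IOException {
        if (wireVersion < HANDLING_TIME_REQUIRED) {
            out.writeBoolean(true); // legacy "histograms present" flag, always true now
        }
        for (long bucket : histogram) {
            out.writeLong(bucket);
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeStats(new DataOutputStream(bytes), 8_900_000, new long[] { 1, 2, 3 });
        System.out.println(bytes.size() + " bytes written"); // 25: 1 flag byte + 3 * 8
    }
}
```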
diff --git a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java
index 186618f3662fb..9947606470178 100644
--- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java
+++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java
@@ -15,7 +15,9 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
+import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
 import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
+import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest;
 import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
@@ -32,7 +34,6 @@
 import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService;
 import org.elasticsearch.cluster.metadata.MetadataUpdateSettingsService;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.CheckedBiConsumer;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -59,6 +60,7 @@
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_INDEX_VERSION;
+import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE;
 import static org.elasticsearch.cluster.metadata.IndexMetadata.State.CLOSE;
 import static org.elasticsearch.core.Strings.format;
 
@@ -205,49 +207,14 @@ public void run(SystemIndexMigrationTaskState taskState) {
         }
 
         // Kick off our callback "loop" - finishIndexAndLoop calls back into prepareNextIndex
-        cleanUpPreviousMigration(
-            taskState,
-            clusterState,
-            state -> prepareNextIndex(state, state2 -> migrateSingleIndex(state2, this::finishIndexAndLoop), stateFeatureName)
-        );
-    }
-
-    private void cleanUpPreviousMigration(
-        SystemIndexMigrationTaskState taskState,
-        ClusterState currentState,
-        Consumer<ClusterState> listener
-    ) {
         logger.debug("cleaning up previous migration, task state: [{}]", taskState == null ? "null" : Strings.toString(taskState));
"null" : Strings.toString(taskState)); - if (taskState != null && taskState.getCurrentIndex() != null) { - SystemIndexMigrationInfo migrationInfo; - try { - migrationInfo = SystemIndexMigrationInfo.fromTaskState( - taskState, - systemIndices, - currentState.metadata(), - indexScopedSettings - ); - } catch (Exception e) { - markAsFailed(e); - return; - } - final String newIndexName = migrationInfo.getNextIndexName(); - logger.info("removing index [{}] from previous incomplete migration", newIndexName); - - migrationInfo.createClient(baseClient) - .admin() - .indices() - .prepareDelete(newIndexName) - .execute(ActionListener.wrap(ackedResponse -> { - if (ackedResponse.isAcknowledged()) { - logger.debug("successfully removed index [{}]", newIndexName); - clearResults(clusterService, ActionListener.wrap(listener::accept, this::markAsFailed)); - } - }, this::markAsFailed)); - } else { - logger.debug("no incomplete index to remove"); - clearResults(clusterService, ActionListener.wrap(listener::accept, this::markAsFailed)); - } + clearResults( + clusterService, + ActionListener.wrap( + state -> prepareNextIndex(state2 -> migrateSingleIndex(state2, this::finishIndexAndLoop), stateFeatureName), + this::markAsFailed + ) + ); } private void finishIndexAndLoop(BulkByScrollResponse bulkResponse) { @@ -287,11 +254,7 @@ private void finishIndexAndLoop(BulkByScrollResponse bulkResponse) { }, this::markAsFailed) ); } else { - prepareNextIndex( - clusterService.state(), - state2 -> migrateSingleIndex(state2, this::finishIndexAndLoop), - lastMigrationInfo.getFeatureName() - ); + prepareNextIndex(state2 -> migrateSingleIndex(state2, this::finishIndexAndLoop), lastMigrationInfo.getFeatureName()); } } @@ -301,7 +264,6 @@ private void recordIndexMigrationSuccess(SystemIndexMigrationInfo lastMigrationI SingleFeatureMigrationResult.success(), ActionListener.wrap(state -> { prepareNextIndex( - state, clusterState -> migrateSingleIndex(clusterState, this::finishIndexAndLoop), lastMigrationInfo.getFeatureName() ); @@ -310,7 +272,7 @@ private void recordIndexMigrationSuccess(SystemIndexMigrationInfo lastMigrationI updateTask.submit(clusterService); } - private void prepareNextIndex(ClusterState clusterState, Consumer listener, String lastFeatureName) { + private void prepareNextIndex(Consumer listener, String lastFeatureName) { synchronized (migrationQueue) { assert migrationQueue != null; if (migrationQueue.isEmpty()) { @@ -422,7 +384,7 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer innerListener = ActionListener.wrap(listener::accept, this::markAsFailed); try { - createIndex(migrationInfo, innerListener.delegateFailureAndWrap((delegate, shardsAcknowledgedResponse) -> { + createIndexRetryOnFailure(migrationInfo, innerListener.delegateFailureAndWrap((delegate, shardsAcknowledgedResponse) -> { logger.debug( "while migrating [{}] , got create index response: [{}]", oldIndexName, @@ -448,12 +410,33 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer { + if (aliasesResponse.hasErrors()) { + var e = new ElasticsearchException("Aliases request had errors"); + for (var error : aliasesResponse.getErrors()) { + e.addSuppressed(error); + } + throw e; + } + logger.info( + "Successfully migrated old index [{}] to new index [{}] from feature [{}]", + oldIndexName, + migrationInfo.getNextIndexName(), + migrationInfo.getFeatureName() + ); + delegate2.onResponse(bulkByScrollResponse); + }, e -> { + logger.error( + () -> format( + "An error occurred while changing aliases and removing the old 
+                                oldIndexName,
+                                migrationInfo.getFeatureName()
+                            ),
+                            e
+                        );
+                        removeReadOnlyBlockOnReindexFailure(oldIndex, delegate2, e);
+                    }));
                 }
             }, e -> {
                 logger.error(
@@ -486,6 +469,8 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer<ClusterState
     private void createIndex(SystemIndexMigrationInfo migrationInfo, ActionListener<ShardsAcknowledgedResponse> listener) {
+        logger.info("creating new system index [{}] from feature [{}]", migrationInfo.getNextIndexName(), migrationInfo.getFeatureName());
+
         final CreateIndexClusterStateUpdateRequest createRequest = new CreateIndexClusterStateUpdateRequest(
             "migrate-system-index",
             migrationInfo.getNextIndexName(),
@@ -511,10 +496,36 @@ private void createIndex(SystemIndexMigrationInfo migrationInfo, ActionListener<
         );
     }
 
-    private CheckedBiConsumer<ActionListener<BulkByScrollResponse>, AcknowledgedResponse, Exception> setAliasAndRemoveOldIndex(
-        SystemIndexMigrationInfo migrationInfo,
-        BulkByScrollResponse bulkByScrollResponse
-    ) {
+    private void createIndexRetryOnFailure(SystemIndexMigrationInfo migrationInfo, ActionListener<ShardsAcknowledgedResponse> listener) {
+        createIndex(migrationInfo, listener.delegateResponse((l, e) -> {
+            logger.warn("createIndex failed, retrying after removing index [{}] from previous attempt", migrationInfo.getNextIndexName());
+            deleteIndex(migrationInfo, ActionListener.wrap(cleanupResponse -> createIndex(migrationInfo, l.delegateResponse((l3, e3) -> {
+                logger.error(
+                    "createIndex failed after retrying, aborting system index migration. index: " + migrationInfo.getNextIndexName(),
+                    e3
+                );
+                l.onFailure(e3);
+            })), e2 -> {
+                logger.error("deleteIndex failed, aborting system index migration. index: " + migrationInfo.getNextIndexName(), e2);
+                l.onFailure(e2);
+            }));
+        }));
+    }
+
+    private void deleteIndex(SystemIndexMigrationInfo migrationInfo, ActionListener<AcknowledgedResponse> listener) {
+        logger.info("removing index [{}] from feature [{}]", migrationInfo.getNextIndexName(), migrationInfo.getFeatureName());
+        String newIndexName = migrationInfo.getNextIndexName();
+        baseClient.admin().indices().prepareDelete(newIndexName).execute(ActionListener.wrap(ackedResponse -> {
+            if (ackedResponse.isAcknowledged()) {
+                logger.info("successfully removed index [{}]", newIndexName);
+                listener.onResponse(ackedResponse);
+            } else {
+                listener.onFailure(new ElasticsearchException("Failed to acknowledge index deletion for [" + newIndexName + "]"));
+            }
+        }, listener::onFailure));
+    }
+
+    private void setAliasAndRemoveOldIndex(SystemIndexMigrationInfo migrationInfo, ActionListener<IndicesAliasesResponse> listener) {
         final IndicesAliasesRequestBuilder aliasesRequest = migrationInfo.createClient(baseClient).admin().indices().prepareAliases();
         aliasesRequest.removeIndex(migrationInfo.getCurrentIndexName());
         aliasesRequest.addAlias(migrationInfo.getNextIndexName(), migrationInfo.getCurrentIndexName());
@@ -533,30 +544,42 @@ private CheckedBiConsumer<ActionListener<BulkByScrollResponse>, AcknowledgedResp
             );
         });
 
-        // Technically this callback might have a different cluster state, but it shouldn't matter - these indices shouldn't be changing
-        // while we're trying to migrate them.
-        return (listener, unsetReadOnlyResponse) -> aliasesRequest.execute(
-            listener.delegateFailureAndWrap((l, deleteIndexResponse) -> l.onResponse(bulkByScrollResponse))
-        );
+        aliasesRequest.execute(listener);
     }
 
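The setWriteBlock rework below is deliberately asymmetric: adding the block goes through the dedicated add-block API so in-flight writes are drained first, while removing it remains a plain settings update. A minimal sketch of that shape, against a hypothetical client interface rather than the ES client:

```java
// Hypothetical sketch: block on via a dedicated call, block off via settings.
public class WriteBlockDemo {
    interface IndicesClient {
        void addWriteBlock(String index) throws Exception;       // drains in-flight writes first
        void putSetting(String index, String key, Object value); // plain settings change
    }

    static void setWriteBlock(IndicesClient client, String index, boolean blocked) throws Exception {
        if (blocked) {
            client.addWriteBlock(index);
        } else {
            client.putSetting(index, "index.blocks.write", false);
        }
    }
}
```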
     */
    private void setWriteBlock(Index index, boolean readOnlyValue, ActionListener<AcknowledgedResponse> listener) {
-        final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), readOnlyValue).build();
-
-        metadataUpdateSettingsService.updateSettings(
-            new UpdateSettingsClusterStateUpdateRequest(
-                MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT,
-                TimeValue.ZERO,
-                readOnlySettings,
-                UpdateSettingsClusterStateUpdateRequest.OnExisting.OVERWRITE,
-                UpdateSettingsClusterStateUpdateRequest.OnStaticSetting.REJECT,
-                index
-            ),
-            listener
-        );
+        if (readOnlyValue) {
+            // Setting the Block with an AddIndexBlockRequest ensures all shards have accounted for the block and all
+            // in-flight writes are completed before returning.
+            baseClient.admin()
+                .indices()
+                .addBlock(
+                    new AddIndexBlockRequest(WRITE, index.getName()).masterNodeTimeout(MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT),
+                    listener.delegateFailureAndWrap((l, response) -> {
+                        if (response.isAcknowledged() == false) {
+                            throw new ElasticsearchException("Failed to acknowledge read-only block index request");
+                        }
+                        l.onResponse(response);
+                    })
+                );
+        } else {
+            // The only way to remove a Block is via a settings update.
+            final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), false).build();
+            metadataUpdateSettingsService.updateSettings(
+                new UpdateSettingsClusterStateUpdateRequest(
+                    MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT,
+                    TimeValue.ZERO,
+                    readOnlySettings,
+                    UpdateSettingsClusterStateUpdateRequest.OnExisting.OVERWRITE,
+                    UpdateSettingsClusterStateUpdateRequest.OnStaticSetting.REJECT,
+                    index
+                ),
+                listener
+            );
+        }
     }

     private void reindex(SystemIndexMigrationInfo migrationInfo, ActionListener<BulkByScrollResponse> listener) {
diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec
index 33c8081971202..1fbdaea9c772a 100644
--- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec
+++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec
@@ -1,3 +1,4 @@
 org.elasticsearch.index.codec.Elasticsearch814Codec
 org.elasticsearch.index.codec.Elasticsearch816Codec
 org.elasticsearch.index.codec.Elasticsearch900Codec
+org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec
diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv
index 81d9bf5cb30a4..97b6dd76bf021 100644
--- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv
+++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv
@@ -137,5 +137,7 @@
 8.16.1,8772004
 8.16.2,8772004
 8.16.3,8772004
+8.16.4,8772004
 8.17.0,8797002
 8.17.1,8797002
+8.17.2,8797002
diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
index 77aae99907dfc..ada61c118ec3c 100644
--- a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
+++ b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
@@ -23,6 +23,13 @@ grant codeBase "${codebase.mockito-core}" {
   permission java.lang.RuntimePermission "accessDeclaredMembers";
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
   permission java.lang.RuntimePermission "getClassLoader";
+  // needed when calling bytebuddy
+  permission java.lang.RuntimePermission
"createClassLoader"; + permission java.lang.RuntimePermission "net.bytebuddy.createJavaDispatcher"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.utility"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.dynamic.loading"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.description.type"; + permission java.lang.reflect.ReflectPermission "newProxyInPackage.net.bytebuddy.description.method"; }; grant codeBase "${codebase.byte-buddy}" { diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 874d9fe3d0150..164516a562b70 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -137,5 +137,7 @@ 8.16.1,8518000 8.16.2,8518000 8.16.3,8518000 +8.16.4,8518000 8.17.0,8521000 8.17.1,8521000 +8.17.2,8521000 diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index 00429035f97d3..9b02b66583e78 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -13,16 +13,13 @@ import org.elasticsearch.test.TransportVersionUtils; import java.lang.reflect.Modifier; -import java.util.Collections; -import java.util.List; import java.util.Set; -import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; @@ -70,13 +67,11 @@ public static class DuplicatedIdFakeVersion { public void testStaticTransportVersionChecks() { assertThat( TransportVersions.collectAllVersionIdsDefinedInClass(CorrectFakeVersion.class), - equalTo( - List.of( - CorrectFakeVersion.V_0_000_002, - CorrectFakeVersion.V_0_000_003, - CorrectFakeVersion.V_0_000_004, - CorrectFakeVersion.V_0_00_01 - ) + contains( + CorrectFakeVersion.V_0_000_002, + CorrectFakeVersion.V_0_000_003, + CorrectFakeVersion.V_0_000_004, + CorrectFakeVersion.V_0_00_01 ) ); AssertionError e = expectThrows( @@ -162,15 +157,15 @@ public void testMax() { } public void testIsPatchFrom() { - TransportVersion patchVersion = TransportVersion.fromId(8_800_00_4); - assertThat(TransportVersion.fromId(8_799_00_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_799_00_9).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_3).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_4).isPatchFrom(patchVersion), is(true)); - assertThat(TransportVersion.fromId(8_800_00_9).isPatchFrom(patchVersion), is(true)); - assertThat(TransportVersion.fromId(8_800_01_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_801_00_0).isPatchFrom(patchVersion), is(false)); + TransportVersion patchVersion = TransportVersion.fromId(8_800_0_04); + assertThat(TransportVersion.fromId(8_799_0_00).isPatchFrom(patchVersion), is(false)); + 
assertThat(TransportVersion.fromId(8_799_0_09).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_00).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_03).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_04).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_0_49).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_1_00).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_801_0_00).isPatchFrom(patchVersion), is(false)); } public void testVersionConstantPresent() { @@ -185,7 +180,20 @@ public void testVersionConstantPresent() { } public void testCURRENTIsLatest() { - assertThat(Collections.max(TransportVersion.getAllVersions()), is(TransportVersion.current())); + assertThat(TransportVersion.getAllVersions().getLast(), is(TransportVersion.current())); + } + + public void testPatchVersionsStillAvailable() { + for (TransportVersion tv : TransportVersion.getAllVersions()) { + if (tv.onOrAfter(TransportVersions.V_8_9_X) && (tv.id() % 100) > 90) { + fail( + "Transport version " + + tv + + " is nearing the limit of available patch numbers." + + " Please inform the Core/Infra team that isPatchFrom may need to be modified" + ); + } + } } public void testToReleaseVersion() { @@ -199,40 +207,4 @@ public void testToString() { assertEquals("2000099", TransportVersion.fromId(2_00_00_99).toString()); assertEquals("5000099", TransportVersion.fromId(5_00_00_99).toString()); } - - /** - * Until 9.0 bumps its transport version to 9_000_00_0, all transport changes must be backported to 8.x. - * This test ensures transport versions are dense, so that we have confidence backports have not been missed. - * Note that it does not ensure patches are not missed, but it should catch the majority of misordered - * or missing transport versions. 
- */ - public void testDenseTransportVersions() { - Set missingVersions = new TreeSet<>(); - TransportVersion previous = null; - for (var tv : TransportVersion.getAllVersions()) { - if (tv.before(TransportVersions.V_8_16_0)) { - continue; - } - if (previous == null) { - previous = tv; - continue; - } - - if (previous.id() + 1000 < tv.id()) { - int nextId = previous.id(); - do { - nextId = (nextId + 1000) / 1000 * 1000; - missingVersions.add(nextId); - } while (nextId + 1000 < tv.id()); - } - previous = tv; - } - if (missingVersions.isEmpty() == false) { - StringBuilder msg = new StringBuilder("Missing transport versions:\n"); - for (Integer id : missingVersions) { - msg.append(" " + id + "\n"); - } - fail(msg.toString()); - } - } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java index c8d7f9b7948cb..f32ebd3235144 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java @@ -21,12 +21,10 @@ import org.elasticsearch.cluster.metadata.DesiredNodes; import org.elasticsearch.cluster.metadata.DesiredNodesMetadata; import org.elasticsearch.cluster.metadata.DesiredNodesTestCase; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -53,10 +51,8 @@ public void testWriteBlocks() { transportService, mock(ClusterService.class), mock(RerouteService.class), - mock(FeatureService.class), threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), mock(AllocationService.class) ); @@ -81,10 +77,8 @@ public void testNoBlocks() { transportService, mock(ClusterService.class), mock(RerouteService.class), - mock(FeatureService.class), threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), mock(AllocationService.class) ); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java index 13479d1885360..d3e5c1c7268fa 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -31,14 +30,10 @@ import java.util.Arrays; import java.util.Base64; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; -import static 
org.hamcrest.CoreMatchers.containsString; - public class GetSnapshotsResponseTests extends ESTestCase { // We can not subclass AbstractSerializingTestCase because it // can only be used for instances with equals and hashCode @@ -60,12 +55,6 @@ private GetSnapshotsResponse copyInstance(GetSnapshotsResponse instance) throws private void assertEqualInstances(GetSnapshotsResponse expectedInstance, GetSnapshotsResponse newInstance) { assertEquals(expectedInstance.getSnapshots(), newInstance.getSnapshots()); assertEquals(expectedInstance.next(), newInstance.next()); - assertEquals(expectedInstance.getFailures().keySet(), newInstance.getFailures().keySet()); - for (Map.Entry expectedEntry : expectedInstance.getFailures().entrySet()) { - ElasticsearchException expectedException = expectedEntry.getValue(); - ElasticsearchException newException = newInstance.getFailures().get(expectedEntry.getKey()); - assertThat(newException.getMessage(), containsString(expectedException.getMessage())); - } } private List createSnapshotInfos(String repoName) { @@ -99,7 +88,6 @@ private List createSnapshotInfos(String repoName) { private GetSnapshotsResponse createTestInstance() { Set repositories = new HashSet<>(); - Map failures = new HashMap<>(); List responses = new ArrayList<>(); for (int i = 0; i < randomIntBetween(0, 5); i++) { @@ -111,12 +99,10 @@ private GetSnapshotsResponse createTestInstance() { for (int i = 0; i < randomIntBetween(0, 5); i++) { String repository = randomValueOtherThanMany(repositories::contains, () -> randomAlphaOfLength(10)); repositories.add(repository); - failures.put(repository, new ElasticsearchException(randomAlphaOfLength(10))); } return new GetSnapshotsResponse( responses, - failures, randomBoolean() ? Base64.getUrlEncoder() .encodeToString( diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index bf4a28b9c60b2..d9edb5db52662 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; @@ -55,12 +55,11 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; -import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion101PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; -import org.apache.lucene.store.IOContext; import org.apache.lucene.tests.geo.GeoTestUtil; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BitSetIterator; @@ -328,11 +327,11 @@ public void testTriangle() throws Exception { public void 
testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene100Codec(Lucene100Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene101Codec(Lucene101Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { - return new Completion912PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); + return new Completion101PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); } else { return super.postingsFormat(); } @@ -415,25 +414,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene100Codec.Mode mode() { - return Lucene100Codec.Mode.BEST_SPEED; + Lucene101Codec.Mode mode() { + return Lucene101Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene100Codec.Mode mode() { - return Lucene100Codec.Mode.BEST_COMPRESSION; + Lucene101Codec.Mode mode() { + return Lucene101Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene100Codec.Mode mode(); + abstract Lucene101Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene100Codec(codecMode.mode())); + .setCodec(new Lucene101Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -641,7 +640,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene100Codec(mode.mode()) { + .setCodec(new Lucene101Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { return new ES812PostingsFormat(); @@ -688,7 +687,7 @@ static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats final String[] files; final Directory directory; if (sis.getUseCompoundFile()) { - directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.DEFAULT); + directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis); files = directory.listAll(); } else { directory = reader.directory(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java index f06ad47d306ef..564bcee973ae5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java @@ -18,8 +18,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.PlainShardIterator; import org.elasticsearch.cluster.routing.ShardIterator; import 
org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -299,16 +297,16 @@ public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { } ) { @Override - protected GroupShardsIterator shards( + protected List shards( ClusterState clusterState, AnalyzeIndexDiskUsageRequest request, String[] concreteIndices ) { final List shardIterators = new ArrayList<>(targetShards.size()); for (Map.Entry> e : targetShards.entrySet()) { - shardIterators.add(new PlainShardIterator(e.getKey(), e.getValue())); + shardIterators.add(new ShardIterator(e.getKey(), e.getValue())); } - return new GroupShardsIterator<>(shardIterators); + return shardIterators; } }; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index ee95f7ffb5b9a..515d571243a7c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -253,7 +253,7 @@ public void testValidation() { assertNotNull(validationException); assertEquals(1, validationException.validationErrors().size()); assertEquals( - "rollover cannot be applied to both regular and failure indices at the same time", + "Invalid index name [alias-index::*], invalid usage of :: separator, [*] is not a recognized selector", validationException.validationErrors().get(0) ); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java index 5e17768857455..e7a802b75d7a3 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java @@ -44,7 +44,7 @@ public void testBulkRequestModifier() { for (int i = 0; modifier.hasNext(); i++) { modifier.next(); if (randomBoolean()) { - modifier.markItemAsFailed(i, new RuntimeException()); + modifier.markItemAsFailed(i, new RuntimeException(), randomFrom(IndexDocFailureStoreStatus.values())); failedSlots.add(i); } } @@ -110,7 +110,7 @@ public void testPipelineFailures() { // actually mark the failures for (int i : failures) { - modifier.markItemAsFailed(i, new RuntimeException()); + modifier.markItemAsFailed(i, new RuntimeException(), randomFrom(IndexDocFailureStoreStatus.values())); } // So half of the requests have "failed", so only the successful requests are left: diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index c6f923ce7cc03..0b6dfb301ecbf 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -117,7 +117,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { @Captor ArgumentCaptor> redirectHandler; @Captor - ArgumentCaptor> failureHandler; + ArgumentCaptor> failureHandler; @Captor ArgumentCaptor> completionHandler; @Captor @@ -412,7 +412,8 @@ public void testIngestLocal() throws Exception { // now check success Iterator> req = bulkDocsItr.getValue().iterator(); - failureHandler.getValue().accept(0, exception); // have an exception for our one 
index request + // have an exception for our one index request + failureHandler.getValue().apply(0, exception, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN); indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing // ensure redirects on failure store data stream assertTrue(redirectPredicate.getValue().apply(WITH_FAILURE_STORE_ENABLED + "-1")); @@ -509,7 +510,8 @@ public void testIngestSystemLocal() throws Exception { // now check success Iterator> req = bulkDocsItr.getValue().iterator(); - failureHandler.getValue().accept(0, exception); // have an exception for our one index request + // have an exception for our one index request + failureHandler.getValue().apply(0, exception, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN); indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null); assertTrue(action.isExecuted); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java index c99c671c69148..fa57431cc582a 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.TransportVersionUtils; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -37,7 +36,6 @@ import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomMappingHashToIndices; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.nullValue; public class FieldCapabilitiesNodeResponseTests extends AbstractWireSerializingTestCase { @@ -145,48 +143,6 @@ public void testSerializeNodeResponseBetweenNewNodes() throws Exception { } } - public void testSerializeNodeResponseBetweenOldNodes() throws IOException { - final TransportVersion minCompactVersion = TransportVersions.MINIMUM_COMPATIBLE; - assertTrue("Remove this test once minCompactVersion >= 8.2.0", minCompactVersion.before(TransportVersions.V_8_2_0)); - List indexResponses = CollectionUtils.concatLists( - randomIndexResponsesWithMappingHash(randomMappingHashToIndices()), - randomIndexResponsesWithoutMappingHash() - ); - Randomness.shuffle(indexResponses); - FieldCapabilitiesNodeResponse inResponse = randomNodeResponse(indexResponses); - TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - minCompactVersion, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_2_0) - ); - final FieldCapabilitiesNodeResponse outResponse = copyInstance(inResponse, version); - assertThat(outResponse.getFailures().keySet(), equalTo(inResponse.getFailures().keySet())); - assertThat(outResponse.getUnmatchedShardIds(), equalTo(inResponse.getUnmatchedShardIds())); - final List inList = inResponse.getIndexResponses(); - final List outList = outResponse.getIndexResponses(); - assertThat(outList, hasSize(inList.size())); - for (int i = 0; i < inList.size(); i++) { - assertThat("Responses between old nodes don't have mapping hash", outList.get(i).getIndexMappingHash(), 
nullValue()); - assertThat(outList.get(i).getIndexName(), equalTo(inList.get(i).getIndexName())); - assertThat(outList.get(i).canMatch(), equalTo(inList.get(i).canMatch())); - Map outCap = outList.get(i).get(); - Map inCap = inList.get(i).get(); - if (version.onOrAfter(TransportVersions.V_8_0_0)) { - assertThat(outCap, equalTo(inCap)); - } else { - // Exclude metric types which was introduced in 8.0 - assertThat(outCap.keySet(), equalTo(inCap.keySet())); - for (String field : outCap.keySet()) { - assertThat(outCap.get(field).name(), equalTo(inCap.get(field).name())); - assertThat(outCap.get(field).type(), equalTo(inCap.get(field).type())); - assertThat(outCap.get(field).isSearchable(), equalTo(inCap.get(field).isSearchable())); - assertThat(outCap.get(field).isAggregatable(), equalTo(inCap.get(field).isAggregatable())); - assertThat(outCap.get(field).meta(), equalTo(inCap.get(field).meta())); - } - } - } - } - private static FieldCapabilitiesNodeResponse randomNodeResponse(List indexResponses) { int numUnmatched = randomIntBetween(0, 3); final Set unmatchedShardIds = new HashSet<>(); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java index 6ea4a1d3dc46b..ceb84e4b2a0d9 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -40,7 +40,6 @@ import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomMappingHashToIndices; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.nullValue; public class FieldCapabilitiesResponseTests extends AbstractWireSerializingTestCase { @@ -198,48 +197,4 @@ public void testSerializeCCSResponseBetweenNewClusters() throws Exception { } } } - - public void testSerializeCCSResponseBetweenOldClusters() throws IOException { - TransportVersion minCompactVersion = TransportVersions.MINIMUM_COMPATIBLE; - assertTrue("Remove this test once minCompactVersion >= 8.2.0", minCompactVersion.before(TransportVersions.V_8_2_0)); - List indexResponses = CollectionUtils.concatLists( - randomIndexResponsesWithMappingHash(randomMappingHashToIndices()), - randomIndexResponsesWithoutMappingHash() - ); - Randomness.shuffle(indexResponses); - FieldCapabilitiesResponse inResponse = randomCCSResponse(indexResponses); - TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - minCompactVersion, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_2_0) - ); - final FieldCapabilitiesResponse outResponse = copyInstance(inResponse, version); - assertThat( - outResponse.getFailures().stream().flatMap(f -> Arrays.stream(f.getIndices())).toList(), - equalTo(inResponse.getFailures().stream().flatMap(f -> Arrays.stream(f.getIndices())).toList()) - ); - final List inList = inResponse.getIndexResponses(); - final List outList = outResponse.getIndexResponses(); - assertThat(outList, hasSize(inList.size())); - for (int i = 0; i < inList.size(); i++) { - assertThat("Responses between old clusters don't have mapping hash", outList.get(i).getIndexMappingHash(), nullValue()); - assertThat(outList.get(i).getIndexName(), equalTo(inList.get(i).getIndexName())); - assertThat(outList.get(i).canMatch(), equalTo(inList.get(i).canMatch())); - Map outCap = outList.get(i).get(); - 
Map inCap = inList.get(i).get(); - if (version.onOrAfter(TransportVersions.V_8_0_0)) { - assertThat(outCap, equalTo(inCap)); - } else { - // Exclude metric types which was introduced in 8.0 - assertThat(outCap.keySet(), equalTo(inCap.keySet())); - for (String field : outCap.keySet()) { - assertThat(outCap.get(field).name(), equalTo(inCap.get(field).name())); - assertThat(outCap.get(field).type(), equalTo(inCap.get(field).type())); - assertThat(outCap.get(field).isSearchable(), equalTo(inCap.get(field).isSearchable())); - assertThat(outCap.get(field).isAggregatable(), equalTo(inCap.get(field).isAggregatable())); - assertThat(outCap.get(field).meta(), equalTo(inCap.get(field).meta())); - } - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/action/get/TransportMultiGetActionTests.java b/server/src/test/java/org/elasticsearch/action/get/TransportMultiGetActionTests.java index dad392970b6a1..96b06bed27ce8 100644 --- a/server/src/test/java/org/elasticsearch/action/get/TransportMultiGetActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/TransportMultiGetActionTests.java @@ -46,6 +46,7 @@ import org.junit.AfterClass; import org.junit.BeforeClass; +import java.util.Collections; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -67,7 +68,6 @@ public class TransportMultiGetActionTests extends ESTestCase { private static TransportService transportService; private static ClusterService clusterService; private static TransportMultiGetAction transportAction; - private static TransportShardMultiGetAction shardAction; @BeforeClass public static void beforeClass() throws Exception { @@ -135,11 +135,9 @@ public static void beforeClass() throws Exception { ) .build(); - final ShardIterator index1ShardIterator = mock(ShardIterator.class); - when(index1ShardIterator.shardId()).thenReturn(new ShardId(index1, randomInt())); + final ShardIterator index1ShardIterator = new ShardIterator(new ShardId(index1, randomInt()), Collections.emptyList()); - final ShardIterator index2ShardIterator = mock(ShardIterator.class); - when(index2ShardIterator.shardId()).thenReturn(new ShardId(index2, randomInt())); + final ShardIterator index2ShardIterator = new ShardIterator(new ShardId(index2, randomInt()), Collections.emptyList()); final OperationRouting operationRouting = mock(OperationRouting.class); when( @@ -153,21 +151,6 @@ public static void beforeClass() throws Exception { when(clusterService.localNode()).thenReturn(transportService.getLocalNode()); when(clusterService.state()).thenReturn(clusterState); when(clusterService.operationRouting()).thenReturn(operationRouting); - final NodeClient client = new NodeClient(Settings.EMPTY, threadPool); - - shardAction = new TransportShardMultiGetAction( - clusterService, - transportService, - mock(IndicesService.class), - threadPool, - new ActionFilters(emptySet()), - new Resolver(), - EmptySystemIndices.INSTANCE.getExecutorSelector(), - client - ) { - @Override - protected void doExecute(Task task, MultiGetShardRequest request, ActionListener listener) {} - }; } @AfterClass @@ -177,7 +160,6 @@ public static void afterClass() { transportService = null; clusterService = null; transportAction = null; - shardAction = null; } public void testTransportMultiGetAction() { @@ -270,5 +252,4 @@ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { return new Index("index1", randomBase64UUID()); } } - } diff --git 
a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java index dc1698e3459ec..41a5919060095 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java @@ -138,7 +138,7 @@ public void setup() { clusterService, mock(ReservedClusterStateService.class), newEnvironment(Settings.EMPTY), - new FileSettingsService.FileSettingsHealthIndicatorService() + new FileSettingsService.FileSettingsHealthIndicatorService(Settings.EMPTY) ) ); } diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 47ff4ca6f0600..11085558dbe16 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.UUIDs; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; @@ -83,9 +82,7 @@ private AbstractSearchAsyncAction createAction( null, request, listener, - new GroupShardsIterator<>( - Collections.singletonList(new SearchShardIterator(null, new ShardId("index", "_na", 0), Collections.emptyList(), null)) - ), + Collections.singletonList(new SearchShardIterator(null, new ShardId("index", "_na", 0), Collections.emptyList(), null)), timeProvider, ClusterState.EMPTY_STATE, null, diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 1460270c48293..fb134a2b31366 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.action.search; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.CanMatchNodeResponse.ResponseOrFailure; @@ -22,7 +23,6 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; @@ -135,9 +135,9 @@ public void sendCanMatch( } }; - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), 2, @@ -232,9 +232,9 @@ public void sendCanMatch( } }; - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); 
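// Reading aid (the generics here were lost in extraction): the -/+ pair above changes
// AtomicReference<GroupShardsIterator<SearchShardIterator>> to AtomicReference<List<SearchShardIterator>>,
// matching the removal of the GroupShardsIterator wrapper threaded through the rest of this file.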
CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), 2, @@ -325,9 +325,9 @@ public void sendCanMatch( } }; - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "logs", new OriginalIndices(new String[] { "logs" }, SearchRequest.DEFAULT_INDICES_OPTIONS), randomIntBetween(2, 20), @@ -427,9 +427,9 @@ public void sendCanMatch( } }; - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "logs", new OriginalIndices(new String[] { "logs" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -1202,7 +1202,7 @@ public void testCanMatchFilteringOnCoordinatorWithMissingShards() throws Excepti false, new ActionListener<>() { @Override - public void onResponse(GroupShardsIterator searchShardIterators) { + public void onResponse(List searchShardIterators) { fail(null, "unexpected success with result [%s] while expecting to handle failure with [%s]", searchShardIterators); latch.countDown(); } @@ -1268,7 +1268,7 @@ private void assignShardsAndExecuteCanMatchPhase( boolean allowPartialResults, BiConsumer, List> canMatchResultsConsumer ) throws Exception { - AtomicReference> result = new AtomicReference<>(); + AtomicReference> result = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); Tuple> canMatchAndShardRequests = getCanMatchPhaseAndRequests( dataStreams, @@ -1305,7 +1305,7 @@ private Tuple> getCanMatc SuggestBuilder suggest, List unassignedIndices, boolean allowPartialResults, - ActionListener> canMatchActionListener + ActionListener> canMatchActionListener ) { Map lookup = new ConcurrentHashMap<>(); DiscoveryNode primaryNode = DiscoveryNodeUtils.create("node_1"); @@ -1324,7 +1324,7 @@ private Tuple> getCanMatc String[] indices = indicesToSearch.toArray(new String[0]); OriginalIndices originalIndices = new OriginalIndices(indices, SearchRequest.DEFAULT_INDICES_OPTIONS); - final List originalShardIters = new ArrayList<>(); + final List shardIters = new ArrayList<>(); for (var dataStream : dataStreams) { boolean atLeastOnePrimaryAssigned = false; for (var dataStreamIndex : dataStream.getIndices()) { @@ -1333,9 +1333,9 @@ private Tuple> getCanMatc boolean withAssignedPrimaries = randomBoolean() || atLeastOnePrimaryAssigned == false; int numShards = randomIntBetween(1, 6); if (unassignedIndices.contains(dataStreamIndex)) { - originalShardIters.addAll(getShardsIter(dataStreamIndex, originalIndices, numShards, false, null, null)); + shardIters.addAll(getShardsIter(dataStreamIndex, originalIndices, numShards, false, null, null)); } else { - originalShardIters.addAll( + shardIters.addAll( getShardsIter(dataStreamIndex, originalIndices, numShards, false, withAssignedPrimaries ? 
primaryNode : null, null) ); atLeastOnePrimaryAssigned |= withAssignedPrimaries; @@ -1345,14 +1345,14 @@ private Tuple> getCanMatc for (Index regularIndex : regularIndices) { if (unassignedIndices.contains(regularIndex)) { - originalShardIters.addAll(getShardsIter(regularIndex, originalIndices, randomIntBetween(1, 6), false, null, null)); + shardIters.addAll(getShardsIter(regularIndex, originalIndices, randomIntBetween(1, 6), false, null, null)); } else { - originalShardIters.addAll( + shardIters.addAll( getShardsIter(regularIndex, originalIndices, randomIntBetween(1, 6), randomBoolean(), primaryNode, replicaNode) ); } } - GroupShardsIterator shardsIter = GroupShardsIterator.sortAndCreate(originalShardIters); + CollectionUtil.timSort(shardIters); final SearchRequest searchRequest = new SearchRequest(); searchRequest.indices(indices); @@ -1415,7 +1415,6 @@ public void sendCanMatch( System::nanoTime ); - AtomicReference> result = new AtomicReference<>(); return new Tuple<>( new CanMatchPreFilterSearchPhase( logger, @@ -1425,7 +1424,7 @@ public void sendCanMatch( Collections.emptyMap(), threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION), searchRequest, - shardsIter, + shardIters, timeProvider, null, true, diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 65fdec96c92f0..5f84739599514 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -144,7 +144,6 @@ protected void run() { hits.decRef(); } } finally { - mockSearchPhaseContext.execute(() -> {}); var resp = mockSearchPhaseContext.searchResponse.get(); if (resp != null) { resp.decRef(); diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index e8e12300c23e3..b642787fbf901 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -62,7 +61,7 @@ public MockSearchPhaseContext(int numShards) { Runnable::run, new SearchRequest(), ActionListener.noop(), - new GroupShardsIterator(List.of()), + List.of(), null, ClusterState.EMPTY_STATE, new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()), @@ -151,7 +150,7 @@ protected void executePhaseOnShard( SearchActionListener listener ) { onShardResult(new SearchPhaseResult() { - }, shardIt); + }); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 7e9e6f623cab0..647d16977181f 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; @@ -67,7 +66,7 @@ public void testSkipSearchShards() throws InterruptedException { DiscoveryNode replicaNode = DiscoveryNodeUtils.create("node_2"); AtomicInteger contextIdGenerator = new AtomicInteger(0); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -182,7 +181,7 @@ public void testLimitConcurrentShardRequests() throws InterruptedException { DiscoveryNode replicaNode = DiscoveryNodeUtils.create("node_1"); AtomicInteger contextIdGenerator = new AtomicInteger(0); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -285,7 +284,7 @@ public void testFanOutAndCollect() throws InterruptedException { Map> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); int numShards = randomIntBetween(1, 10); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -415,7 +414,7 @@ public void testFanOutAndFail() throws InterruptedException { Map> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); int numShards = randomIntBetween(2, 10); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -534,7 +533,7 @@ public void testAllowPartialResults() throws InterruptedException { DiscoveryNode replicaNode = DiscoveryNodeUtils.create("node_1"); AtomicInteger contextIdGenerator = new AtomicInteger(0); - GroupShardsIterator shardsIter = getShardsIter( + List shardsIter = getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -647,7 +646,7 @@ public void testSkipUnavailableSearchShards() throws InterruptedException { searchShardIterator.reset(); searchShardIterators.add(searchShardIterator); } - GroupShardsIterator shardsIter = new GroupShardsIterator<>(searchShardIterators); + List shardsIter = searchShardIterators; Map lookup = Map.of(primaryNode.getId(), new MockConnection(primaryNode)); CountDownLatch latch = new CountDownLatch(1); @@ -706,7 +705,7 @@ protected void run() { assertThat(searchResponse.get().getSuccessfulShards(), equalTo(shardsIter.size())); } - static GroupShardsIterator getShardsIter( + static List getShardsIter( String index, OriginalIndices originalIndices, int numShards, @@ -714,9 +713,7 @@ static GroupShardsIterator getShardsIter( DiscoveryNode primaryNode, DiscoveryNode replicaNode ) { - return new GroupShardsIterator<>( - getShardsIter(new Index(index, "_na_"), originalIndices, numShards, doReplicas, primaryNode, replicaNode) - ); + return getShardsIter(new Index(index, "_na_"), originalIndices, numShards, doReplicas, primaryNode, replicaNode); } static List getShardsIter( diff --git 
a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index f005f862720ff..227239481a55a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; @@ -42,6 +41,7 @@ import org.elasticsearch.transport.Transport; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; @@ -150,7 +150,7 @@ public void sendExecuteQuery( } }; CountDownLatch latch = new CountDownLatch(1); - GroupShardsIterator shardsIter = SearchAsyncActionTests.getShardsIter( + List shardsIter = SearchAsyncActionTests.getShardsIter( "idx", new OriginalIndices(new String[] { "idx" }, SearchRequest.DEFAULT_INDICES_OPTIONS), numShards, @@ -219,13 +219,8 @@ protected void run() { if (withScroll) { assertFalse(canReturnNullResponse.get()); assertThat(numWithTopDocs.get(), equalTo(0)); - } else { - assertTrue(canReturnNullResponse.get()); - if (withCollapse) { - assertThat(numWithTopDocs.get(), equalTo(0)); - } else { - assertThat(numWithTopDocs.get(), greaterThanOrEqualTo(1)); - } + } else if (withCollapse) { + assertThat(numWithTopDocs.get(), equalTo(0)); } SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java index 2ecdc1be9c364..79736427f634d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java @@ -12,7 +12,8 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.OriginalIndicesTests; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.routing.GroupShardsIteratorTests; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; @@ -24,10 +25,24 @@ import java.util.Collections; import java.util.List; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.hamcrest.Matchers.equalTo; public class SearchShardIteratorTests extends ESTestCase { + public static List randomShardRoutings(ShardId shardId) { + return randomShardRoutings(shardId, randomIntBetween(0, 2)); + } + + private static List randomShardRoutings(ShardId shardId, int numReplicas) { + List shardRoutings = new ArrayList<>(); + shardRoutings.add(TestShardRouting.newShardRouting(shardId, randomAlphaOfLengthBetween(5, 10), true, STARTED)); + for (int j = 0; j < numReplicas; j++) { + 
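// loop body: each replica copy is started (primary == false) on its own random node id,
// alongside the primary added just above.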
shardRoutings.add(TestShardRouting.newShardRouting(shardId, randomAlphaOfLengthBetween(5, 10), false, STARTED)); + } + return shardRoutings; + } + public void testShardId() { ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomInt()); SearchShardIterator searchShardIterator = new SearchShardIterator(null, shardId, Collections.emptyList(), OriginalIndices.NONE); @@ -149,19 +164,14 @@ public void testCompareTo() { for (String uuid : uuids) { ShardId shardId = new ShardId(index, uuid, i); shardIterators.add( - new SearchShardIterator( - null, - shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), - OriginalIndicesTests.randomOriginalIndices() - ) + new SearchShardIterator(null, shardId, randomShardRoutings(shardId), OriginalIndicesTests.randomOriginalIndices()) ); for (String cluster : clusters) { shardIterators.add( new SearchShardIterator( cluster, shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), + randomShardRoutings(shardId), OriginalIndicesTests.randomOriginalIndices() ) ); @@ -207,11 +217,6 @@ public void testCompareToEqualItems() { private static SearchShardIterator randomSearchShardIterator() { String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomIntBetween(0, Integer.MAX_VALUE)); - return new SearchShardIterator( - clusterAlias, - shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), - OriginalIndicesTests.randomOriginalIndices() - ); + return new SearchShardIterator(clusterAlias, shardId, randomShardRoutings(shardId), OriginalIndicesTests.randomOriginalIndices()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 8196c01ee8bbc..f244af80ad9fb 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -37,8 +37,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.VersionInformation; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.GroupShardsIteratorTests; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -151,7 +149,7 @@ private static SearchShardIterator createSearchShardIterator( String clusterAlias ) { ShardId shardId = new ShardId(index, id); - List shardRoutings = GroupShardsIteratorTests.randomShardRoutings(shardId); + List shardRoutings = SearchShardIteratorTests.randomShardRoutings(shardId); return new SearchShardIterator(clusterAlias, shardId, shardRoutings, originalIndices); } @@ -250,7 +248,7 @@ public void testMergeShardsIterators() { Collections.shuffle(localShardIterators, random()); Collections.shuffle(remoteShardIterators, random()); - GroupShardsIterator groupShardsIterator = TransportSearchAction.mergeShardsIterators( + List groupShardsIterator = TransportSearchAction.mergeShardsIterators( localShardIterators, remoteShardIterators ); diff --git a/server/src/test/java/org/elasticsearch/action/support/IndexComponentSelectorTests.java 
b/server/src/test/java/org/elasticsearch/action/support/IndexComponentSelectorTests.java index 73d4ab59ce479..585d660917e4b 100644 --- a/server/src/test/java/org/elasticsearch/action/support/IndexComponentSelectorTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/IndexComponentSelectorTests.java @@ -20,7 +20,7 @@ public class IndexComponentSelectorTests extends ESTestCase { public void testIndexComponentSelectorFromKey() { assertThat(IndexComponentSelector.getByKey("data"), equalTo(IndexComponentSelector.DATA)); assertThat(IndexComponentSelector.getByKey("failures"), equalTo(IndexComponentSelector.FAILURES)); - assertThat(IndexComponentSelector.getByKey("*"), equalTo(IndexComponentSelector.ALL_APPLICABLE)); + assertThat(IndexComponentSelector.getByKey("*"), nullValue()); assertThat(IndexComponentSelector.getByKey("d*ta"), nullValue()); assertThat(IndexComponentSelector.getByKey("_all"), nullValue()); assertThat(IndexComponentSelector.getByKey("**"), nullValue()); @@ -30,11 +30,10 @@ public void testIndexComponentSelectorFromKey() { public void testIndexComponentSelectorFromId() { assertThat(IndexComponentSelector.getById((byte) 0), equalTo(IndexComponentSelector.DATA)); assertThat(IndexComponentSelector.getById((byte) 1), equalTo(IndexComponentSelector.FAILURES)); - assertThat(IndexComponentSelector.getById((byte) 2), equalTo(IndexComponentSelector.ALL_APPLICABLE)); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> IndexComponentSelector.getById((byte) 3)); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> IndexComponentSelector.getById((byte) 2)); assertThat( exception.getMessage(), - containsString("Unknown id of index component selector [3], available options are: {0=DATA, 1=FAILURES, 2=ALL_APPLICABLE}") + containsString("Unknown id of index component selector [2], available options are: {0=DATA, 1=FAILURES}") ); } diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java index ed12a3fda6eec..f5152dfbe7a5e 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java @@ -333,7 +333,7 @@ public void testInvalidNodes() throws Exception { // We were able to mark shards as stale, so the request finishes successfully assertThat(safeAwait(broadcastUnpromotableRequest(wrongRoutingTable, true)), equalTo(ActionResponse.Empty.INSTANCE)); - for (var shardRouting : wrongRoutingTable.unpromotableShards()) { + for (var shardRouting : wrongRoutingTable.assignedUnpromotableShards()) { Mockito.verify(shardStateAction) .remoteShardFailed( eq(shardRouting.shardId()), diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java index 9897fe9a42547..4337d4c3d9e99 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java @@ -162,7 +162,7 @@ public void testPrimaryWithUnpromotables() throws IOException { new 
UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "message"), ShardRouting.Role.SEARCH_ONLY ); - when(routingTable.allUnpromotableShards()).thenReturn(List.of(shardRouting)); + when(routingTable.unpromotableShards()).thenReturn(List.of(shardRouting)); when(routingTable.shardId()).thenReturn(shardId); WriteRequest.RefreshPolicy policy = randomFrom(WriteRequest.RefreshPolicy.IMMEDIATE, WriteRequest.RefreshPolicy.WAIT_UNTIL); postWriteRefresh.refreshShard(policy, primary, result.getTranslogLocation(), f, postWriteRefreshTimeout); @@ -238,9 +238,9 @@ public void testWaitForWithNullLocationCompletedImmediately() throws IOException ); // Randomly test scenarios with and without unpromotables if (randomBoolean()) { - when(routingTable.allUnpromotableShards()).thenReturn(Collections.emptyList()); + when(routingTable.unpromotableShards()).thenReturn(Collections.emptyList()); } else { - when(routingTable.allUnpromotableShards()).thenReturn(List.of(shardRouting)); + when(routingTable.unpromotableShards()).thenReturn(List.of(shardRouting)); } WriteRequest.RefreshPolicy policy = WriteRequest.RefreshPolicy.WAIT_UNTIL; postWriteRefresh.refreshShard(policy, primary, null, f, postWriteRefreshTimeout); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java index 4655d2e47bac5..d5be6727927db 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.EmptySystemIndices; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; @@ -47,6 +46,7 @@ import org.junit.AfterClass; import org.junit.BeforeClass; +import java.util.Collections; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -68,7 +68,6 @@ public class TransportMultiTermVectorsActionTests extends ESTestCase { private static TransportService transportService; private static ClusterService clusterService; private static TransportMultiTermVectorsAction transportAction; - private static TransportShardMultiTermsVectorAction shardAction; @BeforeClass public static void beforeClass() throws Exception { @@ -136,12 +135,8 @@ public static void beforeClass() throws Exception { ) .build(); - final ShardIterator index1ShardIterator = mock(ShardIterator.class); - when(index1ShardIterator.shardId()).thenReturn(new ShardId(index1, randomInt())); - - final ShardIterator index2ShardIterator = mock(ShardIterator.class); - when(index2ShardIterator.shardId()).thenReturn(new ShardId(index2, randomInt())); - + final ShardIterator index1ShardIterator = new ShardIterator(new ShardId(index1, randomInt()), Collections.emptyList()); + final ShardIterator index2ShardIterator = new ShardIterator(new ShardId(index2, randomInt()), Collections.emptyList()); final OperationRouting operationRouting = mock(OperationRouting.class); when( operationRouting.getShards(eq(clusterState), eq(index1.getName()), anyString(), nullable(String.class), nullable(String.class)) @@ -154,22 +149,6 @@ public static void beforeClass() throws Exception { 
when(clusterService.localNode()).thenReturn(transportService.getLocalNode()); when(clusterService.state()).thenReturn(clusterState); when(clusterService.operationRouting()).thenReturn(operationRouting); - - shardAction = new TransportShardMultiTermsVectorAction( - clusterService, - transportService, - mock(IndicesService.class), - threadPool, - new ActionFilters(emptySet()), - new Resolver() - ) { - @Override - protected void doExecute( - Task task, - MultiTermVectorsShardRequest request, - ActionListener listener - ) {} - }; } @AfterClass @@ -179,7 +158,6 @@ public static void afterClass() { transportService = null; clusterService = null; transportAction = null; - shardAction = null; } public void testTransportMultiGetAction() { diff --git a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java index d84ee0267251a..8eaff521068e8 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java @@ -9,11 +9,11 @@ package org.elasticsearch.bootstrap; -import org.elasticsearch.entitlement.runtime.policy.InboundNetworkEntitlement; -import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; -import org.elasticsearch.entitlement.runtime.policy.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.Scope; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; import org.elasticsearch.test.ESTestCase; import java.util.List; diff --git a/server/src/test/java/org/elasticsearch/bootstrap/NoSecurityManagerTests.java b/server/src/test/java/org/elasticsearch/bootstrap/NoSecurityManagerTests.java index c1bb941020575..d7628be0d7f00 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/NoSecurityManagerTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/NoSecurityManagerTests.java @@ -12,6 +12,7 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.test.GraalVMThreadsFilter; import static org.hamcrest.Matchers.is; @@ -20,6 +21,7 @@ public class NoSecurityManagerTests extends LuceneTestCase { public void testPrepopulateSecurityCaller() { + assumeTrue("security manager must be available", RuntimeVersionFeature.isSecurityManagerAvailable()); assumeTrue("Unexpected security manager:" + System.getSecurityManager(), System.getSecurityManager() == null); boolean isAtLeastJava17 = Runtime.version().feature() >= 17; boolean isPrepopulated = Security.prepopulateSecurityCaller(); diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 6dd39da53362c..75b6882abd7e8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -21,7 +21,6 @@ import 
org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.RoutingNodesHelper;
 import org.elasticsearch.cluster.routing.RoutingTable;
@@ -237,7 +236,7 @@ private ClusterState createClusterStateWithStartedShards(String reason) {
 
     private List<ShardRouting> createExistingShards(ClusterState currentState, String reason) {
         List<ShardRouting> shards = new ArrayList<>();
-        GroupShardsIterator<ShardIterator> shardGroups = currentState.routingTable().allAssignedShardsGrouped(new String[] { INDEX }, true);
+        List<ShardIterator> shardGroups = currentState.routingTable().allAssignedShardsGrouped(new String[] { INDEX }, true);
         for (ShardIterator shardIt : shardGroups) {
             for (ShardRouting shard : shardIt) {
                 shards.add(shard);
diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java
index 3c680d891ff13..75cc99e4c280e 100644
--- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java
@@ -612,11 +612,7 @@ public void testStartedShardEntrySerializationWithOlderTransportVersion() throws
         final String allocationId = randomRealisticUnicodeOfCodepointLengthBetween(10, 100);
         final long primaryTerm = randomIntBetween(0, 100);
         final String message = randomRealisticUnicodeOfCodepointLengthBetween(10, 100);
-        final TransportVersion version = randomFrom(
-            getFirstVersion(),
-            getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE),
-            getPreviousVersion(TransportVersions.V_8_15_0)
-        );
+        final TransportVersion version = randomFrom(getFirstVersion(), getPreviousVersion(TransportVersions.V_8_15_0));
         final ShardLongFieldRange timestampRange = ShardLongFieldRangeWireTests.randomRange();
         final ShardLongFieldRange eventIngestedRange = ShardLongFieldRangeWireTests.randomRange();
         var startedShardEntry = new StartedShardEntry(shardId, allocationId, primaryTerm, message, timestampRange, eventIngestedRange);
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java
index efb5df7d7a4fc..eb550223617e4 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java
@@ -20,8 +20,10 @@
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.RoutingNodesHelper;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.Strings;
+import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.indices.cluster.ClusterStateChanges;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
@@ -31,11 +33,14 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
 
+import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_AUTO_EXPAND_REPLICAS_SETTING;
 import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS;
+import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.everyItem;
@@ -221,4 +226,48 @@ public void testCalculateDesiredNumberOfReplicas() {
         assertThat(autoExpandReplicas.calculateDesiredNumberOfReplicas(matchingNodes), equalTo(Math.max(lowerBound, matchingNodes - 1)));
         assertThat(autoExpandReplicas.calculateDesiredNumberOfReplicas(max + 1), equalTo(max));
     }
+
+    public void testGetAutoExpandReplicaChangesStatelessIndices() {
+        {
+            // number of replicas is adjusted to 1 when it is initialized to 0
+            Metadata metadata = Metadata.builder()
+                .put(
+                    IndexMetadata.builder("test")
+                        .settings(
+                            Settings.builder()
+                                .put(ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING.getKey(), "stateless")
+                                .put("index.version.created", IndexVersion.current())
+                                .put(SETTING_NUMBER_OF_SHARDS, 1)
+                                .put(SETTING_NUMBER_OF_REPLICAS, 0)
+                                .put(INDEX_AUTO_EXPAND_REPLICAS_SETTING.getKey(), "0-all")
+                        )
+                )
+                .build();
+            Map<Integer, List<String>> autoExpandReplicaChanges = AutoExpandReplicas.getAutoExpandReplicaChanges(metadata, null);
+            assertEquals(1, autoExpandReplicaChanges.size());
+            List<String> indices = autoExpandReplicaChanges.get(1);
+            assertEquals(1, indices.size());
+            assertEquals("test", indices.getFirst());
+        }
+        {
+            // no changes when number of replicas is set to anything other than 0
+            Metadata metadata = Metadata.builder()
+                .put(
+                    IndexMetadata.builder("test")
+                        .settings(
+                            Settings.builder()
+                                .put(ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING.getKey(), "stateless")
+                                .put("index.version.created", IndexVersion.current())
+                                .put(SETTING_NUMBER_OF_SHARDS, 1)
+                                .put(SETTING_NUMBER_OF_REPLICAS, randomIntBetween(1, 10))
+                                .put(INDEX_AUTO_EXPAND_REPLICAS_SETTING.getKey(), "0-all")
+                        )
+                )
+                .build();
+            Map<Integer, List<String>> autoExpandReplicaChanges = AutoExpandReplicas.getAutoExpandReplicaChanges(metadata, () -> {
+                throw new UnsupportedOperationException();
+            });
+            assertEquals(0, autoExpandReplicaChanges.size());
+        }
+    }
 }
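The new test above pivots on the shape of the result of `AutoExpandReplicas.getAutoExpandReplicaChanges`: a map from a desired replica count to the index names that should be moved to that count, e.g. `{1=[test]}` in the stateless case. A minimal consumer-side sketch, assuming the Elasticsearch server test classpath and a `metadata` value built as in the test:

```java
// Hedged sketch: read back the proposed auto-expand changes.
Map<Integer, List<String>> changes = AutoExpandReplicas.getAutoExpandReplicaChanges(metadata, null);
for (Map.Entry<Integer, List<String>> entry : changes.entrySet()) {
    int desiredReplicas = entry.getKey();        // target number_of_replicas
    List<String> indexNames = entry.getValue();  // indices to update to that target
}
```

diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolverTests.java
index 5ab5ed1c23e4f..a3ac361f5b055 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolverTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolverTests.java
@@ -13,15 +13,24 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.core.Tuple;
+import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.indices.EmptySystemIndices;
 import org.elasticsearch.indices.InvalidIndexNameException;
+import org.elasticsearch.indices.SystemIndexDescriptor;
+import org.elasticsearch.indices.SystemIndices;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
 
+import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Supplier;
 
+import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
+import static 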
org.elasticsearch.indices.SystemIndices.SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY; +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.either; @@ -72,11 +81,8 @@ public void testResolveIndexAbstractions() { expectThrows(IllegalArgumentException.class, () -> resolveAbstractionsSelectorNotAllowed(List.of("index1::data"))); // Selectors allowed, valid selector given, data selector stripped off in result since it is the default assertThat(resolveAbstractionsSelectorAllowed(List.of("index1::data")), contains("index1")); - // Selectors allowed, wildcard selector provided, data selector stripped off in result since it is the default - // ** only returns ::data since expression is an index - assertThat(resolveAbstractionsSelectorAllowed(List.of("index1::*")), contains("index1")); // Selectors allowed, invalid selector given - expectThrows(InvalidIndexNameException.class, () -> resolveAbstractionsSelectorAllowed(List.of("index1::custom"))); + expectThrows(InvalidIndexNameException.class, () -> resolveAbstractionsSelectorAllowed(List.of("index1::*"))); // == Single Date Math Expressions == @@ -116,7 +122,7 @@ public void testResolveIndexAbstractions() { assertThat(resolveAbstractionsSelectorAllowed(List.of("index*::data")), containsInAnyOrder("index1", "index2")); // Selectors allowed, wildcard selector provided, data selector stripped off in result since it is the default // ** only returns ::data since expression is an index - assertThat(resolveAbstractionsSelectorAllowed(List.of("index*::*")), containsInAnyOrder("index1", "index2")); + assertThat(resolveAbstractionsSelectorAllowed(List.of("index*")), containsInAnyOrder("index1", "index2")); // Selectors allowed, invalid selector given expectThrows(InvalidIndexNameException.class, () -> resolveAbstractionsSelectorAllowed(List.of("index*::custom"))); @@ -128,11 +134,9 @@ public void testResolveIndexAbstractions() { expectThrows(IllegalArgumentException.class, () -> resolveAbstractionsSelectorNotAllowed(List.of("data-stream1::data"))); // Selectors allowed, valid selector given assertThat(resolveAbstractionsSelectorAllowed(List.of("data-stream1::failures")), contains("data-stream1::failures")); - // Selectors allowed, wildcard selector provided - // ** returns both ::data and ::failures since expression is a data stream - // ** data selector stripped off in result since it is the default + // Selectors allowed, data selector is not added in result since it is the default assertThat( - resolveAbstractionsSelectorAllowed(List.of("data-stream1::*")), + resolveAbstractionsSelectorAllowed(List.of("data-stream1", "data-stream1::failures")), containsInAnyOrder("data-stream1", "data-stream1::failures") ); // Selectors allowed, invalid selector given @@ -146,10 +150,9 @@ public void testResolveIndexAbstractions() { expectThrows(IllegalArgumentException.class, () -> resolveAbstractionsSelectorNotAllowed(List.of("data-stream*::data"))); // Selectors allowed, valid selector given assertThat(resolveAbstractionsSelectorAllowed(List.of("data-stream*::failures")), contains("data-stream1::failures")); - // Selectors allowed, wildcard selector provided - // ** returns both ::data and ::failures since expression is a data stream + // Selectors allowed, both ::data and ::failures are returned assertThat( - resolveAbstractionsSelectorAllowed(List.of("data-stream*::*")), + 
resolveAbstractionsSelectorAllowed(List.of("data-stream*", "data-stream*::failures")), containsInAnyOrder("data-stream1", "data-stream1::failures") ); // Selectors allowed, invalid selector given @@ -170,7 +173,7 @@ public void testResolveIndexAbstractions() { // Selectors allowed, wildcard selector provided // ** returns both ::data and ::failures for applicable abstractions assertThat( - resolveAbstractionsSelectorAllowed(List.of("*::*")), + resolveAbstractionsSelectorAllowed(List.of("*", "*::failures")), containsInAnyOrder("index1", "index2", "data-stream1", "data-stream1::failures") ); // Selectors allowed, invalid selector given @@ -185,11 +188,11 @@ public void testResolveIndexAbstractions() { // Selectors allowed, wildcard selector provided // ** returns both ::data and ::failures for applicable abstractions // ** limits the returned values based on selectors - assertThat(resolveAbstractionsSelectorAllowed(List.of("*::*", "-*::data")), contains("data-stream1::failures")); + assertThat(resolveAbstractionsSelectorAllowed(List.of("*", "*::failures", "-*::data")), contains("data-stream1::failures")); // Selectors allowed, wildcard selector provided // ** limits the returned values based on selectors assertThat( - resolveAbstractionsSelectorAllowed(List.of("*::*", "-*::failures")), + resolveAbstractionsSelectorAllowed(List.of("*", "*::failures", "-*::failures")), containsInAnyOrder("index1", "index2", "data-stream1") ); // Selectors allowed, none given, default to both selectors @@ -220,13 +223,78 @@ private boolean isIndexVisible(String index, String selector) { "*", selector, index, - IndicesOptions.strictExpandOpen(), + IndicesOptions.strictExpandHidden(), metadata, indexNameExpressionResolver, true ); } + public void testIsNetNewSystemIndexVisible() { + final Settings settings = Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", 1) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .build(); + + final Settings hiddenSettings = Settings.builder().put(settings).put("index.hidden", true).build(); + + final IndexMetadata foo = IndexMetadata.builder(".foo").settings(hiddenSettings).system(true).build(); + final IndexMetadata barReindexed = IndexMetadata.builder(".bar-reindexed") + .settings(hiddenSettings) + .system(true) + .putAlias(AliasMetadata.builder(".bar").isHidden(true).build()) + .build(); + final IndexMetadata other = IndexMetadata.builder("other").settings(settings).build(); + + final SystemIndexDescriptor fooDescriptor = SystemIndexDescriptor.builder() + .setDescription("foo indices") + .setOrigin("foo origin") + .setPrimaryIndex(".foo") + .setIndexPattern(".foo*") + .setSettings(settings) + .setMappings(mappings()) + .setNetNew() + .build(); + final SystemIndexDescriptor barDescriptor = SystemIndexDescriptor.builder() + .setDescription("bar indices") + .setOrigin("bar origin") + .setPrimaryIndex(".bar") + .setIndexPattern(".bar*") + .setSettings(settings) + .setMappings(mappings()) + .setNetNew() + .build(); + final SystemIndices systemIndices = new SystemIndices( + List.of(new SystemIndices.Feature("name", "description", List.of(fooDescriptor, barDescriptor))) + ); + + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "false"); + indexNameExpressionResolver = new IndexNameExpressionResolver(threadContext, systemIndices); + indexAbstractionResolver = new IndexAbstractionResolver(indexNameExpressionResolver); + + metadata = 
Metadata.builder().put(foo, true).put(barReindexed, true).put(other, true).build(); + + assertThat(isIndexVisible("other", "*"), is(true)); + assertThat(isIndexVisible(".foo", "*"), is(false)); + assertThat(isIndexVisible(".bar", "*"), is(false)); + } + + private static XContentBuilder mappings() { + try (XContentBuilder builder = jsonBuilder()) { + return builder.startObject() + .startObject(SINGLE_MAPPING_NAME) + .startObject("_meta") + .field(SystemIndexDescriptor.VERSION_META_KEY, 0) + .endObject() + .endObject() + .endObject(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + private List resolveAbstractionsSelectorNotAllowed(List expressions) { return resolveAbstractions(expressions, IndicesOptions.strictExpandHiddenNoSelectors(), defaultMask); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index f5e4c3d8f2d09..293bdb2c53899 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -2765,10 +2765,27 @@ public void testDataStreamsWithFailureStore() { assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis))); } + // Test default with an exact data stream name and include failures true + { + IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN_CLOSED_HIDDEN_FAILURE_NO_SELECTORS; + Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-data-stream"); + assertThat(result.length, equalTo(4)); + assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis))); + assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis))); + assertThat(result[2].getName(), equalTo(DataStream.getDefaultFailureStoreName(dataStreamName, 1, epochMillis))); + assertThat(result[3].getName(), equalTo(DataStream.getDefaultFailureStoreName(dataStreamName, 2, epochMillis))); + } + // Test explicit include failure store with an exact data stream name { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; - Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-data-stream::*"); + Index[] result = indexNameExpressionResolver.concreteIndices( + state, + indicesOptions, + true, + "my-data-stream::data", + "my-data-stream::failures" + ); assertThat(result.length, equalTo(4)); assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis))); assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis))); @@ -2784,7 +2801,7 @@ public void testDataStreamsWithFailureStore() { .build(); expectThrows( IllegalArgumentException.class, - () -> indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-data-stream::*") + () -> indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-data-stream::failures") ); } @@ -2813,6 +2830,26 @@ public void testDataStreamsWithFailureStore() { ); } + // Test default without any expressions and include failures + { + IndicesOptions indicesOptions = IndicesOptions.builder() + 
.gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowSelectors(false).includeFailureIndices(true).build()) + .build(); + Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true); + assertThat(result.length, equalTo(5)); + List indexNames = Arrays.stream(result).map(Index::getName).toList(); + assertThat( + indexNames, + containsInAnyOrder( + DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis), + DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis), + DataStream.getDefaultFailureStoreName(dataStreamName, 1, epochMillis), + DataStream.getDefaultFailureStoreName(dataStreamName, 2, epochMillis), + otherIndex.getIndex().getName() + ) + ); + } + // Test default with wildcard expression { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; @@ -2832,7 +2869,7 @@ public void testDataStreamsWithFailureStore() { // Test explicit include failure store with wildcard expression { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; - Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-*::*"); + Index[] result = indexNameExpressionResolver.concreteIndices(state, indicesOptions, true, "my-*::data", "my-*::failures"); assertThat(result.length, equalTo(5)); List indexNames = Arrays.stream(result).map(Index::getName).toList(); assertThat( @@ -3225,8 +3262,8 @@ public void testDataStreamsNames() { assertThat(streams, containsInAnyOrder(new ResolvedExpression(dataStream1, DATA), new ResolvedExpression(dataStream2, DATA))); assertThat(names, containsInAnyOrder(dataStream1, dataStream2)); - streams = indexNameExpressionResolver.dataStreams(state, IndicesOptions.lenientExpand(), "*foobar::*"); - names = indexNameExpressionResolver.dataStreamNames(state, IndicesOptions.lenientExpand(), "*foobar::*"); + streams = indexNameExpressionResolver.dataStreams(state, IndicesOptions.lenientExpand(), "*foobar::data", "*foobar::failures"); + names = indexNameExpressionResolver.dataStreamNames(state, IndicesOptions.lenientExpand(), "*foobar::data", "*foobar::failures"); assertThat( streams, containsInAnyOrder( diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/SelectorResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/SelectorResolverTests.java index 2bf34dcfd2a34..dd3876afd3c74 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/SelectorResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/SelectorResolverTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.test.ESTestCase; -import static org.elasticsearch.action.support.IndexComponentSelector.ALL_APPLICABLE; import static org.elasticsearch.action.support.IndexComponentSelector.DATA; import static org.elasticsearch.action.support.IndexComponentSelector.FAILURES; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; @@ -38,7 +37,6 @@ public void testResolveExpression() { assertThat(resolve(selectorsAllowed, "testXXX"), equalTo(new ResolvedExpression("testXXX", DATA))); assertThat(resolve(selectorsAllowed, "testXXX::data"), equalTo(new ResolvedExpression("testXXX", DATA))); assertThat(resolve(selectorsAllowed, "testXXX::failures"), equalTo(new ResolvedExpression("testXXX", FAILURES))); - assertThat(resolve(selectorsAllowed, "testXXX::*"), equalTo(new ResolvedExpression("testXXX", ALL_APPLICABLE))); // Disallow selectors (example: creating, modifying, or deleting 
indices/data streams/aliases). // Accepts standard expressions but throws when selectors are specified. @@ -47,7 +45,6 @@ public void testResolveExpression() { assertThat(resolve(noSelectors, "testXXX"), equalTo(new ResolvedExpression("testXXX"))); expectThrows(IllegalArgumentException.class, () -> resolve(noSelectors, "testXXX::data")); expectThrows(IllegalArgumentException.class, () -> resolve(noSelectors, "testXXX::failures")); - expectThrows(IllegalArgumentException.class, () -> resolve(noSelectors, "testXXX::*")); // === Errors // Only recognized components can be selected @@ -116,9 +113,7 @@ public void testCombineExpressionWithSelector() { assertThat(IndexNameExpressionResolver.combineSelectorExpression("a", null), is(equalTo("a"))); assertThat(IndexNameExpressionResolver.combineSelectorExpression("a", ""), is(equalTo("a::"))); assertThat(IndexNameExpressionResolver.combineSelectorExpression("a", "b"), is(equalTo("a::b"))); - assertThat(IndexNameExpressionResolver.combineSelectorExpression("a", "*"), is(equalTo("a::*"))); assertThat(IndexNameExpressionResolver.combineSelectorExpression("*", "b"), is(equalTo("*::b"))); - assertThat(IndexNameExpressionResolver.combineSelectorExpression("*", "*"), is(equalTo("*::*"))); } public void testHasSelectorSuffix() { @@ -151,14 +146,14 @@ public void testSplitSelectorExpression() { assertThat(IndexNameExpressionResolver.splitSelectorExpression("a::data"), is(equalTo(new Tuple<>("a", "data")))); assertThat(IndexNameExpressionResolver.splitSelectorExpression("a::failures"), is(equalTo(new Tuple<>("a", "failures")))); - assertThat(IndexNameExpressionResolver.splitSelectorExpression("a::*"), is(equalTo(new Tuple<>("a", "*")))); + expectThrows(InvalidIndexNameException.class, () -> IndexNameExpressionResolver.splitSelectorExpression("a::*")); expectThrows(InvalidIndexNameException.class, () -> IndexNameExpressionResolver.splitSelectorExpression("a::random")); expectThrows(InvalidIndexNameException.class, () -> IndexNameExpressionResolver.splitSelectorExpression("a::d*ta")); expectThrows(InvalidIndexNameException.class, () -> IndexNameExpressionResolver.splitSelectorExpression("a::*ailures")); expectThrows(InvalidIndexNameException.class, () -> IndexNameExpressionResolver.splitSelectorExpression("a::")); expectThrows(InvalidIndexNameException.class, () -> IndexNameExpressionResolver.splitSelectorExpression("a::**")); expectThrows(InvalidIndexNameException.class, () -> IndexNameExpressionResolver.splitSelectorExpression("index::data::*")); - assertThat(IndexNameExpressionResolver.splitSelectorExpression("::*"), is(equalTo(new Tuple<>("", "*")))); + expectThrows(InvalidIndexNameException.class, () -> IndexNameExpressionResolver.splitSelectorExpression("::*")); } private static IndicesOptions getOptionsForSelectors() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 9d9a5ebd37218..1eeaef473521d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -25,7 +25,6 @@ import java.util.function.Predicate; import java.util.stream.Collectors; -import static org.elasticsearch.action.support.IndexComponentSelector.ALL_APPLICABLE; import static org.elasticsearch.action.support.IndexComponentSelector.DATA; import static 
org.elasticsearch.action.support.IndexComponentSelector.FAILURES; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; @@ -54,19 +53,19 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "ku*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "ku*", DATA)), equalTo(resolvedExpressionsSet("kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*", DATA)), equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*", DATA)), equalTo(resolvedExpressionsSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*", DATA)), equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY", "kuku")) ); } @@ -87,7 +86,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*", DATA)), equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -96,7 +95,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*", DATA)), equalTo(resolvedExpressionsSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -105,7 +104,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*", DATA)), equalTo(resolvedExpressionsSet("testXXX", "testXXY")) ); } @@ -128,31 +127,27 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*", DATA)), equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet( - IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*Y", ALL_APPLICABLE) - ), + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*Y", DATA)), equalTo(resolvedExpressionsSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "kuku*Y*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "kuku*Y*", DATA)), equalTo(resolvedExpressionsSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*", ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*", DATA)), equalTo(resolvedExpressionsSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*Y*X", ALL_APPLICABLE)) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*Y*X", DATA)).size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*X", ALL_APPLICABLE)) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*X", DATA)).size(), equalTo(0) ); } @@ -171,7 +166,7 @@ public void testAll() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, DATA)), equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); } @@ -189,7 +184,7 @@ public void testAll() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, null)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, DATA)), equalTo(resolvedExpressionsNoSelectorSet("testXXX", "testXYY", "testYYY")) ); } @@ -212,10 +207,7 @@ public void testAllAliases() { IndicesOptions.lenientExpandOpen(), // don't include hidden SystemIndexAccessLevel.NONE ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, ALL_APPLICABLE)), - equalTo(newHashSet()) - ); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, DATA)), equalTo(newHashSet())); } { @@ -235,7 +227,7 @@ public void testAllAliases() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, ALL_APPLICABLE)), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, DATA)), equalTo(resolvedExpressionsSet("index-visible-alias")) ); } @@ -290,13 +282,8 @@ public void testAllDataStreams() { equalTo(resolvedExpressionsSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, ALL_APPLICABLE)), - equalTo( - resolvedExpressionsSet( - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultFailureStoreName("foo_logs", 1, epochMillis) - ) - ) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, FAILURES)), + equalTo(resolvedExpressionsSet(DataStream.getDefaultFailureStoreName("foo_logs", 1, 
epochMillis))) ); } @@ -328,10 +315,7 @@ public void testAllDataStreams() { ); assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, DATA)), equalTo(Set.of())); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, ALL_APPLICABLE)), - equalTo(Set.of()) - ); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context, FAILURES)), equalTo(Set.of())); } } @@ -455,7 +439,7 @@ public void testResolveAliases() { Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, "foo_a*", - ALL_APPLICABLE + DATA ); assertThat(indices, containsInAnyOrder(new ResolvedExpression("foo_index", DATA), new ResolvedExpression("bar_index", DATA))); } @@ -463,7 +447,7 @@ public void testResolveAliases() { Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, "foo_a*", - ALL_APPLICABLE + DATA ); assertEquals(0, indices.size()); } @@ -471,7 +455,7 @@ public void testResolveAliases() { Set indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesStrictContext, "foo_a*", - ALL_APPLICABLE + DATA ); assertThat(indices, empty()); } @@ -479,7 +463,7 @@ public void testResolveAliases() { Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, "foo*", - ALL_APPLICABLE + DATA ); assertThat( indices, @@ -494,7 +478,7 @@ public void testResolveAliases() { Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, "foo*", - ALL_APPLICABLE + DATA ); assertThat(indices, containsInAnyOrder(new ResolvedExpression("foo_foo", DATA), new ResolvedExpression("foo_index", DATA))); } @@ -502,7 +486,7 @@ public void testResolveAliases() { Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesStrictContext, "foo*", - ALL_APPLICABLE + DATA ); assertThat(indices, containsInAnyOrder(new ResolvedExpression("foo_foo", DATA), new ResolvedExpression("foo_index", DATA))); } @@ -556,7 +540,7 @@ public void testResolveDataStreams() { Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, "foo_*", - ALL_APPLICABLE + DATA ); assertThat( indices, @@ -571,7 +555,7 @@ public void testResolveDataStreams() { indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, "bar_*", - ALL_APPLICABLE + DATA ); assertThat(indices, containsInAnyOrder(new ResolvedExpression("bar_bar", DATA), new ResolvedExpression("bar_index", DATA))); } @@ -602,7 +586,7 @@ public void testResolveDataStreams() { Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, "foo_*", - ALL_APPLICABLE + DATA ); assertThat( indices, @@ -611,9 +595,7 @@ public void testResolveDataStreams() { new ResolvedExpression("bar_index", DATA), new ResolvedExpression("foo_foo", DATA), new ResolvedExpression(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), DATA), - new ResolvedExpression(DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis), DATA), - new ResolvedExpression(DataStream.getDefaultFailureStoreName("foo_logs", 1, epochMillis), DATA), - new 
ResolvedExpression(DataStream.getDefaultFailureStoreName("foo_logs", 2, epochMillis), DATA) + new ResolvedExpression(DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis), DATA) ) ); @@ -632,26 +614,6 @@ public void testResolveDataStreams() { ) ) ); - - // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( - indicesAliasesAndDataStreamsContext, - "*", - ALL_APPLICABLE - ); - assertThat( - indices, - containsInAnyOrder( - new ResolvedExpression("foo_index", DATA), - new ResolvedExpression("bar_index", DATA), - new ResolvedExpression("foo_foo", DATA), - new ResolvedExpression("bar_bar", DATA), - new ResolvedExpression(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), DATA), - new ResolvedExpression(DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis), DATA), - new ResolvedExpression(DataStream.getDefaultFailureStoreName("foo_logs", 1, epochMillis), DATA), - new ResolvedExpression(DataStream.getDefaultFailureStoreName("foo_logs", 2, epochMillis), DATA) - ) - ); } { @@ -681,7 +643,7 @@ public void testResolveDataStreams() { Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, "foo_*", - ALL_APPLICABLE + DATA ); assertThat( indices, @@ -690,9 +652,7 @@ public void testResolveDataStreams() { new ResolvedExpression("bar_index", DATA), new ResolvedExpression("foo_foo", DATA), new ResolvedExpression(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), DATA), - new ResolvedExpression(DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis), DATA), - new ResolvedExpression(DataStream.getDefaultFailureStoreName("foo_logs", 1, epochMillis), DATA), - new ResolvedExpression(DataStream.getDefaultFailureStoreName("foo_logs", 2, epochMillis), DATA) + new ResolvedExpression(DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis), DATA) ) ); @@ -712,32 +672,11 @@ public void testResolveDataStreams() { ) ); - // Resolve both backing and failure indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( - indicesAliasesDataStreamsAndHiddenIndices, - "foo_*", - ALL_APPLICABLE - ); - assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis), - DataStream.getDefaultFailureStoreName("foo_logs", 1, epochMillis), - DataStream.getDefaultFailureStoreName("foo_logs", 2, epochMillis) - ) - ) - ); - // include all wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, "*", - ALL_APPLICABLE + DATA ); assertThat( newHashSet(indices), @@ -770,28 +709,6 @@ public void testResolveDataStreams() { ) ) ); - - // include all wildcard adds the data stream's backing and failure indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( - indicesAliasesDataStreamsAndHiddenIndices, - "*", - ALL_APPLICABLE - ); - assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis), 
- DataStream.getDefaultFailureStoreName("foo_logs", 1, epochMillis), - DataStream.getDefaultFailureStoreName("foo_logs", 2, epochMillis) - ) - ) - ); } } @@ -824,7 +741,7 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, "*", - ALL_APPLICABLE + DATA ); assertThat( matches, @@ -835,7 +752,7 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { new ResolvedExpression("bar_index", DATA) ) ); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "*", ALL_APPLICABLE); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "*", DATA); assertThat( matches, containsInAnyOrder( @@ -845,11 +762,7 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { new ResolvedExpression("bar_index", DATA) ) ); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( - indicesAndAliasesContext, - "foo*", - ALL_APPLICABLE - ); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "foo*", DATA); assertThat( matches, containsInAnyOrder( @@ -858,11 +771,7 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { new ResolvedExpression("bar_index", DATA) ) ); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( - onlyIndicesContext, - "foo*", - ALL_APPLICABLE - ); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "foo*", DATA); assertThat(matches, containsInAnyOrder(new ResolvedExpression("foo_foo", DATA), new ResolvedExpression("foo_index", DATA))); } diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java index a91cef576df33..744a12d5ab6e0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java @@ -31,8 +31,6 @@ import static java.util.Collections.emptySet; import static org.elasticsearch.test.NodeRoles.nonRemoteClusterClientNode; import static org.elasticsearch.test.NodeRoles.remoteClusterClientNode; -import static org.elasticsearch.test.TransportVersionUtils.getPreviousVersion; -import static org.elasticsearch.test.TransportVersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -274,39 +272,5 @@ public void testDiscoveryNodeMinReadOnlyVersionSerialization() throws Exception } } } - - { - var oldVersion = randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - getPreviousVersion(TransportVersions.NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION) - ); - try (var out = new BytesStreamOutput()) { - out.setTransportVersion(oldVersion); - node.writeTo(out); - - try (var in = StreamInput.wrap(out.bytes().array())) { - in.setTransportVersion(oldVersion); - - var deserialized = new DiscoveryNode(in); - assertThat(deserialized.getId(), equalTo(node.getId())); - assertThat(deserialized.getAddress(), equalTo(node.getAddress())); - assertThat(deserialized.getMinIndexVersion(), equalTo(node.getMinIndexVersion())); - assertThat(deserialized.getMaxIndexVersion(), 
equalTo(node.getMaxIndexVersion())); - assertThat(deserialized.getMinReadOnlyIndexVersion(), equalTo(node.getMinIndexVersion())); - assertThat( - deserialized.getVersionInformation(), - equalTo( - new VersionInformation( - node.getBuildVersion(), - node.getMinIndexVersion(), - node.getMinIndexVersion(), - node.getMaxIndexVersion() - ) - ) - ); - } - } - } } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java deleted file mode 100644 index 8e111c3676284..0000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.routing; - -import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.action.OriginalIndicesTests; -import org.elasticsearch.action.search.SearchShardIterator; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.test.ESTestCase; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; - -public class GroupShardsIteratorTests extends ESTestCase { - - public static List randomShardRoutings(ShardId shardId) { - return randomShardRoutings(shardId, randomIntBetween(0, 2)); - } - - private static List randomShardRoutings(ShardId shardId, int numReplicas) { - List shardRoutings = new ArrayList<>(); - shardRoutings.add(TestShardRouting.newShardRouting(shardId, randomAlphaOfLengthBetween(5, 10), true, STARTED)); - for (int j = 0; j < numReplicas; j++) { - shardRoutings.add(TestShardRouting.newShardRouting(shardId, randomAlphaOfLengthBetween(5, 10), false, STARTED)); - } - return shardRoutings; - } - - public void testSize() { - List list = new ArrayList<>(); - Index index = new Index("foo", "na"); - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 2))); - } - list.add(new PlainShardIterator(new ShardId(index, 1), Collections.emptyList())); - { - ShardId shardId = new ShardId(index, 2); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 0))); - } - index = new Index("foo_1", "na"); - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 0))); - } - { - ShardId shardId = new ShardId(index, 1); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 0))); - } - GroupShardsIterator iter = new GroupShardsIterator<>(list); - assertEquals(7, iter.totalSizeWith1ForEmpty()); - assertEquals(5, iter.size()); - assertEquals(6, iter.totalSize()); - } - - public void testIterate() { - List list = new ArrayList<>(); - Index index = new Index("foo", "na"); - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - list.add(new PlainShardIterator(new 
ShardId(index, 1), Collections.emptyList())); - { - ShardId shardId = new ShardId(index, 2); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - { - ShardId shardId = new ShardId(index, 1); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - index = new Index("foo_2", "na"); - { - ShardId shardId = new ShardId(index, 0); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - { - ShardId shardId = new ShardId(index, 1); - list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId))); - } - - Collections.shuffle(list, random()); - { - GroupShardsIterator unsorted = new GroupShardsIterator<>(list); - GroupShardsIterator iter = new GroupShardsIterator<>(list); - List actualIterators = new ArrayList<>(); - for (ShardIterator shardsIterator : iter) { - actualIterators.add(shardsIterator); - } - assertEquals(actualIterators, list); - } - { - GroupShardsIterator iter = GroupShardsIterator.sortAndCreate(list); - List actualIterators = new ArrayList<>(); - for (ShardIterator shardsIterator : iter) { - actualIterators.add(shardsIterator); - } - CollectionUtil.timSort(actualIterators); - assertEquals(actualIterators, list); - } - } - - public void testOrderingWithSearchShardIterators() { - String[] indices = generateRandomStringArray(10, 10, false, false); - Arrays.sort(indices); - String[] uuids = generateRandomStringArray(5, 10, false, false); - Arrays.sort(uuids); - String[] clusters = generateRandomStringArray(5, 10, false, false); - Arrays.sort(clusters); - - List sorted = new ArrayList<>(); - int numShards = randomIntBetween(1, 10); - for (int i = 0; i < numShards; i++) { - for (String index : indices) { - for (String uuid : uuids) { - ShardId shardId = new ShardId(index, uuid, i); - SearchShardIterator shardIterator = new SearchShardIterator( - null, - shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), - OriginalIndicesTests.randomOriginalIndices() - ); - sorted.add(shardIterator); - for (String cluster : clusters) { - SearchShardIterator remoteIterator = new SearchShardIterator( - cluster, - shardId, - GroupShardsIteratorTests.randomShardRoutings(shardId), - OriginalIndicesTests.randomOriginalIndices() - ); - sorted.add(remoteIterator); - } - } - } - } - - List shuffled = new ArrayList<>(sorted); - Collections.shuffle(shuffled, random()); - { - List actualIterators = new ArrayList<>(); - GroupShardsIterator iter = new GroupShardsIterator<>(shuffled); - for (SearchShardIterator searchShardIterator : iter) { - actualIterators.add(searchShardIterator); - } - assertEquals(shuffled, actualIterators); - } - { - List actualIterators = new ArrayList<>(); - GroupShardsIterator iter = GroupShardsIterator.sortAndCreate(shuffled); - for (SearchShardIterator searchShardIterator : iter) { - actualIterators.add(searchShardIterator); - } - assertEquals(sorted, actualIterators); - } - } -} diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java index 912326162e5c4..fd9a2a154d47f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java @@ -44,13 +44,13 @@ public void testReadyForSearch() { List.of(getShard(p2, true, 
ShardRoutingState.STARTED, ShardRouting.Role.DEFAULT)) ); IndexRoutingTable indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertTrue(indexRoutingTable.readyForSearch(clusterState)); + assertTrue(indexRoutingTable.readyForSearch()); // 2 primaries that are index only shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertFalse(indexRoutingTable.readyForSearch(clusterState)); + assertFalse(indexRoutingTable.readyForSearch()); // 2 unassigned primaries that are index only shardTable1 = new IndexShardRoutingTable( @@ -62,7 +62,7 @@ public void testReadyForSearch() { List.of(getShard(p2, true, ShardRoutingState.UNASSIGNED, ShardRouting.Role.INDEX_ONLY)) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertFalse(indexRoutingTable.readyForSearch(clusterState)); + assertFalse(indexRoutingTable.readyForSearch()); // 2 primaries that are index only with replicas that are not all available shardTable1 = new IndexShardRoutingTable( @@ -82,7 +82,7 @@ public void testReadyForSearch() { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertFalse(indexRoutingTable.readyForSearch(clusterState)); + assertFalse(indexRoutingTable.readyForSearch()); // 2 primaries that are index only with some replicas that are all available shardTable1 = new IndexShardRoutingTable( @@ -102,7 +102,7 @@ public void testReadyForSearch() { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertTrue(indexRoutingTable.readyForSearch(clusterState)); + assertTrue(indexRoutingTable.readyForSearch()); // 2 unassigned primaries that are index only with some replicas that are all available shardTable1 = new IndexShardRoutingTable( @@ -122,7 +122,7 @@ public void testReadyForSearch() { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertTrue(indexRoutingTable.readyForSearch(clusterState)); + assertTrue(indexRoutingTable.readyForSearch()); // 2 primaries that are index only with at least 1 replica per primary that is available shardTable1 = new IndexShardRoutingTable( @@ -142,7 +142,7 @@ public void testReadyForSearch() { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertTrue(indexRoutingTable.readyForSearch(clusterState)); + assertTrue(indexRoutingTable.readyForSearch()); } private ShardRouting getShard(ShardId shardId, boolean isPrimary, ShardRoutingState state, ShardRouting.Role role) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java index 6c244008097f2..7337e28da1dc9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java @@ -162,7 +162,7 @@ public void testFairSessionIdPreferences() throws InterruptedException, IOExcept for (int i = 0; i < numRepeatedSearches; 
i++) { List searchedShards = new ArrayList<>(numShards); Set selectedNodes = Sets.newHashSetWithExpectedSize(numShards); - final GroupShardsIterator groupIterator = opRouting.searchShards(state, indexNames, null, sessionKey); + final List groupIterator = opRouting.searchShards(state, indexNames, null, sessionKey); assertThat("One group per index shard", groupIterator.size(), equalTo(numIndices * numShards)); for (ShardIterator shardIterator : groupIterator) { @@ -283,14 +283,7 @@ public void testARSRanking() throws Exception { TestThreadPool threadPool = new TestThreadPool("test"); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); ResponseCollectorService collector = new ResponseCollectorService(clusterService); - GroupShardsIterator groupIterator = opRouting.searchShards( - state, - indexNames, - null, - null, - collector, - new HashMap<>() - ); + List groupIterator = opRouting.searchShards(state, indexNames, null, null, collector, new HashMap<>()); assertThat("One group per index shard", groupIterator.size(), equalTo(numIndices * numShards)); @@ -369,14 +362,7 @@ public void testARSStatsAdjustment() throws Exception { ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); ResponseCollectorService collector = new ResponseCollectorService(clusterService); - GroupShardsIterator groupIterator = opRouting.searchShards( - state, - indexNames, - null, - null, - collector, - new HashMap<>() - ); + List groupIterator = opRouting.searchShards(state, indexNames, null, null, collector, new HashMap<>()); assertThat("One group per index shard", groupIterator.size(), equalTo(numIndices * numShards)); // We have two nodes, where the second has more load @@ -435,14 +421,7 @@ public void testARSOutstandingRequestTracking() throws Exception { Map outstandingRequests = new HashMap<>(); // Check that we choose to search over both nodes - GroupShardsIterator groupIterator = opRouting.searchShards( - state, - indexNames, - null, - null, - collector, - outstandingRequests - ); + List groupIterator = opRouting.searchShards(state, indexNames, null, null, collector, outstandingRequests); Set nodeIds = new HashSet<>(); nodeIds.add(groupIterator.get(0).nextOrNull().currentNodeId()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index 27caa5b53c96a..0001ad175f5c5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -312,18 +312,21 @@ public void testAllAssignedShardsGrouped() { } public void testAllShardsForMultipleIndices() { - assertThat(this.emptyRoutingTable.allShards(new String[0]).size(), is(0)); + assertThat(this.emptyRoutingTable.allShards(new String[0]).getShardRoutings().size(), is(0)); - assertThat(clusterState.routingTable().allShards(new String[] { TEST_INDEX_1 }).size(), is(this.shardsPerIndex)); + assertThat(clusterState.routingTable().allShards(new String[] { TEST_INDEX_1 }).getShardRoutings().size(), is(this.shardsPerIndex)); initPrimaries(); - assertThat(clusterState.routingTable().allShards(new String[] { TEST_INDEX_1 }).size(), is(this.shardsPerIndex)); + assertThat(clusterState.routingTable().allShards(new String[] { TEST_INDEX_1 }).getShardRoutings().size(), is(this.shardsPerIndex)); startInitializingShards(TEST_INDEX_1); - assertThat(clusterState.routingTable().allShards(new String[] 
{ TEST_INDEX_1 }).size(), is(this.shardsPerIndex)); + assertThat(clusterState.routingTable().allShards(new String[] { TEST_INDEX_1 }).getShardRoutings().size(), is(this.shardsPerIndex)); startInitializingShards(TEST_INDEX_2); - assertThat(clusterState.routingTable().allShards(new String[] { TEST_INDEX_1, TEST_INDEX_2 }).size(), is(this.totalNumberOfShards)); + assertThat( + clusterState.routingTable().allShards(new String[] { TEST_INDEX_1, TEST_INDEX_2 }).getShardRoutings().size(), + is(this.totalNumberOfShards) + ); try { clusterState.routingTable().allShards(new String[] { TEST_INDEX_1, "not_exists" }); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/ShardIteratorTests.java similarity index 76% rename from server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java rename to server/src/test/java/org/elasticsearch/cluster/routing/ShardIteratorTests.java index 0d6cc3fad1e52..8666abf1292c8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/PlainShardIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/ShardIteratorTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.action.search.SearchShardIteratorTests; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; @@ -18,12 +19,12 @@ import java.util.Arrays; import java.util.List; -public class PlainShardIteratorTests extends ESTestCase { +public class ShardIteratorTests extends ESTestCase { public void testEqualsAndHashCode() { EqualsHashCodeTestUtils.checkEqualsAndHashCode( randomPlainShardIterator(), - i -> new PlainShardIterator(i.shardId(), i.getShardRoutings()), + i -> new ShardIterator(i.shardId(), i.getShardRoutings()), i -> { ShardId shardId = switch (randomIntBetween(0, 2)) { case 0 -> new ShardId(i.shardId().getIndex(), i.shardId().getId() + randomIntBetween(1, 1000)); @@ -39,7 +40,7 @@ public void testEqualsAndHashCode() { ); default -> throw new UnsupportedOperationException(); }; - return new PlainShardIterator(shardId, i.getShardRoutings()); + return new ShardIterator(shardId, i.getShardRoutings()); } ); } @@ -49,26 +50,26 @@ public void testCompareTo() { Arrays.sort(indices); String[] uuids = generateRandomStringArray(3, 10, false, false); Arrays.sort(uuids); - List shardIterators = new ArrayList<>(); + List shardIterators = new ArrayList<>(); int numShards = randomIntBetween(1, 5); for (int i = 0; i < numShards; i++) { for (String index : indices) { for (String uuid : uuids) { ShardId shardId = new ShardId(index, uuid, i); - shardIterators.add(new PlainShardIterator(shardId, GroupShardsIteratorTests.randomShardRoutings(shardId))); + shardIterators.add(new ShardIterator(shardId, SearchShardIteratorTests.randomShardRoutings(shardId))); } } } for (int i = 0; i < shardIterators.size(); i++) { - PlainShardIterator currentIterator = shardIterators.get(i); + ShardIterator currentIterator = shardIterators.get(i); for (int j = i + 1; j < shardIterators.size(); j++) { - PlainShardIterator greaterIterator = shardIterators.get(j); + ShardIterator greaterIterator = shardIterators.get(j); assertThat(currentIterator, Matchers.lessThan(greaterIterator)); assertThat(greaterIterator, Matchers.greaterThan(currentIterator)); assertNotEquals(currentIterator, greaterIterator); } for (int j = i - 1; j >= 0; j--) { - PlainShardIterator smallerIterator = 
shardIterators.get(j); + ShardIterator smallerIterator = shardIterators.get(j); assertThat(smallerIterator, Matchers.lessThan(currentIterator)); assertThat(currentIterator, Matchers.greaterThan(smallerIterator)); assertNotEquals(currentIterator, smallerIterator); @@ -77,15 +78,15 @@ public void testCompareTo() { } public void testCompareToEqualItems() { - PlainShardIterator shardIterator1 = randomPlainShardIterator(); - PlainShardIterator shardIterator2 = new PlainShardIterator(shardIterator1.shardId(), shardIterator1.getShardRoutings()); + ShardIterator shardIterator1 = randomPlainShardIterator(); + ShardIterator shardIterator2 = new ShardIterator(shardIterator1.shardId(), shardIterator1.getShardRoutings()); assertEquals(shardIterator1, shardIterator2); assertEquals(0, shardIterator1.compareTo(shardIterator2)); assertEquals(0, shardIterator2.compareTo(shardIterator1)); } - private static PlainShardIterator randomPlainShardIterator() { + private static ShardIterator randomPlainShardIterator() { ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomIntBetween(1, Integer.MAX_VALUE)); - return new PlainShardIterator(shardId, GroupShardsIteratorTests.randomShardRoutings(shardId)); + return new ShardIterator(shardId, SearchShardIteratorTests.randomShardRoutings(shardId)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java new file mode 100644 index 0000000000000..337fad01f905b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java @@ -0,0 +1,256 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.Before; + +public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { + private static final Logger logger = LogManager.getLogger(AllocationBalancingRoundSummaryServiceTests.class); + + private static final String BALANCING_SUMMARY_MSG_PREFIX = "Balancing round summaries:*"; + + final Settings enabledSummariesSettings = Settings.builder() + .put(AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING.getKey(), true) + .build(); + final Settings disabledDefaultEmptySettings = Settings.builder().build(); + final Settings enabledButNegativeIntervalSettings = Settings.builder() + .put(AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING.getKey(), true) + .put(AllocationBalancingRoundSummaryService.BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING.getKey(), TimeValue.MINUS_ONE) + .build(); + + ClusterSettings enabledClusterSettings = new ClusterSettings(enabledSummariesSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + ClusterSettings disabledDefaultEmptyClusterSettings = new ClusterSettings( + disabledDefaultEmptySettings, + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS + ); + ClusterSettings enabledButNegativeIntervalClusterSettings = new ClusterSettings( + enabledButNegativeIntervalSettings, + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS + ); + + // Construction parameters for the service. + + DeterministicTaskQueue deterministicTaskQueue; + ThreadPool testThreadPool; + + @Before + public void setUpThreadPool() { + deterministicTaskQueue = new DeterministicTaskQueue(); + testThreadPool = deterministicTaskQueue.getThreadPool(); + } + + /** + * Test that the service is disabled and no logging occurs when + * {@link AllocationBalancingRoundSummaryService#ENABLE_BALANCER_ROUND_SUMMARIES_SETTING} defaults to false. + */ + public void testServiceDisabledByDefault() { + var service = new AllocationBalancingRoundSummaryService(testThreadPool, disabledDefaultEmptyClusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + /** + * Add a summary and check it is not logged. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(0); // when summaries are disabled, summaries are not retained when added. 
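
The assertion above relies on a test-only hook, verifyNumberOfSummaries, to show that a disabled service drops summaries on arrival rather than queueing them. Every test in this new class drives the service's scheduled reporting the same way: nothing runs in the background, and the report task fires only when the test advances the DeterministicTaskQueue clock. A condensed sketch of that pattern, using only calls that appear in this diff:

    // Deterministic scheduling: the reporting task runs only when the test advances time.
    DeterministicTaskQueue taskQueue = new DeterministicTaskQueue();
    var service = new AllocationBalancingRoundSummaryService(taskQueue.getThreadPool(), clusterSettings);

    service.addBalancerRoundSummary(new BalancingRoundSummary(50));

    if (taskQueue.hasDeferredTasks()) {
        taskQueue.advanceTime();      // jump the virtual clock to the next scheduled report
    }
    taskQueue.runAllRunnableTasks();  // execute the reporting task inline on this thread

In the disabled case shown here, the UnseenEventExpectation registered next proves that advancing time produces no log output at all.
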
+ mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "Running balancer summary logging", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + "*" + ) + ); + + if (deterministicTaskQueue.hasDeferredTasks()) { + deterministicTaskQueue.advanceTime(); + } + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + + public void testEnabledService() { + var service = new AllocationBalancingRoundSummaryService(testThreadPool, enabledClusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + /** + * Add a summary and check the service logs a report on it. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(1); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "Running balancer summary logging", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + BALANCING_SUMMARY_MSG_PREFIX + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + + /** + * Add a second summary, check for more logging. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(200)); + service.verifyNumberOfSummaries(1); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "Running balancer summary logging a second time", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + BALANCING_SUMMARY_MSG_PREFIX + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + + /** + * The service should combine multiple summaries together into a single report when multiple summaries were added since the last report. + */ + public void testCombinedSummary() { + var service = new AllocationBalancingRoundSummaryService(testThreadPool, enabledClusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.addBalancerRoundSummary(new BalancingRoundSummary(100)); + service.verifyNumberOfSummaries(2); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "Running balancer summary logging of combined summaries", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + "*150*" + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + + /** + * The service shouldn't log anything when there haven't been any summaries added since the last report. + */ + public void testNoSummariesToReport() { + var service = new AllocationBalancingRoundSummaryService(testThreadPool, enabledClusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + /** + * First add some summaries to report, ensuring that the logging is active. 
+ */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(1); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "Running balancer summary logging of combined summaries", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + BALANCING_SUMMARY_MSG_PREFIX + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + + /** + * Now check that there are no further log messages because there were no further summaries added. + */ + + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "No balancer round summary to log", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + "*" + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + + /** + * Test that the service is disabled by setting {@link AllocationBalancingRoundSummaryService#ENABLE_BALANCER_ROUND_SUMMARIES_SETTING} + * to false. + */ + public void testEnableAndThenDisableService() { + var disabledSettingsUpdate = Settings.builder() + .put(AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING.getKey(), false) + .build(); + ClusterSettings clusterSettings = new ClusterSettings(enabledSummariesSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + var service = new AllocationBalancingRoundSummaryService(testThreadPool, clusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + /** + * Add some summaries, but then disable the service before logging occurs. Disabling the service should drain and discard any + * summaries waiting to be reported. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(1); + + clusterSettings.applySettings(disabledSettingsUpdate); + service.verifyNumberOfSummaries(0); + + /** + * Verify that any additional summaries are not retained, since the service is disabled. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(0); + + // Check that the service never logged anything. 
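
This final test pins down two behaviors: applySettings with the disabled update drains queued summaries immediately (the count drops to zero), and further addBalancerRoundSummary calls are not retained while the service is disabled. A hypothetical sketch of how such a drain-on-disable toggle could look on the service side (field and method names are assumed for illustration; they are not taken from this diff):

    // Hypothetical sketch of a drain-on-disable toggle; names are illustrative only.
    private final Queue<BalancingRoundSummary> summaries = new ConcurrentLinkedQueue<>();
    private volatile boolean enabled;

    void setEnabled(boolean enabled) {
        this.enabled = enabled;
        if (enabled == false) {
            summaries.clear(); // discard anything waiting to be reported
        }
    }

    void addBalancerRoundSummary(BalancingRoundSummary summary) {
        if (enabled) {
            summaries.add(summary); // while disabled, summaries are dropped on arrival
        }
    }

The UnseenEventExpectation registered next closes the loop by verifying that the service never logged anything.
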
+ mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "Running balancer summary logging", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + "*" + ) + ); + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java index cd94c87bb4b57..81aa1a60eb45e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java @@ -1212,12 +1212,7 @@ public void testRebalanceDoesNotCauseHotSpots() { new ConcurrentRebalanceAllocationDecider(clusterSettings), new ThrottlingAllocationDecider(clusterSettings) }; - var reconciler = new DesiredBalanceReconciler( - clusterSettings, - new DeterministicTaskQueue().getThreadPool(), - DesiredBalanceMetrics.NOOP, - EMPTY_NODE_ALLOCATION_STATS - ); + var reconciler = new DesiredBalanceReconciler(clusterSettings, new DeterministicTaskQueue().getThreadPool()); var totalOutgoingMoves = new HashMap(); for (int i = 0; i < numberOfNodes; i++) { @@ -1299,12 +1294,7 @@ public void testShouldLogOnTooManyUndesiredAllocations() { final var timeInMillisSupplier = new AtomicLong(); when(threadPool.relativeTimeInMillisSupplier()).thenReturn(timeInMillisSupplier::incrementAndGet); - var reconciler = new DesiredBalanceReconciler( - createBuiltInClusterSettings(), - threadPool, - DesiredBalanceMetrics.NOOP, - EMPTY_NODE_ALLOCATION_STATS - ); + var reconciler = new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool); final long initialDelayInMillis = TimeValue.timeValueMinutes(5).getMillis(); timeInMillisSupplier.addAndGet(randomLongBetween(initialDelayInMillis, 2 * initialDelayInMillis)); @@ -1356,8 +1346,7 @@ public void testShouldLogOnTooManyUndesiredAllocations() { private static void reconcile(RoutingAllocation routingAllocation, DesiredBalance desiredBalance) { final var threadPool = mock(ThreadPool.class); when(threadPool.relativeTimeInMillisSupplier()).thenReturn(new AtomicLong()::incrementAndGet); - new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool, DesiredBalanceMetrics.NOOP, EMPTY_NODE_ALLOCATION_STATS) - .reconcile(desiredBalance, routingAllocation); + new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool).reconcile(desiredBalance, routingAllocation); } private static boolean isReconciled(RoutingNode node, DesiredBalance balance) { diff --git a/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index 2d9055f718083..75f9a89764392 100644 --- a/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -17,9 +17,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.OperationRouting; -import 
org.elasticsearch.cluster.routing.PlainShardIterator; import org.elasticsearch.cluster.routing.RotationShardShuffler; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardIterator; @@ -51,7 +49,7 @@ public class RoutingIteratorTests extends ESAllocationTestCase { public void testEmptyIterator() { ShardShuffler shuffler = new RotationShardShuffler(0); - ShardIterator shardIterator = new PlainShardIterator( + ShardIterator shardIterator = new ShardIterator( new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList()) ); @@ -61,21 +59,21 @@ public void testEmptyIterator() { assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); - shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); + shardIterator = new ShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); - shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); + shardIterator = new ShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); - shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); + shardIterator = new ShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); @@ -357,12 +355,7 @@ public void testShardsAndPreferNodeRouting() { new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) ); - GroupShardsIterator shardIterators = operationRouting.searchShards( - clusterState, - new String[] { "test" }, - null, - "_shards:0" - ); + List shardIterators = operationRouting.searchShards(clusterState, new String[] { "test" }, null, "_shards:0"); assertThat(shardIterators.size(), equalTo(1)); assertThat(shardIterators.iterator().next().shardId().id(), equalTo(0)); diff --git a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java index ebf1064c2ae3f..77ae472065b08 100644 --- a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java @@ -104,7 +104,7 @@ public void setUp() throws Exception { env = newEnvironment(Settings.EMPTY); - Files.createDirectories(env.configFile()); + Files.createDirectories(env.configDir()); fileWatchingService = new TestFileWatchingService(getWatchedFilePath(env)); } @@ -203,7 +203,7 @@ private void writeTestFile(Path path, String contents) throws IOException { } private static Path getWatchedFilePath(Environment env) { - return 
env.configFile().toAbsolutePath().resolve("test").resolve("test.json"); + return env.configDir().toAbsolutePath().resolve("test").resolve("test.json"); } } diff --git a/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java b/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java index 0dd1db64c144f..0c2dc68a01464 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/LocallyMountedSecretsTests.java @@ -97,7 +97,7 @@ public void testCreate() { } public void testProcessSettingsFile() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getVersion(), equalTo(1L)); @@ -109,7 +109,7 @@ public void testProcessSettingsFile() throws Exception { } public void testProcessDeprecatedSettingsFile() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSONDepricated); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSONDepricated); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getVersion(), equalTo(1L)); @@ -119,7 +119,7 @@ public void testProcessDeprecatedSettingsFile() throws Exception { } public void testDuplicateSettingKeys() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSONDuplicateKeys); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSONDuplicateKeys); Exception e = expectThrows(Exception.class, () -> new LocallyMountedSecrets(env)); assertThat(e, instanceOf(XContentParseException.class)); assertThat(e.getMessage(), containsString("failed to parse field")); @@ -134,7 +134,7 @@ public void testDuplicateSettingKeys() throws Exception { } public void testSettingsGetFile() throws IOException, GeneralSecurityException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getSettingNames(), containsInAnyOrder("aaa", "ccc", "eee")); @@ -165,7 +165,7 @@ public void testSettingsGetFile() throws IOException, GeneralSecurityException { } public void testSettingsSHADigest() throws IOException, GeneralSecurityException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertTrue(secrets.isLoaded()); assertThat(secrets.getSettingNames(), containsInAnyOrder("aaa", "ccc", "eee")); @@ -178,7 +178,7 @@ public void testSettingsSHADigest() throws IOException, GeneralSecurityException } public void testProcessBadSettingsFile() throws IOException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), noMetadataJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), noMetadataJSON); assertThat( expectThrows(IllegalArgumentException.class, () -> new LocallyMountedSecrets(env)).getMessage(), 
containsString("Required [metadata]") @@ -186,7 +186,7 @@ public void testProcessBadSettingsFile() throws IOException { } public void testSerializationWithSecrets() throws Exception { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); final BytesStreamOutput out = new BytesStreamOutput(); @@ -213,7 +213,7 @@ public void testSerializationNewlyCreated() throws Exception { } public void testClose() throws IOException { - writeTestFile(env.configFile().resolve("secrets").resolve("secrets.json"), testJSON); + writeTestFile(env.configDir().resolve("secrets").resolve("secrets.json"), testJSON); LocallyMountedSecrets secrets = new LocallyMountedSecrets(env); assertEquals("bbb", secrets.getString("aaa").toString()); assertEquals("ddd", secrets.getString("ccc").toString()); diff --git a/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java b/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java index 32a8de20df9aa..0874a106e59e7 100644 --- a/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java +++ b/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java @@ -11,6 +11,8 @@ import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.equalTo; + public class SizeLimitingStringWriterTests extends ESTestCase { public void testSizeIsLimited() { SizeLimitingStringWriter writer = new SizeLimitingStringWriter(10); @@ -26,4 +28,11 @@ public void testSizeIsLimited() { expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.append("a")); expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.append("a", 0, 1)); } + + public void testLimitMessage() { + SizeLimitingStringWriter writer = new SizeLimitingStringWriter(3); + + var e = expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.write("abcdefgh")); + assertThat(e.getMessage(), equalTo("String [abc...] 
has size [8] which exceeds the size limit [3]")); + } } diff --git a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 25ac11b516dc0..834f53dc410da 100644 --- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -34,20 +34,20 @@ public class EnvironmentTests extends ESTestCase { public void testRepositoryResolution() throws IOException { Environment environment = newEnvironment(); - assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue()); - assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("test/repos/repo1"), nullValue()); environment = newEnvironment( Settings.builder() .putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other") .build() ); - assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("/test/repos/../repo1"), nullValue()); - assertThat(environment.resolveRepoFile("/test/repos/../repos/repo1"), notNullValue()); - assertThat(environment.resolveRepoFile("/somethingeles/repos/repo1"), nullValue()); - assertThat(environment.resolveRepoFile("/test/other/repo"), notNullValue()); + assertThat(environment.resolveRepoDir("/test/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("test/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/another/repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/test/repos/../repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/repos/../repos/repo1"), notNullValue()); + assertThat(environment.resolveRepoDir("/somethingeles/repos/repo1"), nullValue()); + assertThat(environment.resolveRepoDir("/test/other/repo"), notNullValue()); assertThat(environment.resolveRepoURL(new URL("file:///test/repos/repo1")), notNullValue()); assertThat(environment.resolveRepoURL(new URL("file:/test/repos/repo1")), notNullValue()); @@ -66,7 +66,7 @@ public void testPathDataWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.dataFiles(), equalTo(new Path[] { pathHome.resolve("data") })); + assertThat(environment.dataDirs(), equalTo(new Path[] { pathHome.resolve("data") })); } public void testPathDataNotSetInEnvironmentIfNotSet() { @@ -82,41 +82,41 @@ public void testPathDataLegacyCommaList() { .put("path.data", createTempDir().toAbsolutePath() + "," + createTempDir().toAbsolutePath()) .build(); final Environment environment = new Environment(settings, null); - assertThat(environment.dataFiles(), arrayWithSize(2)); + assertThat(environment.dataDirs(), arrayWithSize(2)); } public void testPathLogsWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.logsFile(), equalTo(pathHome.resolve("logs"))); + assertThat(environment.logsDir(), 
equalTo(pathHome.resolve("logs"))); } public void testDefaultConfigPath() { final Path path = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", path).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.configFile(), equalTo(path.resolve("config"))); + assertThat(environment.configDir(), equalTo(path.resolve("config"))); } public void testConfigPath() { final Path configPath = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", createTempDir().toAbsolutePath()).build(); final Environment environment = new Environment(settings, configPath); - assertThat(environment.configFile(), equalTo(configPath)); + assertThat(environment.configDir(), equalTo(configPath)); } public void testConfigPathWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); final Environment environment = new Environment(settings, null); - assertThat(environment.configFile(), equalTo(pathHome.resolve("config"))); + assertThat(environment.configDir(), equalTo(pathHome.resolve("config"))); } public void testNonExistentTempPathValidation() { Settings build = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(build, null, createTempDir().resolve("this_does_not_exist")); - FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpFile); + FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpDir); assertThat(e.getMessage(), startsWith("Temporary directory [")); assertThat(e.getMessage(), endsWith("this_does_not_exist] does not exist or is not accessible")); } @@ -124,7 +124,7 @@ public void testNonExistentTempPathValidation() { public void testTempPathValidationWhenRegularFile() throws IOException { Settings build = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(build, null, createTempFile("something", ".test")); - IOException e = expectThrows(IOException.class, environment::validateTmpFile); + IOException e = expectThrows(IOException.class, environment::validateTmpDir); assertThat(e.getMessage(), startsWith("Temporary directory [")); assertThat(e.getMessage(), endsWith(".test] is not a directory")); } diff --git a/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java b/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java index ab0ccb129fe57..0cfa9716c5fe7 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeRepurposeCommandTests.java @@ -131,7 +131,7 @@ public void testCleanupAll() throws Exception { boolean hasClusterState = randomBoolean(); createIndexDataFiles(dataMasterSettings, shardCount, hasClusterState); - String messageText = NodeRepurposeCommand.noMasterMessage(1, environment.dataFiles().length * shardCount, 0); + String messageText = NodeRepurposeCommand.noMasterMessage(1, environment.dataDirs().length * shardCount, 0); Matcher outputMatcher = allOf( containsString(messageText), @@ -157,7 +157,7 @@ public void testCleanupShardData() throws Exception { createIndexDataFiles(dataMasterSettings, shardCount, hasClusterState); Matcher matcher = allOf( - containsString(NodeRepurposeCommand.shardMessage(environment.dataFiles().length * 
shardCount, 1)), + containsString(NodeRepurposeCommand.shardMessage(environment.dataDirs().length * shardCount, 1)), conditionalNot(containsString("testUUID"), verbose == false), conditionalNot(containsString("testIndex"), verbose == false || hasClusterState == false), conditionalNot(containsString("no name for uuid: testUUID"), verbose == false || hasClusterState) @@ -271,7 +271,7 @@ private void verifyUnchangedDataFiles(CheckedRunnable runna private long digestPaths() { // use a commutative digest to avoid dependency on file system order. - return Arrays.stream(environment.dataFiles()).mapToLong(this::digestPath).sum(); + return Arrays.stream(environment.dataDirs()).mapToLong(this::digestPath).sum(); } private long digestPath(Path path) { diff --git a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java index 07aa9af3b4030..b77f970d9e785 100644 --- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java @@ -117,7 +117,7 @@ public void setUp() throws Exception { public void testServiceBasics() { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); { HealthStatus expectedStatus = HealthStatus.UNKNOWN; HealthInfo healthInfo = HealthInfo.EMPTY_HEALTH_INFO; @@ -141,7 +141,7 @@ public void testServiceBasics() { public void testIndicatorYieldsGreenWhenNodeHasUnknownStatus() { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthStatus expectedStatus = HealthStatus.GREEN; HealthInfo healthInfo = createHealthInfoWithOneUnhealthyNode(HealthStatus.UNKNOWN, discoveryNodes); @@ -152,7 +152,7 @@ public void testIndicatorYieldsGreenWhenNodeHasUnknownStatus() { public void testGreen() throws IOException { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthStatus expectedStatus = HealthStatus.GREEN; HealthInfo healthInfo = createHealthInfoWithOneUnhealthyNode(expectedStatus, discoveryNodes); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); @@ -187,7 +187,7 @@ public void testYellowMixedNodes() throws IOException { final var clusterService = createClusterService(Set.of(), allNodes, indexNameToNodeIdsMap); HealthStatus expectedStatus = HealthStatus.YELLOW; HealthInfo healthInfo = createHealthInfo(new HealthInfoConfig(expectedStatus, allNodes.size(), allNodes)); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService 
diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); assertThat(result.status(), equalTo(expectedStatus)); assertThat(result.symptom(), containsString("with roles: [data")); @@ -265,7 +265,7 @@ public void testRedNoBlockedIndicesAndRedAllRoleNodes() throws IOException { indexNameToNodeIdsMap.put(indexName, new HashSet<>(randomNonEmptySubsetOf(affectedNodeIds))); } ClusterService clusterService = createClusterService(Set.of(), discoveryNodes, indexNameToNodeIdsMap); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); Map diskInfoByNode = new HashMap<>(); for (DiscoveryNode discoveryNode : discoveryNodes) { if (affectedNodeIds.contains(discoveryNode.getId())) { @@ -329,7 +329,7 @@ public void testRedNoBlockedIndicesAndRedAllRoleNodes() throws IOException { public void testRedWithBlockedIndicesAndGreenNodes() throws IOException { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, true); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthStatus expectedStatus = HealthStatus.RED; HealthInfo healthInfo = createHealthInfoWithOneUnhealthyNode(HealthStatus.GREEN, discoveryNodes); @@ -374,7 +374,7 @@ public void testRedWithBlockedIndicesAndGreenNodes() throws IOException { public void testRedWithBlockedIndicesAndYellowNodes() throws IOException { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, true); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthStatus expectedStatus = HealthStatus.RED; int numberOfYellowNodes = randomIntBetween(1, discoveryNodes.size()); HealthInfo healthInfo = createHealthInfo(new HealthInfoConfig(HealthStatus.YELLOW, numberOfYellowNodes, discoveryNodes)); @@ -453,7 +453,7 @@ public void testRedBlockedIndicesAndRedAllRolesNodes() throws IOException { } } ClusterService clusterService = createClusterService(blockedIndices, discoveryNodes, indexNameToNodeIdsMap); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); assertThat(result.status(), equalTo(expectedStatus)); assertThat( @@ -492,7 +492,7 @@ public void testRedNodesWithoutAnyBlockedIndices() throws IOException { indexNameToNodeIdsMap.put(indexName, nonRedNodeIds); } ClusterService clusterService = createClusterService(Set.of(), discoveryNodes, indexNameToNodeIdsMap); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); 
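
Every construction site in this test class changes in the same mechanical way: the DiskHealthIndicatorService constructor loses its featureService argument, while the calculate flow is untouched. Condensed from the hunks above, the pattern each test now follows is:

    // After this change, construction needs only the ClusterService.
    DiskHealthIndicatorService service = new DiskHealthIndicatorService(clusterService);
    HealthIndicatorResult result = service.calculate(true, healthInfo);
    assertThat(result.status(), equalTo(expectedStatus));
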
assertThat(result.status(), equalTo(expectedStatus)); assertThat(result.impacts().size(), equalTo(3)); @@ -528,7 +528,7 @@ public void testMissingHealthInfo() { Set discoveryNodesInClusterState = new HashSet<>(discoveryNodes); discoveryNodesInClusterState.add(DiscoveryNodeUtils.create(randomAlphaOfLength(30), UUID.randomUUID().toString())); ClusterService clusterService = createClusterService(discoveryNodesInClusterState, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); { HealthInfo healthInfo = HealthInfo.EMPTY_HEALTH_INFO; HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); @@ -560,7 +560,7 @@ public void testUnhealthyMasterNodes() { Set roles = Set.of(DiscoveryNodeRole.MASTER_ROLE, otherRole); Set discoveryNodes = createNodes(roles); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthStatus expectedStatus = randomFrom(HealthStatus.RED, HealthStatus.YELLOW); int numberOfProblemNodes = randomIntBetween(1, discoveryNodes.size()); HealthInfo healthInfo = createHealthInfo(new HealthInfoConfig(expectedStatus, numberOfProblemNodes, discoveryNodes)); @@ -615,7 +615,7 @@ public void testUnhealthyNonDataNonMasterNodes() { Set roles = new HashSet<>(randomNonEmptySubsetOf(OTHER_ROLES)); Set nodes = createNodes(roles); ClusterService clusterService = createClusterService(nodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthStatus expectedStatus = randomFrom(HealthStatus.RED, HealthStatus.YELLOW); int numberOfProblemNodes = randomIntBetween(1, nodes.size()); HealthInfo healthInfo = createHealthInfo(new HealthInfoConfig(expectedStatus, numberOfProblemNodes, nodes)); @@ -671,7 +671,7 @@ public void testBlockedIndexWithRedNonDataNodesAndYellowDataNodes() { Set masterNodes = createNodes(masterRole); Set otherNodes = createNodes(otherRoles); ClusterService clusterService = createClusterService(Sets.union(Sets.union(dataNodes, masterNodes), otherNodes), true); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); int numberOfRedMasterNodes = randomIntBetween(1, masterNodes.size()); int numberOfRedOtherNodes = randomIntBetween(1, otherNodes.size()); int numberOfYellowDataNodes = randomIntBetween(1, dataNodes.size()); @@ -893,7 +893,7 @@ public void testLimitNumberOfAffectedResources() { Set masterNodes = createNodes(20, masterRole); Set otherNodes = createNodes(10, otherRoles); ClusterService clusterService = createClusterService(Sets.union(Sets.union(dataNodes, masterNodes), otherNodes), true); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); int numberOfRedMasterNodes = masterNodes.size(); int 
numberOfRedOtherNodes = otherNodes.size(); int numberOfYellowDataNodes = dataNodes.size(); @@ -968,7 +968,7 @@ public void testLimitNumberOfAffectedResources() { public void testSkippingFieldsWhenVerboseIsFalse() { Set discoveryNodes = createNodesWithAllRoles(); ClusterService clusterService = createClusterService(discoveryNodes, false); - DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService); HealthStatus expectedStatus = HealthStatus.RED; HealthInfo healthInfo = createHealthInfoWithOneUnhealthyNode(expectedStatus, discoveryNodes); HealthIndicatorResult result = diskHealthIndicatorService.calculate(false, healthInfo); diff --git a/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java index aee02fb288b55..420a655e092e2 100644 --- a/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutorTests.java @@ -94,7 +94,7 @@ public void tearDown() throws Exception { } public void testTaskCreation() throws Exception { - HealthNodeTaskExecutor.create(clusterService, persistentTasksService, featureService, settings, clusterSettings); + HealthNodeTaskExecutor.create(clusterService, persistentTasksService, settings, clusterSettings); clusterService.getClusterApplierService().onNewClusterState("initialization", this::initialState, ActionListener.noop()); // Ensure that if the task is gone, it will be recreated. clusterService.getClusterApplierService().onNewClusterState("initialization", this::initialState, ActionListener.noop()); @@ -110,13 +110,7 @@ public void testTaskCreation() throws Exception { } public void testSkippingTaskCreationIfItExists() { - HealthNodeTaskExecutor executor = HealthNodeTaskExecutor.create( - clusterService, - persistentTasksService, - featureService, - settings, - clusterSettings - ); + HealthNodeTaskExecutor executor = HealthNodeTaskExecutor.create(clusterService, persistentTasksService, settings, clusterSettings); executor.startTask(new ClusterChangedEvent("", stateWithHealthNodeSelectorTask(initialState()), ClusterState.EMPTY_STATE)); verify(persistentTasksService, never()).sendStartRequest( eq("health-node"), @@ -132,7 +126,6 @@ public void testDoNothingIfAlreadyShutdown() { HealthNodeTaskExecutor executor = HealthNodeTaskExecutor.create( clusterService, persistentTasksService, - featureService, settings, clusterSettings ); @@ -150,7 +143,6 @@ public void testAbortOnShutdown() { HealthNodeTaskExecutor executor = HealthNodeTaskExecutor.create( clusterService, persistentTasksService, - featureService, settings, clusterSettings ); @@ -165,13 +157,7 @@ public void testAbortOnShutdown() { } public void testAbortOnDisable() { - HealthNodeTaskExecutor executor = HealthNodeTaskExecutor.create( - clusterService, - persistentTasksService, - featureService, - settings, - clusterSettings - ); + HealthNodeTaskExecutor executor = HealthNodeTaskExecutor.create(clusterService, persistentTasksService, settings, clusterSettings); HealthNode task = mock(HealthNode.class); PersistentTaskState state = mock(PersistentTaskState.class); executor.nodeOperation(task, new HealthNodeTaskParams(), state); diff --git 
a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index fa774c0bcfd12..bfe7702ea3311 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -355,8 +355,6 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } }; - // the set of headers to copy - Set headers = Set.of(new RestHeaderDefinition(Task.TRACE_PARENT_HTTP_HEADER, false)); // sample request headers to test with Map> restHeaders = new HashMap<>(); restHeaders.put(Task.TRACE_PARENT_HTTP_HEADER, Collections.singletonList(traceParentValue)); @@ -397,7 +395,7 @@ public HttpStats stats() { @Override protected void populatePerRequestThreadContext(RestRequest restRequest, ThreadContext threadContext) { - getFakeActionModule(headers).copyRequestHeadersToThreadContext(restRequest.getHttpRequest(), threadContext); + getFakeActionModule(Set.of()).copyRequestHeadersToThreadContext(restRequest.getHttpRequest(), threadContext); } } ) { diff --git a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index 359118c7cb5a1..d4aec300c666b 100644 --- a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TestSearchContext; @@ -93,7 +94,7 @@ public ShardSearchRequest request() { } @Override - public SearchShardTask getTask() { + public CancellableTask getTask() { return super.getTask(); } }; diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 6b1ffc3693636..9c9492df24cf3 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -50,7 +50,7 @@ public void testResolveDefaultCodecs() throws Exception { assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()); CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Elasticsearch900Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch900Lucene101Codec.class)); } public void testDefault() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java index 57cca6eea86ec..69d2cc21a6a2d 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import 
org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java index 9069b094ee483..3f750ab5d7cbc 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813Int8FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index 549a14ca6c31b..eaf59b7028b80 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.KnnFloatVectorField; @@ -42,7 +42,7 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES814HnswScalarQuantizedVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java index 034d428b25209..a4c3697726cb2 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import 
org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815BitFlatVectorFormatTests extends BaseKnnBitVectorsFormatTestCa @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815BitFlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java index 4af6a405c7705..b5f56b6b42b7c 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815HnswBitVectorsFormatTests extends BaseKnnBitVectorsFormatTestC @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815HnswBitVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java index 48ba566353f5d..e11775e2cdedb 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.DirectoryReader; @@ -60,7 +60,7 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816BinaryQuantizedRWVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java index 03aa847f3a5d4..5c78aa5367f23 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import 
org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; @@ -56,7 +56,7 @@ public class ES816HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816HnswBinaryQuantizedRWVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java index 397cc472592b6..6b8b64b235252 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.DirectoryReader; @@ -60,7 +60,7 @@ public class ES818BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES818BinaryQuantizedVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java index 09304b3ba4c91..c0f66adda4b94 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; @@ -56,7 +56,7 @@ public class ES818HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES818HnswBinaryQuantizedVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index 437ba1cecc11d..0e5732ec09e5b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; 
-import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -35,13 +35,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene101Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene101Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java index 77a7585e3b518..b6fefcb9a4e98 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch900Codec; +import org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec; public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + private final Codec codec = new Elasticsearch900Lucene101Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java index 3d6cfea70d121..98318707f6c4b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch900Codec; +import org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec; public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + private final Codec codec = new Elasticsearch900Lucene101Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); @Override protected Codec getCodec() { diff --git 
a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 6d205a22433b4..1343078906d6f 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -9,12 +9,12 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion101PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.elasticsearch.ElasticsearchException; @@ -44,8 +44,8 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - final PostingsFormat postingsFormat = new Completion912PostingsFormat(); - indexWriterConfig.setCodec(new Lucene100Codec() { + final PostingsFormat postingsFormat = new Completion101PostingsFormat(); + indexWriterConfig.setCodec(new Lucene101Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 36c25b352a792..172545ab459c2 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -333,7 +333,8 @@ public void testRequireDocValuesOnLongs() { null, false, null, - null + null, + false ) ); } @@ -353,7 +354,8 @@ public void testRequireDocValuesOnDoubles() { null, false, null, - null + null, + false ) ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index c9fe314056331..b093307f3733b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -16,7 +16,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion101PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -148,21 +148,19 @@ protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) { } public void testPostingsFormat() throws IOException { + final Class<?> latestLuceneCPClass = Completion101PostingsFormat.class; MapperService mapperService =
createMapperService(fieldMapping(this::minimalMapping)); CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE); Codec codec = codecService.codec("default"); if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { assertThat(codec, instanceOf(PerFieldMapperCodec.class)); - assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(Completion912PostingsFormat.class)); + assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(latestLuceneCPClass)); } else { if (codec instanceof CodecService.DeduplicateFieldInfosCodec deduplicateFieldInfosCodec) { codec = deduplicateFieldInfosCodec.delegate(); } assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class)); - assertThat( - ((LegacyPerFieldMapperCodec) codec).getPostingsFormatForField("field"), - instanceOf(Completion912PostingsFormat.class) - ); + assertThat(((LegacyPerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(latestLuceneCPClass)); } } @@ -305,6 +303,55 @@ public void testKeywordWithSubCompletionAndContext() throws Exception { ); } + public void testDuplicateSuggestionsWithContexts() throws IOException { + DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { + b.field("type", "completion"); + b.startArray("contexts"); + { + b.startObject(); + b.field("name", "place"); + b.field("type", "category"); + b.endObject(); + } + b.endArray(); + })); + + ParsedDocument parsedDocument = defaultMapper.parse(source(b -> { + b.startArray("field"); + { + b.startObject(); + { + b.array("input", "timmy", "starbucks"); + b.startObject("contexts").array("place", "cafe", "food").endObject(); + b.field("weight", 10); + } + b.endObject(); + b.startObject(); + { + b.array("input", "timmy", "starbucks"); + b.startObject("contexts").array("place", "restaurant").endObject(); + b.field("weight", 1); + } + b.endObject(); + } + b.endArray(); + })); + + List<IndexableField> indexedFields = parsedDocument.rootDoc().getFields("field"); + assertThat(indexedFields, hasSize(4)); + + assertThat( + indexedFields, + containsInAnyOrder( + contextSuggestField("timmy"), + contextSuggestField("timmy"), + contextSuggestField("starbucks"), + contextSuggestField("starbucks") + ) + ); + + } + public void testCompletionWithContextAndSubCompletion() throws Exception { DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java index 5da7d6100bf4b..75c984d6f4305 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java @@ -129,7 +129,6 @@ protected Settings getIndexSettings() { var resultFromParserContext = parserContext.createDynamicMapperBuilderContext(); assertEquals("foo.hey", resultFromParserContext.buildFullName("hey")); - assertTrue(resultFromParserContext.isSourceSynthetic()); assertTrue(resultFromParserContext.isDataStream()); assertTrue(resultFromParserContext.parentObjectContainsDimensions()); assertEquals(ObjectMapper.Defaults.DYNAMIC, resultFromParserContext.getDynamic()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index
d12bf5dc2e34c..6bcc94924d551 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -2420,6 +2420,34 @@ public void testStoredArrayWithFlatFields() throws IOException { {"outer":{"inner":[{"a.b":"a.b","a.c":"a.c"}]}}""", syntheticSource); } + public void testSingleDeepIgnoredField() throws IOException { + DocumentMapper documentMapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("top"); + b.startObject("properties"); + { + b.startObject("level1").startObject("properties"); + { + b.startObject("level2").startObject("properties"); + { + b.startObject("n") + .field("type", "integer") + .field("doc_values", "false") + .field("synthetic_source_keep", "all") + .endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startObject("top").startObject("level1").startObject("level2").field("n", 25).endObject().endObject().endObject(); + }); + assertEquals("{\"top\":{\"level1\":{\"level2\":{\"n\":25}}}}", syntheticSource); + } + protected void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader) throws IOException { // We exclude ignored source field since in some cases it contains an exact copy of a part of document source. @@ -2427,8 +2455,14 @@ protected void validateRoundTripReader(String syntheticSource, DirectoryReader r // and since the copy is exact, contents of ignored source are different. assertReaderEquals( "round trip " + syntheticSource, - new FieldMaskingReader(Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, IgnoredSourceFieldMapper.NAME), reader), - new FieldMaskingReader(Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, IgnoredSourceFieldMapper.NAME), roundTripReader) + new FieldMaskingReader( + Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, IgnoredSourceFieldMapper.NAME, SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME), + reader + ), + new FieldMaskingReader( + Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, IgnoredSourceFieldMapper.NAME, SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME), + roundTripReader + ) ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 052bf995bdd48..198988832cb55 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.CharFilterFactory; @@ -770,4 +771,204 @@ public void testDocValuesLoadedFromStoredSynthetic() throws IOException { ); assertScriptDocValues(mapper, "foo", equalTo(List.of("foo"))); } + + public void testFieldTypeWithSkipDocValues_LogsDbModeDisabledSetting() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + 
.put(IndexSettings.USE_DOC_VALUES_SKIPPER.getKey(), false) + .build(), + mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.endObject(); + }) + ); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertTrue(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + + public void testFieldTypeWithSkipDocValues_LogsDbMode() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertFalse(IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings) && mapper.fieldType().isIndexed()); + if (IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings)) { + assertTrue(mapper.fieldType().hasDocValuesSkipper()); + } else { + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + } + + public void testFieldTypeDefault_StandardMode() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertTrue(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + + public void testFieldTypeDefault_NonMatchingFieldName() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "hostname") + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("hostname"); + b.field("type", "keyword"); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("hostname"); + assertTrue(mapper.fieldType().hasDocValues()); + assertTrue(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + + public void testFieldTypeDefault_ConfiguredIndexedWithSettingOverride() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("index", true); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertFalse(IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings) && mapper.fieldType().isIndexed()); + if 
(IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings)) { + assertTrue(mapper.fieldType().hasDocValuesSkipper()); + } else { + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + } + + public void testFieldTypeDefault_ConfiguredIndexedWithoutSettingOverride() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("index", true); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertFalse(IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings) && mapper.fieldType().isIndexed()); + if (IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings)) { + assertTrue(mapper.fieldType().hasDocValuesSkipper()); + } else { + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + } + + public void testFieldTypeDefault_ConfiguredDocValues() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("doc_values", true); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertFalse(IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings) && mapper.fieldType().isIndexed()); + if (IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings)) { + assertTrue(mapper.fieldType().hasDocValuesSkipper()); + } else { + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + } + + public void testFieldTypeDefault_LogsDbMode_NonSortField() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSettings.USE_DOC_VALUES_SKIPPER.getKey(), true) + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertTrue(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + + public void testFieldTypeWithSkipDocValues_IndexedFalseDocValuesTrue() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("index", false); + b.field("doc_values", true); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertFalse(IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings) && 
mapper.fieldType().isIndexed()); + if (IndexSettings.USE_DOC_VALUES_SKIPPER.get(settings)) { + assertTrue(mapper.fieldType().hasDocValuesSkipper()); + } else { + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } + } + + public void testFieldTypeDefault_IndexedFalseDocValuesFalse() throws IOException { + final Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(); + final MapperService mapperService = createMapperService(settings, mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("index", false); + b.field("doc_values", false); + b.endObject(); + })); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertFalse(mapper.fieldType().hasDocValues()); + assertFalse(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSkipper()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 507314b31d00d..c89753214b6a9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -15,6 +15,9 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -26,6 +29,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -304,16 +308,56 @@ public void testMappingRecoverySkipFieldNameLengthLimit() throws Throwable { public void testIsMetadataField() throws IOException { IndexVersion version = IndexVersionUtils.randomCompatibleVersion(random()); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - MapperService mapperService = createMapperService(settings, mapping(b -> {})); - assertFalse(mapperService.isMetadataField(randomAlphaOfLengthBetween(10, 15))); + CheckedFunction<IndexMode, MapperService, IOException> initMapperService = (indexMode) -> { Settings.Builder settingsBuilder = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, version) + .put(IndexSettings.MODE.getKey(), indexMode); - for (String builtIn : IndicesModule.getBuiltInMetadataFields()) { - if (NestedPathFieldMapper.NAME.equals(builtIn) && version.before(IndexVersions.V_8_0_0)) { - continue; // Nested field does not exist in the 7x line + if (indexMode == IndexMode.TIME_SERIES) { + settingsBuilder.put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "foo"); } - assertTrue("Expected " + builtIn + " to be a metadata field for version " + version, mapperService.isMetadataField(builtIn)); + + return createMapperService(settingsBuilder.build(), mapping(b -> {})); + }; + + Consumer<MapperService> assertMapperService = (mapperService) -> { + assertFalse(mapperService.isMetadataField(randomAlphaOfLengthBetween(10, 15))); + + for (String builtIn :
IndicesModule.getBuiltInMetadataFields()) { + if (NestedPathFieldMapper.NAME.equals(builtIn) && version.before(IndexVersions.V_8_0_0)) { + continue; // Nested field does not exist in the 7x line + } + boolean isTimeSeriesField = builtIn.equals("_tsid") || builtIn.equals("_ts_routing_hash"); + boolean isTimeSeriesMode = mapperService.getIndexSettings().getMode().equals(IndexMode.TIME_SERIES); + + if (isTimeSeriesField && isTimeSeriesMode == false) { + assertFalse( + "Expected " + + builtIn + + " to not be a metadata field for version " + + version + + " and index mode " + + mapperService.getIndexSettings().getMode(), + mapperService.isMetadataField(builtIn) + ); + } else { + assertTrue( + "Expected " + + builtIn + + " to be a metadata field for version " + + version + + " and index mode " + + mapperService.getIndexSettings().getMode(), + mapperService.isMetadataField(builtIn) + ); + } + } + }; + + for (IndexMode indexMode : IndexMode.values()) { + MapperService mapperService = initMapperService.apply(indexMode); + assertMapperService.accept(mapperService); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java index b87ab09c530d6..4b674cf1985b2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.hamcrest.CoreMatchers; @@ -327,11 +326,7 @@ public void testBlankFieldNameBefore8_6_0() throws Exception { IndexVersions.MINIMUM_READONLY_COMPATIBLE, IndexVersions.V_8_5_0 ); - TransportVersion transportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersions.V_8_5_0 - ); + TransportVersion transportVersion = TransportVersions.V_8_5_0; { XContentBuilder builder = mapping(b -> b.startObject(" ").field("type", randomFieldType()).endObject()); MappingParser mappingParser = createMappingParser(Settings.EMPTY, version, transportVersion); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index a8b935c79ccc0..164e0232bf409 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -140,7 +140,8 @@ private static MappedFieldType unsearchable() { null, false, null, - null + null, + false ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 8ad37908b2e9c..70010084cdb96 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -489,8 +489,13 @@ public void testRecoverySourceWithSyntheticSource() throws IOException { MapperService mapperService = createMapperService(settings, topMapping(b -> {})); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = 
docMapper.parse(source(b -> b.field("field1", "value1"))); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"field1\":\"value1\"}"))); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"field1\":\"value1\"}"))); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() @@ -521,8 +526,16 @@ public void testRecoverySourceWithLogs() throws IOException { MapperService mapperService = createMapperService(settings, mapping(b -> {})); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> { b.field("@timestamp", "2012-02-13"); })); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}"))); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat( + doc.rootDoc().getField("_recovery_source").binaryValue(), + equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}")) + ); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() @@ -715,8 +728,16 @@ public void testRecoverySourceWithLogsCustom() throws IOException { MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> { b.field("@timestamp", "2012-02-13"); })); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}"))); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat( + doc.rootDoc().getField("_recovery_source").binaryValue(), + equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}")) + ); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() @@ -742,11 +763,16 @@ public void testRecoverySourceWithTimeSeries() throws IOException { })); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source("123", b -> b.field("@timestamp", "2012-02-13").field("field", "value1"), null)); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat( - doc.rootDoc().getField("_recovery_source").binaryValue(), - equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) - ); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat( + doc.rootDoc().getField("_recovery_source").binaryValue(), + equalTo(new 
BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) + ); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() @@ -790,11 +816,16 @@ public void testRecoverySourceWithTimeSeriesCustom() throws IOException { MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source("123", b -> b.field("@timestamp", "2012-02-13").field("field", "value1"), null)); - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat( - doc.rootDoc().getField("_recovery_source").binaryValue(), - equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) - ); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { + // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag + assertNotNull(doc.rootDoc().getField("_recovery_source")); + assertThat( + doc.rootDoc().getField("_recovery_source").binaryValue(), + equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) + ); + } else { + assertNull(doc.rootDoc().getField("_recovery_source")); + } } { Settings settings = Settings.builder() diff --git a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ByteFieldBlockLoaderTests.java similarity index 50% rename from libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java rename to server/src/test/java/org/elasticsearch/index/mapper/blockloader/ByteFieldBlockLoaderTests.java index 34776407f759e..28d7cbcfb42db 100644 --- a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ByteFieldBlockLoaderTests.java @@ -7,17 +7,18 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.nativeaccess; +package org.elasticsearch.index.mapper.blockloader; -public class NativeAccessUtil { - /** - * Enables native access for the provided module. Available to JDK 22+, required for JDK 24+ when using --illegal-native-access=deny - */ - public static void enableNativeAccess(ModuleLayer.Controller controller, Module module) { - controller.enableNativeAccess(module); +import org.elasticsearch.logsdb.datageneration.FieldType; + +public class ByteFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { + public ByteFieldBlockLoaderTests() { + super(FieldType.BYTE); } - public static boolean isNativeAccessEnabled(Module module) { - return module.isNativeAccessEnabled(); + @Override + protected Integer convert(Number value) { + // All values that fit into int are represented as ints + return value.intValue(); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DoubleFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DoubleFieldBlockLoaderTests.java new file mode 100644 index 0000000000000..e0b62b21ad87a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DoubleFieldBlockLoaderTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.elasticsearch.logsdb.datageneration.FieldType; + +public class DoubleFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { + public DoubleFieldBlockLoaderTests() { + super(FieldType.DOUBLE); + } + + @Override + protected Double convert(Number value) { + return value.doubleValue(); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/FloatFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/FloatFieldBlockLoaderTests.java new file mode 100644 index 0000000000000..63439a97d7c9d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/FloatFieldBlockLoaderTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.elasticsearch.logsdb.datageneration.FieldType; + +public class FloatFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { + public FloatFieldBlockLoaderTests() { + super(FieldType.FLOAT); + } + + @Override + protected Double convert(Number value) { + // All float values are represented as double + return value.doubleValue(); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/HalfFloatFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/HalfFloatFieldBlockLoaderTests.java new file mode 100644 index 0000000000000..1e8cedb734af3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/HalfFloatFieldBlockLoaderTests.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.elasticsearch.logsdb.datageneration.FieldType; + +public class HalfFloatFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase<Double> { + public HalfFloatFieldBlockLoaderTests() { + super(FieldType.HALF_FLOAT); + } + + @Override + protected Double convert(Number value) { + // All float values are represented as double + return (double) HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(value.floatValue())); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/IntegerFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/IntegerFieldBlockLoaderTests.java new file mode 100644 index 0000000000000..5d7b9d78442cb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/IntegerFieldBlockLoaderTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.elasticsearch.logsdb.datageneration.FieldType; + +public class IntegerFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase<Integer> { + public IntegerFieldBlockLoaderTests() { + super(FieldType.INTEGER); + } + + @Override + protected Integer convert(Number value) { + return value.intValue(); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java index 40e5829b5b12e..909cccf9e7d54 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.index.mapper.BlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -28,27 +27,30 @@ public KeywordFieldBlockLoaderTests() { @SuppressWarnings("unchecked") @Override protected Object expected(Map<String, Object> fieldMapping, Object value, boolean syntheticSource) { - if (value == null) { - return null; - } + var nullValue = (String) fieldMapping.get("null_value"); var ignoreAbove = fieldMapping.get("ignore_above") == null ?
Integer.MAX_VALUE : ((Number) fieldMapping.get("ignore_above")).intValue(); + if (value == null) { + return convert(null, nullValue, ignoreAbove); + } + if (value instanceof String s) { - return convert(s, ignoreAbove); + return convert(s, nullValue, ignoreAbove); } - Function<Stream<String>, Stream<BytesRef>> convertValues = s -> s.map(v -> convert(v, ignoreAbove)).filter(Objects::nonNull); + Function<Stream<String>, Stream<BytesRef>> convertValues = s -> s.map(v -> convert(v, nullValue, ignoreAbove)) + .filter(Objects::nonNull); if ((boolean) fieldMapping.getOrDefault("doc_values", false)) { // Sorted and no duplicates - var values = new HashSet<>((List<String>) value); - var resultList = convertValues.compose(s -> values.stream().filter(Objects::nonNull).sorted()) + var resultList = convertValues.andThen(Stream::distinct) + .andThen(Stream::sorted) .andThen(Stream::toList) - .apply(values.stream()); + .apply(((List<String>) value).stream()); return maybeFoldList(resultList); } @@ -57,21 +59,13 @@ protected Object expected(Map<String, Object> fieldMapping, Object value, boolea var resultList = convertValues.apply(((List<String>) value).stream()).toList(); return maybeFoldList(resultList); } - private Object maybeFoldList(List<?> list) { - if (list.isEmpty()) { - return null; - } - - if (list.size() == 1) { - return list.get(0); - } - - return list; - } - - private BytesRef convert(String value, int ignoreAbove) { + private BytesRef convert(String value, String nullValue, int ignoreAbove) { if (value == null) { - return null; + if (nullValue != null) { + value = nullValue; + } else { + return null; + } } return value.length() <= ignoreAbove ? new BytesRef(value) : null; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/LongFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/LongFieldBlockLoaderTests.java new file mode 100644 index 0000000000000..ff953294fb618 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/LongFieldBlockLoaderTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.elasticsearch.logsdb.datageneration.FieldType; + +public class LongFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase<Long> { + public LongFieldBlockLoaderTests() { + super(FieldType.LONG); + } + + @Override + protected Long convert(Number value) { + return value.longValue(); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/NumberFieldBlockLoaderTestCase.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/NumberFieldBlockLoaderTestCase.java new file mode 100644 index 0000000000000..e523d011c3ab1 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/NumberFieldBlockLoaderTestCase.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.elasticsearch.index.mapper.BlockLoaderTestCase; +import org.elasticsearch.logsdb.datageneration.FieldType; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public abstract class NumberFieldBlockLoaderTestCase extends BlockLoaderTestCase { + public NumberFieldBlockLoaderTestCase(FieldType fieldType) { + super(fieldType); + } + + @Override + @SuppressWarnings("unchecked") + protected Object expected(Map fieldMapping, Object value, boolean syntheticSource) { + var nullValue = fieldMapping.get("null_value") != null ? convert((Number) fieldMapping.get("null_value")) : null; + + if (value instanceof List == false) { + return convert(value, nullValue); + } + + if ((boolean) fieldMapping.getOrDefault("doc_values", false)) { + // Sorted and no duplicates + var resultList = ((List) value).stream().map(v -> convert(v, nullValue)).filter(Objects::nonNull).sorted().toList(); + return maybeFoldList(resultList); + } + + // parsing from source + var resultList = ((List) value).stream().map(v -> convert(v, nullValue)).filter(Objects::nonNull).toList(); + return maybeFoldList(resultList); + } + + @SuppressWarnings("unchecked") + private T convert(Object value, T nullValue) { + if (value == null) { + return nullValue; + } + // String coercion is true by default + if (value instanceof String s && s.isEmpty()) { + return nullValue; + } + if (value instanceof Number n) { + return convert(n); + } + + // Malformed values are excluded + return null; + } + + protected abstract T convert(Number value); +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ShortFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ShortFieldBlockLoaderTests.java new file mode 100644 index 0000000000000..a40bc1c404f45 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ShortFieldBlockLoaderTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.elasticsearch.logsdb.datageneration.FieldType; + +public class ShortFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase<Integer> { + public ShortFieldBlockLoaderTests() { + super(FieldType.SHORT); + } + + @Override + protected Integer convert(Number value) { + // All values that fit into int are represented as ints + return value.intValue(); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java index 8e0cd97e518fa..b2379ba579204 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java @@ -102,10 +102,6 @@ public void testDefaults() throws Exception { List<IndexableField> fields = doc1.rootDoc().getFields("field"); assertEquals(2, fields.size()); - if (IndexVersion.current().luceneVersion().major == 10) { - // TODO: Update to use Lucene's FeatureField after upgrading to Lucene 10.1. - assertThat(IndexVersion.current().luceneVersion().minor, equalTo(0)); - } assertThat(fields.get(0), Matchers.instanceOf(XFeatureField.class)); XFeatureField featureField1 = null; XFeatureField featureField2 = null; diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java index 7005f17663d0d..3476655c705ae 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java @@ -46,7 +46,7 @@ public void testOrdered() throws IOException { CannedTokenStream ts = new CannedTokenStream(new Token("term1", 1, 2), new Token("term2", 3, 4), new Token("term3", 5, 6)); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered(Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3")); + IntervalsSource expected = Intervals.ordered(Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3")); assertEquals(expected, source); @@ -57,7 +57,7 @@ public void testUnordered() throws IOException { CannedTokenStream ts = new CannedTokenStream(new Token("term1", 1, 2), new Token("term2", 3, 4), new Token("term3", 5, 6)); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, false); - IntervalsSource expected = XIntervals.unordered(Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3")); + IntervalsSource expected = Intervals.unordered(Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3")); assertEquals(expected, source); @@ -101,7 +101,7 @@ public void testSimpleSynonyms() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), Intervals.or(Intervals.term("term2"), Intervals.term("term4")), Intervals.term("term3") @@ -122,7 +122,7 @@ public void testSimpleSynonymsWithGap() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"),
Intervals.extend(Intervals.or(Intervals.term("term2"), Intervals.term("term3"), Intervals.term("term4")), 1, 0), Intervals.term("term5") @@ -143,7 +143,7 @@ public void testGraphSynonyms() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), Intervals.or(Intervals.term("term2"), Intervals.phrase("term3", "term4")), Intervals.term("term5") @@ -166,7 +166,7 @@ public void testGraphSynonymsWithGaps() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), Intervals.or( Intervals.extend(Intervals.term("term2"), 1, 0), @@ -190,7 +190,7 @@ public void testGraphTerminatesOnGap() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), Intervals.or(Intervals.term("term2"), Intervals.phrase("term3", "term4")), Intervals.extend(Intervals.term("term5"), 1, 0) diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index f0084f4f24e98..aad8275f4749d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -203,7 +203,7 @@ public void testMatchInterval() throws IOException { }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, XIntervals.unordered(Intervals.term("hello"), Intervals.term("world"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.unordered(Intervals.term("hello"), Intervals.term("world"))); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -222,7 +222,7 @@ public void testMatchInterval() throws IOException { builder = (IntervalQueryBuilder) parseQuery(json); expected = new IntervalQuery( TEXT_FIELD_NAME, - Intervals.maxgaps(40, XIntervals.unordered(Intervals.term("hello"), Intervals.term("world"))) + Intervals.maxgaps(40, Intervals.unordered(Intervals.term("hello"), Intervals.term("world"))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -241,7 +241,7 @@ public void testMatchInterval() throws IOException { builder = (IntervalQueryBuilder) parseQuery(json); expected = new BoostQuery( - new IntervalQuery(TEXT_FIELD_NAME, XIntervals.ordered(Intervals.term("hello"), Intervals.term("world"))), + new IntervalQuery(TEXT_FIELD_NAME, Intervals.ordered(Intervals.term("hello"), Intervals.term("world"))), 2 ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -263,7 +263,7 @@ public void testMatchInterval() throws IOException { builder = (IntervalQueryBuilder) parseQuery(json); expected = new IntervalQuery( TEXT_FIELD_NAME, - Intervals.maxgaps(10, XIntervals.ordered(Intervals.term("Hello"), Intervals.term("world"))) + Intervals.maxgaps(10, Intervals.ordered(Intervals.term("Hello"), Intervals.term("world"))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -285,7 +285,7 @@ public void testMatchInterval() throws 
IOException { builder = (IntervalQueryBuilder) parseQuery(json); expected = new IntervalQuery( TEXT_FIELD_NAME, - Intervals.fixField(MASKED_FIELD, Intervals.maxgaps(10, XIntervals.ordered(Intervals.term("Hello"), Intervals.term("world")))) + Intervals.fixField(MASKED_FIELD, Intervals.maxgaps(10, Intervals.ordered(Intervals.term("Hello"), Intervals.term("world")))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -314,7 +314,7 @@ public void testMatchInterval() throws IOException { expected = new IntervalQuery( TEXT_FIELD_NAME, Intervals.containing( - Intervals.maxgaps(10, XIntervals.ordered(Intervals.term("Hello"), Intervals.term("world"))), + Intervals.maxgaps(10, Intervals.ordered(Intervals.term("Hello"), Intervals.term("world"))), Intervals.term("blah") ) ); @@ -426,7 +426,7 @@ public void testCombineInterval() throws IOException { Intervals.containedBy( Intervals.maxgaps( 30, - XIntervals.ordered(Intervals.term("one"), XIntervals.unordered(Intervals.term("two"), Intervals.term("three"))) + Intervals.ordered(Intervals.term("one"), Intervals.unordered(Intervals.term("two"), Intervals.term("three"))) ), Intervals.term("SENTENCE") ) @@ -486,7 +486,7 @@ public void testCombineDisjunctionInterval() throws IOException { Intervals.notContainedBy( Intervals.maxgaps( 30, - XIntervals.ordered(Intervals.term("atmosphere"), Intervals.or(Intervals.term("cold"), Intervals.term("outside"))) + Intervals.ordered(Intervals.term("atmosphere"), Intervals.or(Intervals.term("cold"), Intervals.term("outside"))) ), Intervals.term("freeze") ) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 4549a329d499a..c07b396626c45 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -4563,11 +4563,9 @@ public void testResetEngine() throws Exception { var newEngineCreated = new CountDownLatch(2); var indexShard = newStartedShard(true, Settings.EMPTY, config -> { try { - return new ReadOnlyEngine(config, null, null, true, Function.identity(), true, true) { + return new ReadOnlyEngine(config, null, new TranslogStats(), false, Function.identity(), true, true) { @Override - public void prepareForEngineReset() throws IOException { - ; - } + public void prepareForEngineReset() throws IOException {} }; } finally { newEngineCreated.countDown(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java index 532e30804947c..feb44261ce3ed 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java @@ -34,6 +34,10 @@ public class ShardGetServiceTests extends IndexShardTestCase { + private GetResult getForUpdate(IndexShard indexShard, String id, long ifSeqNo, long ifPrimaryTerm) throws IOException { + return indexShard.getService().getForUpdate(id, ifSeqNo, ifPrimaryTerm, new String[] { RoutingFieldMapper.NAME }); + } + public void testGetForUpdate() throws IOException { Settings settings = indexSettings(IndexVersion.current(), 1, 1).build(); IndexMetadata metadata = IndexMetadata.builder("test").putMapping(""" @@ -44,7 +48,7 @@ public void testGetForUpdate() throws IOException { long translogInMemorySegmentCountExpected = 0; Engine.IndexResult test = 
indexDoc(primary, "test", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet = primary.getService().getForUpdate("0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet = getForUpdate(primary, "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertFalse(testGet.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals(testGet.sourceRef().utf8ToString(), "{\"foo\" : \"bar\"}"); assertEquals(translogInMemorySegmentCountExpected, translogInMemorySegmentCount.getAsLong()); @@ -54,7 +58,7 @@ public void testGetForUpdate() throws IOException { Engine.IndexResult test1 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet1 = getForUpdate(primary, "1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(testGet1.sourceRef().utf8ToString(), "{\"foo\" : \"baz\"}"); assertTrue(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals("foobar", testGet1.getFields().get(RoutingFieldMapper.NAME).getValue()); @@ -70,19 +74,19 @@ public void testGetForUpdate() throws IOException { // now again from the reader Engine.IndexResult test2 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); - testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + testGet1 = getForUpdate(primary, "1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(testGet1.sourceRef().utf8ToString(), "{\"foo\" : \"baz\"}"); assertTrue(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals("foobar", testGet1.getFields().get(RoutingFieldMapper.NAME).getValue()); assertEquals(translogInMemorySegmentCountExpected, translogInMemorySegmentCount.getAsLong()); final long primaryTerm = primary.getOperationPrimaryTerm(); - testGet1 = primary.getService().getForUpdate("1", test2.getSeqNo(), primaryTerm); + testGet1 = getForUpdate(primary, "1", test2.getSeqNo(), primaryTerm); assertEquals(testGet1.sourceRef().utf8ToString(), "{\"foo\" : \"baz\"}"); assertEquals(translogInMemorySegmentCountExpected, translogInMemorySegmentCount.getAsLong()); - expectThrows(VersionConflictEngineException.class, () -> primary.getService().getForUpdate("1", test2.getSeqNo() + 1, primaryTerm)); - expectThrows(VersionConflictEngineException.class, () -> primary.getService().getForUpdate("1", test2.getSeqNo(), primaryTerm + 1)); + expectThrows(VersionConflictEngineException.class, () -> getForUpdate(primary, "1", test2.getSeqNo() + 1, primaryTerm)); + expectThrows(VersionConflictEngineException.class, () -> getForUpdate(primary, "1", test2.getSeqNo(), primaryTerm + 1)); closeShards(primary); } @@ -183,7 +187,7 @@ private void runGetFromTranslogWithOptions( Engine.IndexResult res = indexDoc(primary, "test", "0", docToIndex); assertTrue(res.isCreated()); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet = primary.getService().getForUpdate("0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet = getForUpdate(primary, "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertFalse(testGet.getFields().containsKey(RoutingFieldMapper.NAME)); assertFalse(testGet.getFields().containsKey("foo")); assertFalse(testGet.getFields().containsKey("bar")); @@ -194,7 +198,7 @@ private void runGetFromTranslogWithOptions( indexDoc(primary, 
"1", docToIndex, XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet1 = getForUpdate(primary, "1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(testGet1.sourceRef() == null ? "" : testGet1.sourceRef().utf8ToString(), expectedResult); assertTrue(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); assertFalse(testGet.getFields().containsKey("foo")); @@ -252,7 +256,7 @@ public void testTypelessGetForUpdate() throws IOException { Engine.IndexResult indexResult = indexDoc(shard, "some_type", "0", "{\"foo\" : \"bar\"}"); assertTrue(indexResult.isCreated()); - GetResult getResult = shard.getService().getForUpdate("0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult getResult = getForUpdate(shard, "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertTrue(getResult.isExists()); closeShards(shard); diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index abaab1ac8983b..dd6baee601146 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -453,7 +453,7 @@ public void testRegisterHunspellDictionary() throws Exception { InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff"); InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic"); Dictionary dictionary; - try (Directory tmp = newFSDirectory(environment.tmpFile())) { + try (Directory tmp = newFSDirectory(environment.tmpDir())) { dictionary = new Dictionary(tmp, "hunspell", aff, dic); } AnalysisModule module = new AnalysisModule(environment, singletonList(new AnalysisPlugin() { diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index 39c327ddee228..86053b7db8b39 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -133,7 +133,6 @@ public class ClusterStateChanges { private static final Settings SETTINGS = Settings.builder().put(PATH_HOME_SETTING.getKey(), "dummy").build(); - private final TransportService transportService; private final AllocationService allocationService; private final ClusterService clusterService; private final FeatureService featureService; @@ -221,7 +220,7 @@ protected ExecutorService createThreadPoolExecutor() { // services featureService = new FeatureService(List.of()); - transportService = new TransportService( + TransportService transportService = new TransportService( SETTINGS, transport, threadPool, diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ShardCloseExecutorTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/ShardCloseExecutorTests.java new file mode 100644 index 0000000000000..d4699454a4b6e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ShardCloseExecutorTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.indices.cluster; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.concurrent.atomic.AtomicInteger; + +public class ShardCloseExecutorTests extends ESTestCase { + + public void testThrottling() { + // This defaults to the number of CPUs of the machine running the tests which could be either side of 10. + final var defaultProcessors = EsExecutors.NODE_PROCESSORS_SETTING.get(Settings.EMPTY).roundUp(); + ensureThrottling(Math.min(10, defaultProcessors), Settings.EMPTY); + + if (10 < defaultProcessors) { + ensureThrottling( + 10, + Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(10, defaultProcessors - 1)).build() + ); + } // else we cannot run this check, the machine running the tests doesn't have enough CPUs + + if (1 < defaultProcessors) { + final var fewProcessors = between(1, Math.min(10, defaultProcessors - 1)); + ensureThrottling(fewProcessors, Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), fewProcessors).build()); + } // else we cannot run this check, the machine running the tests has less than 2 whole CPUs (and we already tested the 1 case) + + // but in any case we can override the throttle regardless of its default value + final var override = between(1, defaultProcessors * 2); + ensureThrottling( + override, + Settings.builder().put(IndicesClusterStateService.CONCURRENT_SHARD_CLOSE_LIMIT.getKey(), override).build() + ); + } + + private static void ensureThrottling(int expectedLimit, Settings settings) { + final var tasksToRun = new ArrayList(expectedLimit + 1); + final var executor = new IndicesClusterStateService.ShardCloseExecutor(settings, tasksToRun::add); + final var runCount = new AtomicInteger(); + + // enqueue one more task than the throttling limit + for (int i = 0; i < expectedLimit + 1; i++) { + executor.execute(runCount::incrementAndGet); + } + + // check that we submitted tasks up to the expected limit, holding back the final task behind the throttle for now + assertEquals(expectedLimit, tasksToRun.size()); + + // now execute all the tasks one by one + for (int i = 0; i < expectedLimit + 1; i++) { + assertEquals(i, runCount.get()); + tasksToRun.get(i).run(); + assertEquals(i + 1, runCount.get()); + + // executing the first task enqueues the final task + assertEquals(expectedLimit + 1, tasksToRun.size()); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 78baa1699df00..bd4c5232a8ee4 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.FailureStoreMetrics; +import org.elasticsearch.action.bulk.IndexDocFailureStoreStatus; import org.elasticsearch.action.bulk.TransportBulkAction; import 
org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -118,7 +119,6 @@ import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; @@ -198,12 +198,18 @@ public void testExecuteIndexPipelineDoesNotExist() { .setPipeline("_id") .setFinalPipeline("_none"); + Boolean noRedirect = randomBoolean() ? false : null; + IndexDocFailureStoreStatus fsStatus = noRedirect == null + ? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN + : IndexDocFailureStoreStatus.NOT_ENABLED; + final SetOnce failure = new SetOnce<>(); - final BiConsumer failureHandler = (slot, e) -> { + final TriConsumer failureHandler = (slot, e, status) -> { failure.set(true); assertThat(slot, equalTo(0)); assertThat(e, instanceOf(IllegalArgumentException.class)); assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist")); + assertThat(status, equalTo(fsStatus)); }; @SuppressWarnings("unchecked") @@ -213,7 +219,7 @@ public void testExecuteIndexPipelineDoesNotExist() { 1, List.of(indexRequest), indexReq -> {}, - (s) -> false, + (s) -> noRedirect, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, @@ -1120,11 +1126,17 @@ public String getType() { IndexRequest indexRequest2 = new IndexRequest("_index").id("_id2").source(Map.of()).setPipeline(id).setFinalPipeline("_none"); bulkRequest.add(indexRequest2); - final BiConsumer failureHandler = (slot, e) -> { + Boolean noRedirect = randomBoolean() ? false : null; + IndexDocFailureStoreStatus fsStatus = noRedirect == null + ? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN + : IndexDocFailureStoreStatus.NOT_ENABLED; + + final TriConsumer failureHandler = (slot, e, status) -> { assertThat(e.getCause(), instanceOf(IllegalStateException.class)); assertThat(e.getCause().getMessage(), equalTo("error")); failure.set(true); assertThat(slot, equalTo(1)); + assertThat(status, equalTo(fsStatus)); }; @SuppressWarnings("unchecked") @@ -1134,7 +1146,7 @@ public String getType() { bulkRequest.numberOfActions(), bulkRequest.requests(), indexReq -> {}, - (s) -> false, + (s) -> noRedirect, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, @@ -1168,22 +1180,29 @@ public void testExecuteBulkPipelineDoesNotExist() { .setFinalPipeline("_none"); bulkRequest.add(indexRequest3); @SuppressWarnings("unchecked") - BiConsumer failureHandler = mock(BiConsumer.class); + TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); + + Boolean noRedirect = randomBoolean() ? false : null; + IndexDocFailureStoreStatus fsStatus = noRedirect == null + ? 
IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN + : IndexDocFailureStoreStatus.NOT_ENABLED; + ingestService.executeBulkRequest( bulkRequest.numberOfActions(), bulkRequest.requests(), indexReq -> {}, - (s) -> false, + (s) -> noRedirect, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - verify(failureHandler, times(1)).accept( + verify(failureHandler, times(1)).apply( argThat(item -> item == 2), - argThat(iae -> "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage())) + argThat(iae -> "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage())), + argThat(fsStatus::equals) ); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1202,7 +1221,7 @@ public void testExecuteSuccess() { .setPipeline("_id") .setFinalPipeline("_none"); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); ingestService.executeBulkRequest( @@ -1215,7 +1234,7 @@ public void testExecuteSuccess() { completionHandler, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - verify(failureHandler, never()).accept(any(), any()); + verifyNoInteractions(failureHandler); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1241,7 +1260,9 @@ public void testDynamicTemplates() throws Exception { .setPipeline("_id") .setFinalPipeline("_none"); CountDownLatch latch = new CountDownLatch(1); - final BiConsumer failureHandler = (v, e) -> { throw new AssertionError("must never fail", e); }; + final TriConsumer failureHandler = (v, e, s) -> { + throw new AssertionError("must never fail", e); + }; final BiConsumer completionHandler = (t, e) -> latch.countDown(); ingestService.executeBulkRequest( 1, @@ -1270,7 +1291,7 @@ public void testExecuteEmptyPipeline() throws Exception { .setPipeline("_id") .setFinalPipeline("_none"); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); ingestService.executeBulkRequest( @@ -1283,7 +1304,7 @@ public void testExecuteEmptyPipeline() throws Exception { completionHandler, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - verify(failureHandler, never()).accept(any(), any()); + verifyNoInteractions(failureHandler); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1329,7 +1350,7 @@ public void testExecutePropagateAllMetadataUpdates() throws Exception { .setPipeline("_id") .setFinalPipeline("_none"); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); ingestService.executeBulkRequest( @@ -1343,7 +1364,7 @@ public void testExecutePropagateAllMetadataUpdates() throws Exception { EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(any(), any()); - verify(failureHandler, never()).accept(any(), any()); + verifyNoInteractions(failureHandler); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); assertThat(indexRequest.index(), equalTo("update_index")); assertThat(indexRequest.id(), equalTo("update_id")); @@ -1379,21 
+1400,26 @@ public void testExecuteFailure() throws Exception { doThrow(new RuntimeException()).when(processor) .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); + + Boolean noRedirect = randomBoolean() ? false : null; + IndexDocFailureStoreStatus fsStatus = noRedirect == null + ? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN + : IndexDocFailureStoreStatus.NOT_ENABLED; ingestService.executeBulkRequest( 1, List.of(indexRequest), indexReq -> {}, - (s) -> false, + (s) -> noRedirect, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); - verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); + verify(failureHandler, times(1)).apply(eq(0), any(RuntimeException.class), eq(fsStatus)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1433,7 +1459,7 @@ public void testExecuteSuccessWithOnFailure() throws Exception { .setPipeline("_id") .setFinalPipeline("_none"); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); ingestService.executeBulkRequest( @@ -1446,7 +1472,7 @@ public void testExecuteSuccessWithOnFailure() throws Exception { completionHandler, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - verify(failureHandler, never()).accept(eq(0), any(IngestProcessorException.class)); + verifyNoInteractions(failureHandler); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1481,21 +1507,27 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { doThrow(new RuntimeException()).when(processor) .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); + + Boolean noRedirect = randomBoolean() ? false : null; + IndexDocFailureStoreStatus fsStatus = noRedirect == null + ? 
IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN + : IndexDocFailureStoreStatus.NOT_ENABLED; + ingestService.executeBulkRequest( 1, List.of(indexRequest), indexReq -> {}, - (s) -> false, + (s) -> noRedirect, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); - verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); + verify(failureHandler, times(1)).apply(eq(0), any(RuntimeException.class), eq(fsStatus)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1540,21 +1572,27 @@ public void testBulkRequestExecutionWithFailures() throws Exception { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @SuppressWarnings("unchecked") - BiConsumer requestItemErrorHandler = mock(BiConsumer.class); + TriConsumer requestItemErrorHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); + + Boolean noRedirect = randomBoolean() ? false : null; + IndexDocFailureStoreStatus fsStatus = noRedirect == null + ? IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN + : IndexDocFailureStoreStatus.NOT_ENABLED; + ingestService.executeBulkRequest( numRequest, bulkRequest.requests(), indexReq -> {}, - (s) -> false, + (s) -> noRedirect, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - verify(requestItemErrorHandler, times(numIndexRequests)).accept(anyInt(), argThat(e -> e.getCause().equals(error))); + verify(requestItemErrorHandler, times(numIndexRequests)).apply(anyInt(), argThat(e -> e.getCause().equals(error)), eq(fsStatus)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1586,7 +1624,7 @@ public void testExecuteFailureRedirection() throws Exception { @SuppressWarnings("unchecked") final TriConsumer redirectHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); ingestService.executeBulkRequest( @@ -1605,6 +1643,53 @@ public void testExecuteFailureRedirection() throws Exception { verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } + public void testExecuteFailureStatusOnFailureWithoutRedirection() throws Exception { + final CompoundProcessor processor = mockCompoundProcessor(); + IngestService ingestService = createWithProcessors( + Map.of( + "mock", + (factories, tag, description, config) -> processor, + "set", + (factories, tag, description, config) -> new FakeProcessor("set", "", "", (ingestDocument) -> fail()) + ) + ); + PutPipelineRequest putRequest1 = putJsonPipelineRequest("_id1", "{\"processors\": [{\"mock\" : {}}]}"); + // given that set -> fail() above, it's a failure if a document executes against this pipeline + PutPipelineRequest putRequest2 = putJsonPipelineRequest("_id2", "{\"processors\": [{\"set\" : {}}]}"); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty + ClusterState previousClusterState = clusterState; + clusterState = executePut(putRequest1, clusterState); + clusterState = executePut(putRequest2, 
clusterState); + ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(Map.of()) + .setPipeline("_id1") + .setFinalPipeline("_id2"); + doThrow(new RuntimeException()).when(processor) + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); + final Function redirectCheck = (idx) -> indexRequest.index().equals(idx) ? false : null; + @SuppressWarnings("unchecked") + final TriConsumer redirectHandler = mock(TriConsumer.class); + @SuppressWarnings("unchecked") + final TriConsumer failureHandler = mock(TriConsumer.class); + @SuppressWarnings("unchecked") + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + redirectCheck, + redirectHandler, + failureHandler, + completionHandler, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); + verifyNoInteractions(redirectHandler); + verify(failureHandler, times(1)).apply(eq(0), any(RuntimeException.class), eq(IndexDocFailureStoreStatus.NOT_ENABLED)); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); + } + public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception { final Processor processor = mock(Processor.class); when(processor.isAsync()).thenReturn(true); @@ -1639,7 +1724,7 @@ public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception @SuppressWarnings("unchecked") final TriConsumer redirectHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); ingestService.executeBulkRequest( @@ -1701,7 +1786,7 @@ public void testBulkRequestExecutionWithRedirectedFailures() throws Exception { @SuppressWarnings("unchecked") TriConsumer requestItemRedirectHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") - BiConsumer requestItemErrorHandler = mock(BiConsumer.class); + TriConsumer requestItemErrorHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); ingestService.executeBulkRequest( @@ -1762,7 +1847,7 @@ public void testBulkRequestExecution() throws Exception { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @SuppressWarnings("unchecked") - BiConsumer requestItemErrorHandler = mock(BiConsumer.class); + TriConsumer requestItemErrorHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); ingestService.executeBulkRequest( @@ -1776,7 +1861,7 @@ public void testBulkRequestExecution() throws Exception { EsExecutors.DIRECT_EXECUTOR_SERVICE ); - verify(requestItemErrorHandler, never()).accept(any(), any()); + verifyNoInteractions(requestItemErrorHandler); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); for (int i = 0; i < bulkRequest.requests().size(); i++) { DocWriteRequest docWriteRequest = bulkRequest.requests().get(i); @@ -1875,7 +1960,7 @@ public String execute() { indexReq -> {}, (s) -> false, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), - (integer, e) -> {}, + (integer, 
e, status) -> {}, (thread, e) -> {}, EsExecutors.DIRECT_EXECUTOR_SERVICE ); @@ -1939,7 +2024,7 @@ public void testStats() throws Exception { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); @@ -2157,7 +2242,7 @@ public String getDescription() { bulkRequest.add(indexRequest2); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final TriConsumer failureHandler = mock(TriConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") @@ -2172,7 +2257,7 @@ public String getDescription() { completionHandler, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - verify(failureHandler, never()).accept(any(), any()); + verifyNoInteractions(failureHandler); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); verify(dropHandler, times(1)).accept(1); } @@ -2250,7 +2335,7 @@ public void testCBORParsing() throws Exception { indexReq -> {}, (s) -> false, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), - (integer, e) -> {}, + (integer, e, status) -> {}, (thread, e) -> {}, EsExecutors.DIRECT_EXECUTOR_SERVICE ); @@ -2322,7 +2407,7 @@ public void testSetsRawTimestamp() { indexReq -> {}, (s) -> false, (slot, targetIndex, e) -> fail("Should not be redirecting failures"), - (integer, e) -> {}, + (integer, e, status) -> {}, (thread, e) -> {}, EsExecutors.DIRECT_EXECUTOR_SERVICE ); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java index dc3fb2a473f43..d9189c56e6689 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java @@ -19,6 +19,7 @@ import java.util.Map; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.sameInstance; public class IngestStatsTests extends ESTestCase { @@ -31,6 +32,11 @@ public void testSerialization() throws IOException { assertIngestStats(ingestStats, serializedStats); } + public void testIdentitySerialization() throws IOException { + IngestStats serializedStats = serialize(IngestStats.IDENTITY); + assertThat(serializedStats, sameInstance(IngestStats.IDENTITY)); + } + public void testStatsMerge() { var first = randomStats(); var second = randomStats(); diff --git a/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java b/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java index 32edcc0ad82aa..c0e1c1143ef42 100644 --- a/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java +++ b/server/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java @@ -57,7 +57,7 @@ public void testEmptySettings() { assertEquals(defaultNodeName, settings.get("node.name")); assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings); - String configDir = env.configFile().toString(); + String configDir = env.configDir().toString(); assertTrue(configDir, configDir.startsWith(home)); assertEquals("elasticsearch", settings.get("cluster.name")); } diff --git 
a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java index 97158e27b8528..8129f67947cf9 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -52,7 +52,7 @@ public class PluginsLoaderTests extends ESTestCase { static PluginsLoader newPluginsLoader(Settings settings) { return PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(), false ); @@ -121,7 +121,7 @@ public void testStablePluginWithNativeAccess() throws Exception { var pluginsLoader = PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(STABLE_PLUGIN_NAME, Set.of(STABLE_PLUGIN_MODULE_NAME)), false ); @@ -182,7 +182,7 @@ public void testModularPluginLoadingWithNativeAccess() throws Exception { var pluginsLoader = PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(MODULAR_PLUGIN_NAME, Set.of(MODULAR_PLUGIN_MODULE_NAME)), false ); diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index e76994f69c01e..ef12e767c1e28 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.plugin.analysis.CharFilterFactory; import org.elasticsearch.plugins.scanners.PluginInfo; import org.elasticsearch.plugins.spi.BarPlugin; @@ -70,7 +71,7 @@ static PluginsService newPluginsService(Settings settings) { null, PluginsLoader.createPluginsLoader( Set.of(), - PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()), Map.of(), false ) @@ -873,6 +874,7 @@ public Reader create(Reader reader) { } public void testCanCreateAClassLoader() { + assumeTrue("security manager must be available", RuntimeVersionFeature.isSecurityManagerAvailable()); assertEquals( "access denied (\"java.lang.RuntimePermission\" \"createClassLoader\")", expectThrows(AccessControlException.class, () -> new Loader(this.getClass().getClassLoader())).getMessage() diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index f5ebacde08820..250d10855b23f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -40,9 +40,12 @@ import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; import static org.elasticsearch.repositories.RepositoryData.MISSING_UUID; 
+import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.not; /** * Tests for the {@link RepositoryData} class. @@ -430,6 +433,19 @@ public void testFailsIfMinVersionNotSatisfied() throws IOException { } } + public void testToString() { + final var repositoryData = generateRandomRepoData(); + assertThat( + repositoryData.toString(), + allOf( + containsString("RepositoryData"), + containsString(repositoryData.getUuid()), + containsString(Long.toString(repositoryData.getGenId())), + not(containsString("@")) // not the default Object#toString which does a very expensive hashcode computation + ) + ); + } + public static RepositoryData generateRandomRepoData() { final int numIndices = randomIntBetween(1, 30); final List indices = new ArrayList<>(numIndices); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java index 20ea43910e68d..e973073efb184 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.reservedstate.service; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.SimpleHealthIndicatorDetails; @@ -21,6 +22,7 @@ import static org.elasticsearch.health.HealthStatus.GREEN; import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.DESCRIPTION_LENGTH_LIMIT_KEY; import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.FAILURE_SYMPTOM; import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.INACTIVE_SYMPTOM; import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.NO_CHANGES_SYMPTOM; @@ -37,7 +39,7 @@ public class FileSettingsHealthIndicatorServiceTests extends ESTestCase { @Before public void initialize() { - healthIndicatorService = new FileSettingsHealthIndicatorService(); + healthIndicatorService = new FileSettingsHealthIndicatorService(Settings.EMPTY); } public void testInitiallyGreen() {} @@ -101,4 +103,28 @@ public void testGreenYellowYellowGreen() { healthIndicatorService.calculate(false, null) ); } + + public void testDescriptionIsTruncated() { + checkTruncatedDescription(9, "123456789", "123456789"); + checkTruncatedDescription(8, "123456789", "1234567…"); + checkTruncatedDescription(1, "12", "…"); + } + + private void checkTruncatedDescription(int lengthLimit, String description, String expectedTruncatedDescription) { + var service = new FileSettingsHealthIndicatorService(Settings.builder().put(DESCRIPTION_LENGTH_LIMIT_KEY, lengthLimit).build()); + service.startOccurred(); + service.changeOccurred(); + service.failureOccurred(description); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new 
SimpleHealthIndicatorDetails(Map.of("failure_streak", 1L, "most_recent_failure", expectedTruncatedDescription)), + STALE_SETTINGS_IMPACT, + List.of() + ), + service.calculate(false, null) + ); + } } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 7cb12c1b316e8..0d7daf8315231 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -127,7 +127,7 @@ public void setUp() throws Exception { clusterService.getMasterService().setClusterStateSupplier(() -> clusterState); env = newEnvironment(Settings.EMPTY); - Files.createDirectories(env.configFile()); + Files.createDirectories(env.configDir()); ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); @@ -138,7 +138,7 @@ public void setUp() throws Exception { List.of(new ReservedClusterSettingsAction(clusterSettings)) ) ); - healthIndicatorService = spy(new FileSettingsHealthIndicatorService()); + healthIndicatorService = spy(new FileSettingsHealthIndicatorService(Settings.EMPTY)); fileSettingsService = spy(new FileSettingsService(clusterService, controller, env, healthIndicatorService)); } @@ -176,7 +176,7 @@ public void testStartStop() { public void testOperatorDirName() { Path operatorPath = fileSettingsService.watchedFileDir(); - assertTrue(operatorPath.startsWith(env.configFile())); + assertTrue(operatorPath.startsWith(env.configDir())); assertTrue(operatorPath.endsWith("operator")); Path operatorSettingsFile = fileSettingsService.watchedFile(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java index 2d0622dbb6322..230495db7327b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java @@ -92,7 +92,8 @@ private ValuesSourceConfig getVSConfig( null, false, null, - null + null, + false ); return ValuesSourceConfig.resolveFieldOnly(ft, context); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 8a72f8af7035c..06600441b0a44 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -343,10 +343,7 @@ private void testRandomCase(boolean forceMerge, boolean missingBucket, int index final SortedDocsProducer docsProducer = sources[0].createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()); for (LeafReaderContext leafReaderContext : reader.leaves()) { if (docsProducer != null && withProducer) { - assertEquals( - DocIdSet.EMPTY, - docsProducer.processLeaf(new MatchAllDocsQuery(), queue, leafReaderContext, false) - ); + assertEquals(DocIdSet.EMPTY, docsProducer.processLeaf(queue, leafReaderContext, false)); } else { final LeafBucketCollector leafCollector = new LeafBucketCollector() { @Override diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java index ba186695bcdae..e7d19c0f56dbc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java @@ -1494,7 +1494,8 @@ public void testDocValuesFieldExistsForNumber() throws IOException { null, false, null, - null + null, + false ); docValuesFieldExistsTestCase(new ExistsQueryBuilder("f"), ft, true, i -> { final LuceneDocument document = new LuceneDocument(); @@ -1517,7 +1518,8 @@ public void testDocValuesFieldExistsForNumberWithoutData() throws IOException { null, false, null, - null + null, + false ) ); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java index 32831f46c7a1b..ab92ea8593445 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java @@ -311,7 +311,8 @@ public void testUnboundedRanges() throws IOException { null, false, null, - null + null, + false ) ) ); @@ -426,7 +427,8 @@ public void testNotFitIntoDouble() throws IOException { null, false, null, - null + null, + false ); long start = 2L << 54; // Double stores 53 bits of mantissa, so we aggregate a bunch of bigger values @@ -707,7 +709,8 @@ private void testCase( null, false, null, - null + null, + false ); RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("test_range_agg"); aggregationBuilder.field(NUMBER_FIELD_NAME); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java index ae4ed3568683a..4151beda6ba0c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java @@ -19,7 +19,9 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; @@ -27,7 +29,7 @@ import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; +import static org.hamcrest.Matchers.is; public class ExtendedStatsAggregatorTests extends AggregatorTestCase { private static final double TOLERANCE = 1e-5; @@ -304,6 +306,13 @@ public void testCase( testCase(buildIndex, verify, new AggTestConfig(aggBuilder, ft)); } + @Override + protected void verifyOutputFieldNames(T aggregationBuilder, V agg) + throws IOException { + assertTrue(aggregationBuilder.getOutputFieldNames().isPresent()); + 
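// getOutputFieldNames() is how an aggregation advertises the field names it emits; presumably this
+        // is what lets its output be indexed downstream (compare asIndexableMap in InternalExtendedStatsTests),
+        // and for extended_stats the advertised names are expected to match the OUTPUT_FORMAT constant exactly
+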
assertThat(aggregationBuilder.getOutputFieldNames().get(), is(InternalExtendedStats.Fields.OUTPUT_FORMAT)); + } + static class ExtendedSimpleStatsAggregator extends StatsAggregatorTests.SimpleStatsAggregator { double sumOfSqrs = 0; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java index bbb007c9155ba..b919428c00ef9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java @@ -9,16 +9,29 @@ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentFactory; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isA; +import static org.hamcrest.Matchers.notNullValue; public class InternalExtendedStatsTests extends InternalAggregationTestCase { @@ -209,4 +222,75 @@ private void verifySumOfSqrsOfDoubles(double[] values, double expectedSumOfSqrs, InternalExtendedStats reduced = (InternalExtendedStats) InternalAggregationTestCase.reduce(aggregations, null); assertEquals(expectedSumOfSqrs, reduced.getSumOfSquares(), delta); } + + @SuppressWarnings(value = "unchecked") + public void testAsMapMatchesXContent() throws IOException { + var stats = new InternalExtendedStats( + "testAsMapIsSameAsXContent", + randomLongBetween(1, 50), + randomDoubleBetween(1, 50, true), + randomDoubleBetween(1, 50, true), + randomDoubleBetween(1, 50, true), + randomDoubleBetween(1, 50, true), + sigma, + DocValueFormat.RAW, + Map.of() + ); + + var outputMap = stats.asIndexableMap(); + assertThat(outputMap, notNullValue()); + + Map xContentMap; + try (var builder = XContentFactory.jsonBuilder()) { + builder.startObject(); + stats.doXContentBody(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + xContentMap = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); + } + assertThat(xContentMap, notNullValue()); + + // serializing -> deserializing converts the long to an int, so we convert it back to test + var countMetricName = InternalStats.Metrics.count.name(); + var xContentCount = xContentMap.get(countMetricName); + assertThat(xContentCount, isA(Integer.class)); + assertThat(((Integer) xContentCount).longValue(), equalTo(outputMap.get(countMetricName))); + + // verify the entries in the bounds map are similar + var xContentStdDevBounds = (Map) xContentMap.get(InternalExtendedStats.Fields.STD_DEVIATION_BOUNDS); + var outputStdDevBounds = (Map) outputMap.get(InternalExtendedStats.Fields.STD_DEVIATION_BOUNDS); + xContentStdDevBounds.forEach((key, value) -> { 
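+            // xContent may render non-finite doubles as strings, and non-finite stats are omitted from the
+            // indexable map (see testIndexableMapExcludesNaN below), so only values that parse to a finite
+            // double are compared against the indexable map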
+            if (value instanceof String == false || Double.isFinite(Double.parseDouble(value.toString()))) {
+                assertThat(outputStdDevBounds.get(key), equalTo(value));
+            }
+        });
+
+        // verify all the other entries that are not "std_deviation_bounds" or "count"
+        Predicate<Map.Entry<String, Object>> notCountOrStdDevBounds = Predicate.not(
+            e -> e.getKey().equals(countMetricName) || e.getKey().equals(InternalExtendedStats.Fields.STD_DEVIATION_BOUNDS)
+        );
+        xContentMap.entrySet().stream().filter(notCountOrStdDevBounds).forEach(e -> {
+            if (e.getValue() instanceof String == false || Double.isFinite(Double.parseDouble(e.getValue().toString()))) {
+                assertThat(outputMap.get(e.getKey()), equalTo(e.getValue()));
+            }
+        });
+    }
+
+    public void testIndexableMapExcludesNaN() {
+        var stats = new InternalExtendedStats(
+            "testAsMapIsSameAsXContent",
+            randomLongBetween(1, 50),
+            Double.NaN,
+            Double.NaN,
+            Double.NaN,
+            Double.NaN,
+            sigma,
+            DocValueFormat.RAW,
+            Map.of()
+        );
+
+        var outputMap = stats.asIndexableMap();
+        assertThat(outputMap, is(aMapWithSize(1)));
+        assertThat(outputMap, hasKey(InternalStats.Metrics.count.name()));
+        assertThat(outputMap.get(InternalStats.Metrics.count.name()), is(stats.getCount()));
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java b/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java
index 1261c8300902f..336fae2cfec2f 100644
--- a/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java
@@ -154,7 +154,8 @@ public void testBuild() throws IOException {
             null,
             false,
             null,
-            null
+            null,
+            false
         );
         when(searchExecutionContext.getFieldType("field")).thenReturn(numberFieldType);
         IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> builder.build(searchExecutionContext));
@@ -172,7 +173,8 @@
             null,
             false,
             null,
-            null
+            null,
+            false
         );
         when(searchExecutionContext.getFieldType("field")).thenReturn(numberFieldType);
         builder.setInnerHits(new InnerHitBuilder().setName("field"));
diff --git a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java
index 0abf34d800dca..d28bb98547cec 100644
--- a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java
+++ b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java
@@ -86,14 +86,14 @@ public void testSingleKnnSearch() throws IOException {
         List<QueryProfileShardResult> queryProfileShardResult = searchProfileDfsPhaseResult.getQueryProfileShardResult();
         assertNotNull(queryProfileShardResult);
         CollectorResult collectorResult = queryProfileShardResult.get(0).getCollectorResult();
-        assertEquals("SimpleTopScoreDocCollector", (collectorResult.getName()));
+        assertEquals("TopScoreDocCollector", (collectorResult.getName()));
         assertEquals("search_top_hits", (collectorResult.getReason()));
         assertTrue(collectorResult.getTime() > 0);
         List<CollectorResult> children = collectorResult.getChildrenResults();
         if (children.size() > 0) {
             long totalTime = 0L;
             for (CollectorResult child : children) {
-                assertEquals("SimpleTopScoreDocCollector", (child.getName()));
+                assertEquals("TopScoreDocCollector", (child.getName()));
                 assertEquals("search_top_hits", (child.getReason()));
                 totalTime += child.getTime();
             }
diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java
b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java index b728d40900570..aa34968813229 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java @@ -135,7 +135,7 @@ public void testManagerWithSearcher() throws IOException { assertEquals(numDocs, topDocs.totalHits.value()); CollectorResult result = profileCollectorManager.getCollectorTree(); assertEquals("profiler_reason", result.getReason()); - assertEquals("SimpleTopScoreDocCollector", result.getName()); + assertEquals("TopScoreDocCollector", result.getName()); assertTrue(result.getTime() > 0); } } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java index b417f7adbc8b7..6b38e05bdc4e3 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.Scorable; @@ -38,14 +39,29 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CharsRefBuilder; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchShardTask; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.text.Text; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.Suggester; +import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.test.TestSearchContext; +import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -275,6 +291,117 @@ private TestSearchContext createSearchContext(Query query, int size) throws IOEx return context; } + public void testSuggestOnlyWithTimeout() throws Exception { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().suggest(new SuggestBuilder()); + try (SearchContext context = createSearchContextWithSuggestTimeout(searchSourceBuilder)) { + assertTrue(context.hasOnlySuggest()); + QueryPhase.execute(context); + assertTrue(context.queryResult().searchTimedOut()); + assertNull(context.queryResult().suggest()); + assertNotNull(context.queryResult().topDocs()); + 
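/* The TestSuggester wired in below always trips the searcher's timeout, so with allowPartialSearchResults(true) the phase degrades rather than fails: the result is flagged timed-out, the suggest section stays null, and an empty zero-hit TopDocs is still populated, which is what the surrounding assertions pin down. */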
assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + } + } + + public void testSuggestAndQueryWithSuggestTimeout() throws Exception { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().suggest(new SuggestBuilder()).query(new MatchAllQueryBuilder()); + try (SearchContext context = createSearchContextWithSuggestTimeout(searchSourceBuilder)) { + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); + assertFalse(context.hasOnlySuggest()); + QueryPhase.execute(context); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), Matchers.greaterThan(0L)); + assertTrue(context.queryResult().searchTimedOut()); + assertNull(context.queryResult().suggest()); + } + } + + private TestSearchContext createSearchContextWithSuggestTimeout(SearchSourceBuilder searchSourceBuilder) throws IOException { + ContextIndexSearcher contextIndexSearcher = newContextSearcher(reader); + SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext(); + suggestionSearchContext.addSuggestion("suggestion", new TestSuggestionContext(new TestSuggester(contextIndexSearcher), null)); + TestSearchContext context = new TestSearchContext(null, indexShard, contextIndexSearcher) { + @Override + public SuggestionSearchContext suggest() { + return suggestionSearchContext; + } + + @Override + public ShardSearchRequest request() { + SearchRequest searchRequest = new SearchRequest(); + searchRequest.allowPartialSearchResults(true); + searchRequest.source(searchSourceBuilder); + return new ShardSearchRequest( + OriginalIndices.NONE, + searchRequest, + indexShard.shardId(), + 0, + 1, + AliasFilter.EMPTY, + 1F, + 0, + null + ); + } + }; + context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap())); + return context; + } + + private static final class TestSuggester extends Suggester<TestSuggestionContext> { + private final ContextIndexSearcher contextIndexSearcher; + + TestSuggester(ContextIndexSearcher contextIndexSearcher) { + this.contextIndexSearcher = contextIndexSearcher; + } + + @Override + protected TestSuggestion innerExecute( + String name, + TestSuggestionContext suggestion, + IndexSearcher searcher, + CharsRefBuilder spare + ) { + contextIndexSearcher.throwTimeExceededException(); + throw new AssertionError("should have thrown TimeExceededException"); + } + + @Override + protected TestSuggestion emptySuggestion(String name, TestSuggestionContext suggestion, CharsRefBuilder spare) { + return new TestSuggestion(); + } + } + + private static final class TestSuggestionContext extends SuggestionSearchContext.SuggestionContext { + TestSuggestionContext(Suggester<?> suggester, SearchExecutionContext searchExecutionContext) { + super(suggester, searchExecutionContext); + } + } + + private static final class TestSuggestion extends Suggest.Suggestion< + Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option>> { + TestSuggestion() { + super("suggestion", 10); + } + + @Override + protected Entry<Suggest.Suggestion.Entry.Option> newEntry(StreamInput in) { + return new TestSuggestionEntry(); + } + + @Override + public String getWriteableName() { + return "suggestion"; + } + } + + private static final class TestSuggestionEntry extends Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> { + @Override + protected Option newOption(StreamInput in) { + return new Option(new Text("text"), 1f) { + }; + } + } + private static class Score extends Scorable { float score; diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index
061151a24c455..503ca22b3c313 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2240,7 +2240,6 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { .scriptService(scriptService) .clusterService(clusterService) .client(client) - .featureService(new FeatureService(List.of())) .metaStateService(new MetaStateService(nodeEnv, namedXContentRegistry)) .mapperMetrics(MapperMetrics.NOOP) .build(); diff --git a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java index 9b56cd3bde53c..cfb3cc68e035f 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.transport.InboundDecoder.ChannelType; @@ -126,105 +125,6 @@ public void testDecode() throws IOException { } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // can delete test in v9 - public void testDecodePreHeaderSizeVariableInt() throws IOException { - Compression.Scheme compressionScheme = randomFrom(Compression.Scheme.DEFLATE, Compression.Scheme.DEFLATE, null); - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final TransportVersion preHeaderVariableInt = TransportHandshaker.V7_HANDSHAKE_VERSION; - final String contentValue = randomAlphaOfLength(100); - // 8.0 is only compatible with handshakes on a pre-variable int version - final OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(contentValue), - preHeaderVariableInt, - action, - requestId, - true, - compressionScheme - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference totalBytes = message.serialize(os); - int partialHeaderSize = TcpHeader.headerSize(preHeaderVariableInt); - - InboundDecoder decoder = new InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(totalBytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(partialHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(preHeaderVariableInt, header.getVersion()); - if (compressionScheme == null) { - assertFalse(header.isCompressed()); - } else { - assertTrue(header.isCompressed()); - } - assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - - final BytesReference bytes2 = totalBytes.slice(bytesConsumed, totalBytes.length() - bytesConsumed); - final ReleasableBytesReference releasable2 = wrapAsReleasable(bytes2); - int bytesConsumed2 = decoder.decode(releasable2, fragments::add); - if (compressionScheme == null) { - assertEquals(2, fragments.size()); - } else { - assertEquals(3, fragments.size()); - final Object body = fragments.get(1); - assertThat(body, 
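/* When the scheme is DEFLATE the decoder emits an extra decompressed body fragment between the header and END_CONTENT (hence three fragments instead of two), and that fragment is a ReleasableBytesReference which the consumer must close to return its pages to the recycler. */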
instanceOf(ReleasableBytesReference.class)); - ((ReleasableBytesReference) body).close(); - } - assertEquals(InboundDecoder.END_CONTENT, fragments.get(fragments.size() - 1)); - assertEquals(totalBytes.length() - bytesConsumed, bytesConsumed2); - } - } - - public void testDecodeHandshakeV7Compatibility() throws IOException { - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final String headerKey = randomAlphaOfLength(10); - final String headerValue = randomAlphaOfLength(20); - threadContext.putHeader(headerKey, headerValue); - TransportVersion handshakeCompat = TransportHandshaker.V7_HANDSHAKE_VERSION; - OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(randomAlphaOfLength(100)), - handshakeCompat, - action, - requestId, - true, - null - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference bytes = message.serialize(os); - int totalHeaderSize = TcpHeader.headerSize(handshakeCompat); - - InboundDecoder decoder = new InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(bytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(totalHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(handshakeCompat, header.getVersion()); - assertFalse(header.isCompressed()); - assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - // TODO: On 9.0 this will be true because all compatible versions with contain the variable header int - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - } - - } - public void testDecodeHandshakeV8Compatibility() throws IOException { doHandshakeCompatibilityTest(TransportHandshaker.V8_HANDSHAKE_VERSION, null); doHandshakeCompatibilityTest(TransportHandshaker.V8_HANDSHAKE_VERSION, Compression.Scheme.DEFLATE); @@ -453,46 +353,6 @@ public void testCompressedDecode() throws IOException { } - public void testCompressedDecodeHandshakeCompatibility() throws IOException { - String action = "test-request"; - long requestId = randomNonNegativeLong(); - final String headerKey = randomAlphaOfLength(10); - final String headerValue = randomAlphaOfLength(20); - threadContext.putHeader(headerKey, headerValue); - TransportVersion handshakeCompat = TransportHandshaker.V7_HANDSHAKE_VERSION; - OutboundMessage message = new OutboundMessage.Request( - threadContext, - new TestRequest(randomAlphaOfLength(100)), - handshakeCompat, - action, - requestId, - true, - Compression.Scheme.DEFLATE - ); - - try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) { - final BytesReference bytes = message.serialize(os); - int totalHeaderSize = TcpHeader.headerSize(handshakeCompat); - - InboundDecoder decoder = new InboundDecoder(recycler); - final ArrayList fragments = new ArrayList<>(); - final ReleasableBytesReference releasable1 = wrapAsReleasable(bytes); - int bytesConsumed = decoder.decode(releasable1, fragments::add); - assertEquals(totalHeaderSize, bytesConsumed); - assertTrue(releasable1.hasReferences()); - - final Header header = (Header) fragments.get(0); - assertEquals(requestId, header.getRequestId()); - assertEquals(handshakeCompat, header.getVersion()); - assertTrue(header.isCompressed()); - assertTrue(header.isHandshake()); - assertTrue(header.isRequest()); - // TODO: On 
9.0 this will be true because all compatible versions with contain the variable header int - assertTrue(header.needsToReadVariableHeader()); - fragments.clear(); - } - } - public void testVersionIncompatibilityDecodeException() throws IOException { String action = "test-request"; long requestId = randomNonNegativeLong(); diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index cb266c58d70d5..3ec248e0d8d9a 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -290,7 +290,12 @@ public void testLogsSlowInboundProcessing() throws Exception { ); BytesStreamOutput byteData = new BytesStreamOutput(); TaskId.EMPTY_TASK_ID.writeTo(byteData); - TransportVersion.writeVersion(remoteVersion, byteData); + // simulate bytes of a transport handshake: vInt transport version then release version string + try (var payloadByteData = new BytesStreamOutput()) { + TransportVersion.writeVersion(remoteVersion, payloadByteData); + payloadByteData.writeString(randomIdentifier()); + byteData.writeBytesReference(payloadByteData.bytes()); + } final InboundMessage requestMessage = new InboundMessage( requestHeader, ReleasableBytesReference.wrap(byteData.bytes()), diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java index cb97d3dd6f8da..3f498df0a88de 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java @@ -100,6 +100,12 @@ public void testGetConnection() { proxyNodes.add(((ProxyConnection) remoteConnectionManager.getConnection(node4)).getConnection().getNode().getId()); assertThat(proxyNodes, containsInAnyOrder("node-2")); + + assertWarnings( + "The remote cluster connection to [remote-cluster] is using the certificate-based security model. " + + "The certificate-based security model is deprecated and will be removed in a future major version. " + + "Migrate the remote cluster from the certificate-based to the API key-based security model." 
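/* assertWarnings(...) verifies the deprecation warnings captured on the test thread context. A plausible emission site (an assumption; the production change is not part of this hunk) is a DeprecationLogger call in RemoteConnectionManager along the lines of deprecationLogger.warn(DeprecationCategory.SECURITY, "remote_cluster_certificate_model", "The remote cluster connection to [{}] is using the certificate-based security model. ...", clusterAlias), fired once per certificate-based remote connection. */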
+ ); } public void testDisconnectedException() { @@ -124,7 +130,8 @@ public void testResolveRemoteClusterAlias() throws ExecutionException, Interrupt assertTrue(future.isDone()); Transport.Connection remoteConnection = remoteConnectionManager.getConnection(remoteNode1); - assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(remoteConnection).get(), equalTo("remote-cluster")); + final String remoteClusterAlias = "remote-cluster"; + assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(remoteConnection).get(), equalTo(remoteClusterAlias)); Transport.Connection localConnection = mock(Transport.Connection.class); assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(localConnection).isPresent(), equalTo(false)); @@ -132,11 +139,19 @@ public void testResolveRemoteClusterAlias() throws ExecutionException, Interrupt DiscoveryNode remoteNode2 = DiscoveryNodeUtils.create("remote-node-2", address); Transport.Connection proxyConnection = remoteConnectionManager.getConnection(remoteNode2); assertThat(proxyConnection, instanceOf(ProxyConnection.class)); - assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(proxyConnection).get(), equalTo("remote-cluster")); + assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(proxyConnection).get(), equalTo(remoteClusterAlias)); PlainActionFuture future2 = new PlainActionFuture<>(); remoteConnectionManager.openConnection(remoteNode1, null, future2); - assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(future2.get()).get(), equalTo("remote-cluster")); + assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(future2.get()).get(), equalTo(remoteClusterAlias)); + + assertWarnings( + "The remote cluster connection to [" + + remoteClusterAlias + + "] is using the certificate-based security model. " + + "The certificate-based security model is deprecated and will be removed in a future major version. " + + "Migrate the remote cluster from the certificate-based to the API key-based security model." 
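/* The Optional contract under test: connections opened through the remote-cluster manager (direct, proxy-wrapped, or freshly opened) all resolve to the configured alias, while a connection the manager never produced resolves to Optional.empty(); hoisting "remote-cluster" into remoteClusterAlias keeps the repeated expectation in one place. */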
+ ); } public void testRewriteHandshakeAction() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java index de44ca70f2005..2bac41199ab83 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.core.UpdateForV10; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -38,56 +37,6 @@ public class TransportHandshakerRawMessageTests extends ESSingleNodeTestCase { - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remove support for v7 handshakes in v9 - public void testV7Handshake() throws Exception { - final BytesRef handshakeRequestBytes; - final var requestId = randomNonNegativeLong(); - try (var outputStream = new BytesStreamOutput()) { - outputStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - outputStream.writeLong(requestId); - outputStream.writeByte(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0))); - outputStream.writeInt(TransportHandshaker.V7_HANDSHAKE_VERSION.id()); - outputStream.writeByte((byte) 0); // no request headers; - outputStream.writeByte((byte) 0); // no response headers; - outputStream.writeStringArray(new String[] { "x-pack" }); // one feature - outputStream.writeString("internal:tcp/handshake"); - outputStream.writeByte((byte) 0); // no parent task ID; - - final var requestNodeTransportVersionId = TransportVersionUtils.randomCompatibleVersion(random()).id(); - assertThat(requestNodeTransportVersionId, allOf(greaterThanOrEqualTo(1 << 22), lessThan(1 << 28))); // 4-byte vInt - outputStream.writeByte((byte) 4); // payload length - outputStream.writeVInt(requestNodeTransportVersionId); - - handshakeRequestBytes = outputStream.bytes().toBytesRef(); - } - - final BytesRef handshakeResponseBytes; - try (var socket = openTransportConnection()) { - var streamOutput = new OutputStreamStreamOutput(socket.getOutputStream()); - streamOutput.write("ES".getBytes(StandardCharsets.US_ASCII)); - streamOutput.writeInt(handshakeRequestBytes.length); - streamOutput.writeBytes(handshakeRequestBytes.bytes, handshakeRequestBytes.offset, handshakeRequestBytes.length); - streamOutput.flush(); - - var streamInput = new InputStreamStreamInput(socket.getInputStream()); - assertEquals((byte) 'E', streamInput.readByte()); - assertEquals((byte) 'S', streamInput.readByte()); - var responseLength = streamInput.readInt(); - handshakeResponseBytes = streamInput.readBytesRef(responseLength); - } - - try (var inputStream = new BytesArray(handshakeResponseBytes).streamInput()) { - assertEquals(requestId, inputStream.readLong()); - assertEquals(TransportStatus.setResponse(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); - assertEquals(TransportHandshaker.V7_HANDSHAKE_VERSION.id(), inputStream.readInt()); - assertEquals((byte) 0, inputStream.readByte()); // no request headers - assertEquals((byte) 0, inputStream.readByte()); // no response headers - inputStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - assertEquals(TransportVersion.current().id(), 
inputStream.readVInt()); - assertEquals(-1, inputStream.read()); - } - } - @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove support for v8 handshakes in v10 public void testV8Handshake() throws Exception { final BytesRef handshakeRequestBytes; @@ -223,11 +172,10 @@ public void testOutboundHandshake() throws Exception { try (var inputStream = new BytesArray(handshakeRequestBytes).streamInput()) { assertThat(inputStream.readLong(), greaterThan(0L)); assertEquals(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); - assertEquals(TransportHandshaker.V8_HANDSHAKE_VERSION.id(), inputStream.readInt()); - assertEquals(0x1a, inputStream.readInt()); // length of variable-length header, always 0x1a + assertEquals(TransportHandshaker.V9_HANDSHAKE_VERSION.id(), inputStream.readInt()); + assertEquals(0x19, inputStream.readInt()); // length of variable-length header, always 0x19 assertEquals((byte) 0, inputStream.readByte()); // no request headers assertEquals((byte) 0, inputStream.readByte()); // no response headers - assertEquals((byte) 0, inputStream.readByte()); // no features assertEquals("internal:tcp/handshake", inputStream.readString()); assertEquals((byte) 0, inputStream.readByte()); // no parent task inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); @@ -236,8 +184,9 @@ public void testOutboundHandshake() throws Exception { } try (var inputStream = new BytesArray(payloadBytes).streamInput()) { - inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + inputStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); assertEquals(TransportVersion.current().id(), inputStream.readVInt()); + assertEquals(Build.current().version(), inputStream.readString()); assertEquals(-1, inputStream.read()); } } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java index d260d66157651..32f088976b273 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java @@ -8,24 +8,31 @@ */ package org.elasticsearch.transport; +import org.apache.logging.log4j.Level; +import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.Version; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import java.io.IOException; import java.util.Collections; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -39,8 
+46,8 @@ public class TransportHandshakerTests extends ESTestCase { private TestThreadPool threadPool; private TransportHandshaker.HandshakeRequestSender requestSender; - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - private static final TransportVersion HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V8_HANDSHAKE_VERSION; + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // new handshake version required in v10 + private static final TransportVersion HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V9_HANDSHAKE_VERSION; @Override public void setUp() throws Exception { @@ -93,6 +100,40 @@ public void testHandshakeRequestAndResponse() throws IOException { assertEquals(TransportVersion.current(), versionFuture.actionGet()); } + @TestLogging(reason = "testing WARN logging", value = "org.elasticsearch.transport.TransportHandshaker:WARN") + public void testIncompatibleHandshakeRequest() throws IOException { + TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( + getRandomIncompatibleTransportVersion(), + randomIdentifier() + ); + BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + bytesStreamOutput.setTransportVersion(HANDSHAKE_REQUEST_VERSION); + handshakeRequest.writeTo(bytesStreamOutput); + StreamInput input = bytesStreamOutput.bytes().streamInput(); + input.setTransportVersion(HANDSHAKE_REQUEST_VERSION); + final TestTransportChannel channel = new TestTransportChannel(ActionListener.running(() -> fail("should not complete"))); + + MockLog.assertThatLogger( + () -> assertThat( + expectThrows(IllegalStateException.class, () -> handshaker.handleHandshake(channel, randomNonNegativeLong(), input)) + .getMessage(), + allOf( + containsString("Rejecting unreadable transport handshake"), + containsString("[" + handshakeRequest.releaseVersion + "/" + handshakeRequest.transportVersion + "]"), + containsString("[" + Build.current().version() + "/" + TransportVersion.current() + "]"), + containsString("which has an incompatible wire format") + ) + ), + TransportHandshaker.class, + new MockLog.SeenEventExpectation( + "warning", + TransportHandshaker.class.getCanonicalName(), + Level.WARN, + "Rejecting unreadable transport handshake * incompatible wire format." + ) + ); + } + public void testHandshakeResponseFromOlderNode() throws Exception { final PlainActionFuture versionFuture = new PlainActionFuture<>(); final long reqId = randomNonNegativeLong(); @@ -108,6 +149,54 @@ public void testHandshakeResponseFromOlderNode() throws Exception { assertEquals(remoteVersion, versionFuture.result()); } + @TestLogging(reason = "testing WARN logging", value = "org.elasticsearch.transport.TransportHandshaker:WARN") + public void testHandshakeResponseFromOlderNodeWithPatchedProtocol() { + final PlainActionFuture versionFuture = new PlainActionFuture<>(); + final long reqId = randomNonNegativeLong(); + handshaker.sendHandshake(reqId, node, channel, SAFE_AWAIT_TIMEOUT, versionFuture); + TransportResponseHandler handler = handshaker.removeHandlerForHandshake(reqId); + + assertFalse(versionFuture.isDone()); + + final var handshakeResponse = new TransportHandshaker.HandshakeResponse( + getRandomIncompatibleTransportVersion(), + randomIdentifier() + ); + + MockLog.assertThatLogger( + () -> handler.handleResponse(handshakeResponse), + TransportHandshaker.class, + new MockLog.SeenEventExpectation( + "warning", + TransportHandshaker.class.getCanonicalName(), + Level.WARN, + "Rejecting unreadable transport handshake * incompatible wire format." 
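/* The '*' in the expected message is a simple wildcard (MockLog matches expected messages with Regex.simpleMatch), papering over the version details in the middle of the line. The rejection rule exercised here, roughly reconstructed from getRandomIncompatibleTransportVersion() below: a handshake version is readable iff it is at least TransportVersions.MINIMUM_COMPATIBLE and is either a known id or newer than the current id; unknown ids inside the compatible range count as an incompatible wire format, while future ids are tolerated (see testHandshakeRequestFutureVersionsCompatibility). */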
+ ) + ); + + assertTrue(versionFuture.isDone()); + assertThat( + expectThrows(ExecutionException.class, IllegalStateException.class, versionFuture::result).getMessage(), + allOf( + containsString("Rejecting unreadable transport handshake"), + containsString("[" + handshakeResponse.getReleaseVersion() + "/" + handshakeResponse.getTransportVersion() + "]"), + containsString("[" + Build.current().version() + "/" + TransportVersion.current() + "]"), + containsString("which has an incompatible wire format") + ) + ); + } + + private static TransportVersion getRandomIncompatibleTransportVersion() { + return randomBoolean() + // either older than MINIMUM_COMPATIBLE + ? new TransportVersion(between(1, TransportVersions.MINIMUM_COMPATIBLE.id() - 1)) + // or between MINIMUM_COMPATIBLE and current but not known + : randomValueOtherThanMany( + TransportVersion::isKnown, + () -> new TransportVersion(between(TransportVersions.MINIMUM_COMPATIBLE.id(), TransportVersion.current().id())) + ); + } + public void testHandshakeResponseFromNewerNode() throws Exception { final PlainActionFuture versionFuture = new PlainActionFuture<>(); final long reqId = randomNonNegativeLong(); @@ -133,10 +222,8 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); - TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( - TransportVersion.current(), - randomIdentifier() - ); + final var buildVersion = randomIdentifier(); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(TransportVersion.current(), buildVersion); BytesStreamOutput currentHandshakeBytes = new BytesStreamOutput(); currentHandshakeBytes.setTransportVersion(HANDSHAKE_REQUEST_VERSION); handshakeRequest.writeTo(currentHandshakeBytes); @@ -145,17 +232,27 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException BytesStreamOutput futureHandshake = new BytesStreamOutput(); TaskId.EMPTY_TASK_ID.writeTo(lengthCheckingHandshake); TaskId.EMPTY_TASK_ID.writeTo(futureHandshake); + final var extraDataSize = between(0, 1024); try (BytesStreamOutput internalMessage = new BytesStreamOutput()) { - Version.writeVersion(Version.CURRENT, internalMessage); + internalMessage.writeVInt(TransportVersion.current().id() + between(0, 100)); + internalMessage.writeString(buildVersion); lengthCheckingHandshake.writeBytesReference(internalMessage.bytes()); - internalMessage.write(new byte[1024]); + internalMessage.write(new byte[extraDataSize]); futureHandshake.writeBytesReference(internalMessage.bytes()); } StreamInput futureHandshakeStream = futureHandshake.bytes().streamInput(); // We check that the handshake we serialize for this test equals the actual request. // Otherwise, we need to update the test. assertEquals(currentHandshakeBytes.bytes().length(), lengthCheckingHandshake.bytes().length()); - assertEquals(1031, futureHandshakeStream.available()); + final var expectedInternalMessageSize = 4 /* transport version id */ + + (1 + buildVersion.length()) /* length prefixed release version string */ + + extraDataSize; + assertEquals( + 1 /* EMPTY_TASK_ID */ + + (expectedInternalMessageSize < 0x80 ? 
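/* vInt sizing, which this ternary accounts for: a vInt stores seven payload bits per byte, so lengths 0..0x7f need one byte and 0x80..0x3fff need two. expectedInternalMessageSize is at most 4 + (1 + buildVersion.length()) + 1024 bytes, comfortably below 0x3fff, so the two-way check is sufficient. */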
1 : 2) /* internalMessage size vInt */ + + expectedInternalMessageSize /* internalMessage */, + futureHandshakeStream.available() + ); final PlainActionFuture responseFuture = new PlainActionFuture<>(); final TestTransportChannel channel = new TestTransportChannel(responseFuture); handshaker.handleHandshake(channel, reqId, futureHandshakeStream); @@ -166,43 +263,6 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException assertEquals(TransportVersion.current(), response.getTransportVersion()); } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9 - public void testReadV7HandshakeRequest() throws IOException { - final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); - - final var requestPayloadStreamOutput = new BytesStreamOutput(); - requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - requestPayloadStreamOutput.writeVInt(transportVersion.id()); - - final var requestBytesStreamOutput = new BytesStreamOutput(); - requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput); - requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes()); - - final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput(); - requestBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream); - - assertEquals(transportVersion, handshakeRequest.transportVersion); - assertEquals(transportVersion.toReleaseVersion(), handshakeRequest.releaseVersion); - } - - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9 - public void testReadV7HandshakeResponse() throws IOException { - final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); - - final var responseBytesStreamOutput = new BytesStreamOutput(); - responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - responseBytesStreamOutput.writeVInt(transportVersion.id()); - - final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput(); - responseBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); - final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream); - - assertEquals(transportVersion, handshakeResponse.getTransportVersion()); - assertEquals(transportVersion.toReleaseVersion(), handshakeResponse.getReleaseVersion()); - } - public void testReadV8HandshakeRequest() throws IOException { final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index c686329c4154c..d663e2d4c354d 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -377,7 +377,8 @@ public void testRejectsMismatchedBuildHash() { public void testAcceptsMismatchedServerlessBuildHash() { assumeTrue("Current build needs to be a snapshot", Build.current().isSnapshot()); assumeTrue("Security manager needs to be disabled", System.getSecurityManager() == null); - System.setProperty("es.serverless", Boolean.TRUE.toString()); // security 
manager blocks this + System.setProperty(TransportService.SERVERLESS_TRANSPORT_SYSTEM_PROPERTY, Boolean.TRUE.toString()); // security manager blocks + // this try { final DisruptingTransportInterceptor transportInterceptorA = new DisruptingTransportInterceptor(); final DisruptingTransportInterceptor transportInterceptorB = new DisruptingTransportInterceptor(); @@ -404,7 +405,7 @@ public void testAcceptsMismatchedServerlessBuildHash() { AbstractSimpleTransportTestCase.connectToNode(transportServiceA, transportServiceB.getLocalNode(), TestProfiles.LIGHT_PROFILE); assertTrue(transportServiceA.nodeConnected(transportServiceB.getLocalNode())); } finally { - System.clearProperty("es.serverless"); + System.clearProperty(TransportService.SERVERLESS_TRANSPORT_SYSTEM_PROPERTY); } } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java b/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java index c3965547abb5d..1c9cb4c9afc0f 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportStatsTests.java @@ -20,50 +20,8 @@ public class TransportStatsTests extends ESTestCase { public void testToXContent() { - assertEquals( - Strings.toString( - new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), new long[0], new long[0], Map.of()), - false, - true - ), - """ - {"transport":{"server_open":1,"total_outbound_connections":2,\ - "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\ - "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456\ - }}""" - ); - final var histogram = new long[HandlingTimeTracker.BUCKET_COUNT]; - assertEquals( - Strings.toString( - new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), histogram, histogram, Map.of()), - false, - true - ), - """ - {"transport":{"server_open":1,"total_outbound_connections":2,\ - "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\ - "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\ - "inbound_handling_time_histogram":[],\ - "outbound_handling_time_histogram":[]\ - }}""" - ); - histogram[4] = 10; - assertEquals( - Strings.toString( - new TransportStats(1, 2, 3, ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), histogram, histogram, Map.of()), - false, - true - ), - """ - {"transport":{"server_open":1,"total_outbound_connections":2,\ - "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\ - "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\ - "inbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\ - "outbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}]\ - }}""" - ); final var requestSizeHistogram = new long[29]; requestSizeHistogram[2] = 9; @@ -84,8 +42,8 @@ public void testToXContent() { ByteSizeUnit.MB.toBytes(4), 5, ByteSizeUnit.MB.toBytes(6), - new long[0], - new long[0], + histogram, + histogram, Map.of("internal:test/action", exampleActionStats) ), false, @@ -95,6 +53,8 @@ public void testToXContent() { {"transport":{"server_open":1,"total_outbound_connections":2,\ "rx_count":3,"rx_size":"4mb","rx_size_in_bytes":4194304,\ "tx_count":5,"tx_size":"6mb","tx_size_in_bytes":6291456,\ + "inbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\ + "outbound_handling_time_histogram":[{"ge":"8ms","ge_millis":8,"lt":"16ms","lt_millis":16,"count":10}],\ 
"actions":{"internal:test/action":%s}}}""", Strings.toString(exampleActionStats, false, true)) ); } diff --git a/settings.gradle b/settings.gradle index 8a15f74dcb286..a6b5367591ee2 100644 --- a/settings.gradle +++ b/settings.gradle @@ -79,6 +79,8 @@ List projects = [ 'distribution:bwc:maintenance', 'distribution:bwc:minor', 'distribution:bwc:staged', + 'distribution:bwc:staged2', + 'distribution:bwc:main', 'distribution:tools:java-version-checker', 'distribution:tools:cli-launcher', 'distribution:tools:server-cli', diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 2e68c094492fe..70f1ba529ec5e 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.esql.heap_attack; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; + import org.apache.http.HttpHost; import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.TimeUnits; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -64,6 +67,7 @@ * Tests that run ESQL queries that use a ton of memory. We want to make * sure they don't consume the entire heap and crash Elasticsearch. */ +@TimeoutSuite(millis = 40 * TimeUnits.MINUTE) public class HeapAttackIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = Clusters.buildCluster(); @@ -85,8 +89,7 @@ public void skipOnAborted() { */ public void testSortByManyLongsSuccess() throws IOException { initManyLongs(); - Response response = sortByManyLongs(500); - Map map = responseAsMap(response); + Map response = sortByManyLongs(500); ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long")) .item(matchesMap().entry("name", "b").entry("type", "long")); ListMatcher values = matchesList(); @@ -95,7 +98,7 @@ public void testSortByManyLongsSuccess() throws IOException { values = values.item(List.of(0, b)); } } - assertResultMap(map, columns, values); + assertResultMap(response, columns, values); } /** @@ -103,7 +106,8 @@ public void testSortByManyLongsSuccess() throws IOException { */ public void testSortByManyLongsTooMuchMemory() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> sortByManyLongs(5000)); + // 5000 is plenty to break on most nodes + assertCircuitBreaks(attempt -> sortByManyLongs(attempt * 5000)); } /** @@ -187,26 +191,42 @@ public void testSortByManyLongsTooMuchMemoryAsync() throws IOException { ); } - private void assertCircuitBreaks(ThrowingRunnable r) throws IOException { - ResponseException e = expectThrows(ResponseException.class, r); - Map map = responseAsMap(e.getResponse()); - logger.info("expected circuit breaker {}", map); - assertMap( - map, + private static final int MAX_ATTEMPTS = 5; + + interface TryCircuitBreaking { + Map attempt(int attempt) throws IOException; + } + + private void assertCircuitBreaks(TryCircuitBreaking tryBreaking) throws IOException { + assertCircuitBreaks( + tryBreaking, matchesMap().entry("status", 429).entry("error", matchesMap().extraOk().entry("type", 
"circuit_breaking_exception")) ); } - private void assertFoldCircuitBreaks(ThrowingRunnable r) throws IOException { - ResponseException e = expectThrows(ResponseException.class, r); - Map map = responseAsMap(e.getResponse()); - logger.info("expected fold circuit breaking {}", map); - assertMap( - map, + private void assertFoldCircuitBreaks(TryCircuitBreaking tryBreaking) throws IOException { + assertCircuitBreaks( + tryBreaking, matchesMap().entry("status", 400).entry("error", matchesMap().extraOk().entry("type", "fold_too_much_memory_exception")) ); } + private void assertCircuitBreaks(TryCircuitBreaking tryBreaking, MapMatcher responseMatcher) throws IOException { + int attempt = 1; + while (attempt <= MAX_ATTEMPTS) { + try { + Map response = tryBreaking.attempt(attempt); + logger.warn("{}: should circuit broken but got {}", attempt, response); + attempt++; + } catch (ResponseException e) { + Map map = responseAsMap(e.getResponse()); + assertMap(map, responseMatcher); + return; + } + } + fail("giving up circuit breaking after " + attempt + " attempts"); + } + private void assertParseFailure(ThrowingRunnable r) throws IOException { ResponseException e = expectThrows(ResponseException.class, r); Map map = responseAsMap(e.getResponse()); @@ -214,9 +234,9 @@ private void assertParseFailure(ThrowingRunnable r) throws IOException { assertMap(map, matchesMap().entry("status", 400).entry("error", matchesMap().extraOk().entry("type", "parsing_exception"))); } - private Response sortByManyLongs(int count) throws IOException { + private Map sortByManyLongs(int count) throws IOException { logger.info("sorting by {} longs", count); - return query(makeSortByManyLongs(count).toString(), null); + return responseAsMap(query(makeSortByManyLongs(count).toString(), null)); } private StringBuilder makeSortByManyLongs(int count) { @@ -314,8 +334,7 @@ private Response concat(int evals) throws IOException { public void testManyConcat() throws IOException { int strings = 300; initManyLongs(); - Response resp = manyConcat("FROM manylongs", strings); - assertManyStrings(resp, strings); + assertManyStrings(manyConcat("FROM manylongs", strings), strings); } /** @@ -323,7 +342,8 @@ public void testManyConcat() throws IOException { */ public void testHugeManyConcat() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> manyConcat("FROM manylongs", 2000)); + // 2000 is plenty to break on most nodes + assertCircuitBreaks(attempt -> manyConcat("FROM manylongs", attempt * 2000)); } /** @@ -331,18 +351,18 @@ public void testHugeManyConcat() throws IOException { */ public void testManyConcatFromRow() throws IOException { int strings = 2000; - Response resp = manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999", strings); - assertManyStrings(resp, strings); + assertManyStrings(manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999", strings), strings); } /** * Hits a circuit breaker by building many moderately long strings. */ public void testHugeManyConcatFromRow() throws IOException { + // 5000 is plenty to break on most nodes assertFoldCircuitBreaks( - () -> manyConcat( + attempt -> manyConcat( "ROW a=9999999999999, b=99999999999999999, c=99999999999999999, d=99999999999999999, e=99999999999999999", - 5000 + attempt * 5000 ) ); } @@ -357,7 +377,7 @@ public void testHugeHugeManyConcatFromRow() throws IOException { /** * Tests that generate many moderately long strings. 
*/ - private Response manyConcat(String init, int strings) throws IOException { + private Map manyConcat(String init, int strings) throws IOException { StringBuilder query = startQuery(); query.append(init).append(" | EVAL str = CONCAT("); query.append( @@ -384,7 +404,7 @@ private Response manyConcat(String init, int strings) throws IOException { query.append("str").append(s); } query.append("\"}"); - return query(query.toString(), "columns"); + return responseAsMap(query(query.toString(), "columns")); } /** @@ -393,8 +413,7 @@ private Response manyConcat(String init, int strings) throws IOException { public void testManyRepeat() throws IOException { int strings = 30; initManyLongs(); - Response resp = manyRepeat("FROM manylongs", strings); - assertManyStrings(resp, 30); + assertManyStrings(manyRepeat("FROM manylongs", strings), 30); } /** @@ -402,7 +421,8 @@ public void testManyRepeat() throws IOException { */ public void testHugeManyRepeat() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> manyRepeat("FROM manylongs", 75)); + // 75 is plenty to break on most nodes + assertCircuitBreaks(attempt -> manyRepeat("FROM manylongs", attempt * 75)); } /** @@ -410,15 +430,15 @@ public void testHugeManyRepeat() throws IOException { */ public void testManyRepeatFromRow() throws IOException { int strings = 300; - Response resp = manyRepeat("ROW a = 99", strings); - assertManyStrings(resp, strings); + assertManyStrings(manyRepeat("ROW a = 99", strings), strings); } /** * Hits a circuit breaker by building many moderately long strings. */ public void testHugeManyRepeatFromRow() throws IOException { - assertFoldCircuitBreaks(() -> manyRepeat("ROW a = 99", 400)); + // 400 is enough to break on most nodes + assertFoldCircuitBreaks(attempt -> manyRepeat("ROW a = 99", attempt * 400)); } /** @@ -431,7 +451,7 @@ public void testHugeHugeManyRepeatFromRow() throws IOException { /** * Tests that generate many moderately long strings. 
*/ - private Response manyRepeat(String init, int strings) throws IOException { + private Map manyRepeat(String init, int strings) throws IOException { StringBuilder query = startQuery(); query.append(init).append(" | EVAL str = TO_STRING(a)"); for (int s = 0; s < strings; s++) { @@ -445,23 +465,21 @@ private Response manyRepeat(String init, int strings) throws IOException { query.append("str").append(s); } query.append("\"}"); - return query(query.toString(), "columns"); + return responseAsMap(query(query.toString(), "columns")); } - private void assertManyStrings(Response resp, int strings) throws IOException { - Map map = responseAsMap(resp); + private void assertManyStrings(Map resp, int strings) throws IOException { ListMatcher columns = matchesList(); for (int s = 0; s < strings; s++) { columns = columns.item(matchesMap().entry("name", "str" + s).entry("type", "keyword")); } MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns)); + assertMap(resp, mapMatcher.entry("columns", columns)); } public void testManyEval() throws IOException { initManyLongs(); - Response resp = manyEval(1); - Map map = responseAsMap(resp); + Map response = manyEval(1); ListMatcher columns = matchesList(); columns = columns.item(matchesMap().entry("name", "a").entry("type", "long")); columns = columns.item(matchesMap().entry("name", "b").entry("type", "long")); @@ -471,15 +489,16 @@ public void testManyEval() throws IOException { for (int i = 0; i < 20; i++) { columns = columns.item(matchesMap().entry("name", "i0" + i).entry("type", "long")); } - assertResultMap(map, columns, hasSize(10_000)); + assertResultMap(response, columns, hasSize(10_000)); } public void testTooManyEval() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> manyEval(490)); + // 490 is plenty to fail on most nodes + assertCircuitBreaks(attempt -> manyEval(attempt * 490)); } - private Response manyEval(int evalLines) throws IOException { + private Map manyEval(int evalLines) throws IOException { StringBuilder query = startQuery(); query.append("FROM manylongs"); for (int e = 0; e < evalLines; e++) { @@ -492,7 +511,7 @@ private Response manyEval(int evalLines) throws IOException { } } query.append("\n| LIMIT 10000\"}"); - return query(query.toString(), null); + return responseAsMap(query(query.toString(), null)); } private Response query(String query, String filterPath) throws IOException { @@ -550,99 +569,161 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE public void testFetchManyBigFields() throws IOException { initManyBigFieldsIndex(100); - fetchManyBigFields(100); + Map response = fetchManyBigFields(100); + ListMatcher columns = matchesList(); + for (int f = 0; f < 1000; f++) { + columns = columns.item(matchesMap().entry("name", "f" + String.format(Locale.ROOT, "%03d", f)).entry("type", "keyword")); + } + assertMap(response, matchesMap().entry("columns", columns)); } public void testFetchTooManyBigFields() throws IOException { initManyBigFieldsIndex(500); - assertCircuitBreaks(() -> fetchManyBigFields(500)); + // 500 docs is plenty to circuit break on most nodes + assertCircuitBreaks(attempt -> fetchManyBigFields(attempt * 500)); } /** * Fetches documents containing 1000 fields which are {@code 1kb} each. 
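* Back-of-envelope: 1000 fields of roughly 1kb each is about 1mb of source per document, so the failing case starts by asking for about 500mb of fetched fields at once and adds another 500 documents on each retry, which is expected to exceed the request breaker on CI-sized heaps.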
*/ - private void fetchManyBigFields(int docs) throws IOException { + private Map fetchManyBigFields(int docs) throws IOException { StringBuilder query = startQuery(); query.append("FROM manybigfields | SORT f000 | LIMIT " + docs + "\"}"); - Response response = query(query.toString(), "columns"); - Map map = responseAsMap(response); - ListMatcher columns = matchesList(); - for (int f = 0; f < 1000; f++) { - columns = columns.item(matchesMap().entry("name", "f" + String.format(Locale.ROOT, "%03d", f)).entry("type", "keyword")); - } - assertMap(map, matchesMap().entry("columns", columns)); + return responseAsMap(query(query.toString(), "columns")); } public void testAggMvLongs() throws IOException { int fieldValues = 100; initMvLongsIndex(1, 3, fieldValues); - Response response = aggMvLongs(3); - Map map = responseAsMap(response); + Map response = aggMvLongs(3); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(f00)").entry("type", "long")) .item(matchesMap().entry("name", "f00").entry("type", "long")) .item(matchesMap().entry("name", "f01").entry("type", "long")) .item(matchesMap().entry("name", "f02").entry("type", "long")); - assertMap(map, matchesMap().entry("columns", columns)); + assertMap(response, matchesMap().entry("columns", columns)); } public void testAggTooManyMvLongs() throws IOException { initMvLongsIndex(1, 3, 1000); - assertCircuitBreaks(() -> aggMvLongs(3)); + // 3 fields is plenty on most nodes + assertCircuitBreaks(attempt -> aggMvLongs(attempt * 3)); } - private Response aggMvLongs(int fields) throws IOException { + private Map aggMvLongs(int fields) throws IOException { StringBuilder query = startQuery(); query.append("FROM mv_longs | STATS MAX(f00) BY f00"); for (int f = 1; f < fields; f++) { query.append(", f").append(String.format(Locale.ROOT, "%02d", f)); } - return query(query.append("\"}").toString(), "columns"); + return responseAsMap(query(query.append("\"}").toString(), "columns")); } public void testFetchMvLongs() throws IOException { int fields = 100; initMvLongsIndex(100, fields, 1000); - Response response = fetchMvLongs(); - Map map = responseAsMap(response); + Map response = fetchMvLongs(); ListMatcher columns = matchesList(); for (int f = 0; f < fields; f++) { columns = columns.item(matchesMap().entry("name", String.format(Locale.ROOT, "f%02d", f)).entry("type", "long")); } - assertMap(map, matchesMap().entry("columns", columns)); + assertMap(response, matchesMap().entry("columns", columns)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106683") public void testFetchTooManyMvLongs() throws IOException { initMvLongsIndex(500, 100, 1000); - assertCircuitBreaks(() -> fetchMvLongs()); + assertCircuitBreaks(attempt -> fetchMvLongs()); } - private Response fetchMvLongs() throws IOException { + private Map fetchMvLongs() throws IOException { StringBuilder query = startQuery(); query.append("FROM mv_longs\"}"); - return query(query.toString(), "columns"); + return responseAsMap(query(query.toString(), "columns")); } public void testLookupExplosion() throws IOException { - int sensorDataCount = 7500; + int sensorDataCount = 500; int lookupEntries = 10000; Map map = lookupExplosion(sensorDataCount, lookupEntries); assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); } public void testLookupExplosionManyMatches() throws IOException { - assertCircuitBreaks(() -> { - Map result = lookupExplosion(8500, 10000); - logger.error("should have failed but got {}", result); 
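/* LOOKUP JOIN multiplies rows: every one of the sensorDataCount left-hand rows matches all lookupEntries lookup rows (the lookup index is built with a single sensor id), so the expected count is sensorDataCount * lookupEntries and memory use grows with that product; that is what makes this family of tests reliably breaker-prone. */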
- }); + // 1500, 10000 is enough locally, but some CI machines need more. + assertCircuitBreaks(attempt -> lookupExplosion(attempt * 1500, 10000)); } - private Map lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException { + public void testLookupExplosionNoFetch() throws IOException { + int sensorDataCount = 7500; + int lookupEntries = 10000; + Map map = lookupExplosionNoFetch(sensorDataCount, lookupEntries); + assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); + } + + public void testLookupExplosionNoFetchManyMatches() throws IOException { + // 8500 is plenty on most nodes + assertCircuitBreaks(attempt -> lookupExplosionNoFetch(attempt * 8500, 10000)); + } + + public void testLookupExplosionBigString() throws IOException { + int sensorDataCount = 150; + int lookupEntries = 1; + Map map = lookupExplosionBigString(sensorDataCount, lookupEntries); + assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); + } + + public void testLookupExplosionBigStringManyMatches() throws IOException { + // 500, 1 is enough to make it fail locally but some CI needs more + assertCircuitBreaks(attempt -> lookupExplosionBigString(attempt * 500, 1)); + } + + private Map lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException { + try { + lookupExplosionData(sensorDataCount, lookupEntries); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(location)\"}"); + return responseAsMap(query(query.toString(), null)); + } finally { + deleteIndex("sensor_data"); + deleteIndex("sensor_lookup"); + } + } + + private Map lookupExplosionNoFetch(int sensorDataCount, int lookupEntries) throws IOException { + try { + lookupExplosionData(sensorDataCount, lookupEntries); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}"); + return responseAsMap(query(query.toString(), null)); + } finally { + deleteIndex("sensor_data"); + deleteIndex("sensor_lookup"); + } + } + + private void lookupExplosionData(int sensorDataCount, int lookupEntries) throws IOException { initSensorData(sensorDataCount, 1); initSensorLookup(lookupEntries, 1, i -> "73.9857 40.7484"); - StringBuilder query = startQuery(); - query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}"); - return responseAsMap(query(query.toString(), null)); + } + + private Map lookupExplosionBigString(int sensorDataCount, int lookupEntries) throws IOException { + try { + initSensorData(sensorDataCount, 1); + initSensorLookupString(lookupEntries, 1, i -> { + int target = Math.toIntExact(ByteSizeValue.ofMb(1).getBytes()); + StringBuilder str = new StringBuilder(Math.toIntExact(ByteSizeValue.ofMb(2).getBytes())); + while (str.length() < target) { + str.append("Lorem ipsum dolor sit amet, consectetur adipiscing elit."); + } + logger.info("big string is {} characters", str.length()); + return str.toString(); + }); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(string)\"}"); + return responseAsMap(query(query.toString(), null)); + } finally { + deleteIndex("sensor_data"); + deleteIndex("sensor_lookup"); + } } public void testEnrichExplosion() throws IOException { @@ -653,22 +734,25 @@ public void testEnrichExplosion() throws IOException { } public void testEnrichExplosionManyMatches() throws IOException { - 
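/* The try/finally blocks introduced in these helpers delete the heavyweight sensor_data and sensor_lookup indices (and, for enrich, the policy) even when the breaker trips as intended; without that cleanup a broken attempt could leave hundreds of megabytes of test data behind to skew the retries and later tests. */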
assertCircuitBreaks(() -> { - Map<String, Object> result = enrichExplosion(3000, 10000); - logger.error("should have failed but got {}", result); - }); + // 1000, 10000 is enough on most nodes + assertCircuitBreaks(attempt -> enrichExplosion(1000, attempt * 5000)); } - private Map<String, Object> enrichExplosion(int sensorDataCount, int lookupEntries) throws IOException { - initSensorData(sensorDataCount, 1); - initSensorEnrich(lookupEntries, 1, i -> "73.9857 40.7484"); + private Map<String, Object> enrichExplosion(int sensorDataCount, int lookupEntries) throws IOException { try { - StringBuilder query = startQuery(); - query.append("FROM sensor_data | ENRICH sensor ON id | STATS COUNT(*)\"}"); - return responseAsMap(query(query.toString(), null)); + initSensorData(sensorDataCount, 1); + initSensorEnrich(lookupEntries, 1, i -> "73.9857 40.7484"); + try { + StringBuilder query = startQuery(); + query.append("FROM sensor_data | ENRICH sensor ON id | STATS COUNT(*)\"}"); + return responseAsMap(query(query.toString(), null)); + } finally { + Request delete = new Request("DELETE", "/_enrich/policy/sensor"); + assertMap(responseAsMap(client().performRequest(delete)), matchesMap().entry("acknowledged", true)); + } } finally { - Request delete = new Request("DELETE", "/_enrich/policy/sensor"); - assertMap(responseAsMap(client().performRequest(delete)), matchesMap().entry("acknowledged", true)); + deleteIndex("sensor_data"); + deleteIndex("sensor_lookup"); } } @@ -830,6 +914,31 @@ private void initSensorLookup(int lookupEntries, int sensorCount, IntFunction<String> location) throws IOException { + private void initSensorLookupString(int lookupEntries, int sensorCount, IntFunction<String> string) throws IOException { + logger.info("loading sensor lookup with huge strings"); + createIndex("sensor_lookup", Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(), """ + { + "properties": { + "id": { "type": "long" }, + "string": { "type": "text" } + } + }"""); + int docsPerBulk = 10; + StringBuilder data = new StringBuilder(); + for (int i = 0; i < lookupEntries; i++) { + int sensor = i % sensorCount; + data.append(String.format(Locale.ROOT, """ + {"create":{}} + {"id": %d, "string": "%s"} + """, sensor, string.apply(sensor))); + if (i % docsPerBulk == docsPerBulk - 1) { + bulk("sensor_lookup", data.toString()); + data.setLength(0); + } + } + initIndex("sensor_lookup", data.toString()); + } + private void initSensorEnrich(int lookupEntries, int sensorCount, IntFunction<String> location) throws IOException { initSensorLookup(lookupEntries, sensorCount, location); logger.info("loading sensor enrich"); diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoIP2-City.tgz b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoIP2-City.tgz index 76dd40000f132..708b94759e11e 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoIP2-City.tgz and b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoIP2-City.tgz differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-ASN.mmdb b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-ASN.mmdb new file mode 100644 index 0000000000000..af37b8ae278fd Binary files /dev/null and b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-ASN.mmdb differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-ASN.tgz b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-ASN.tgz index 4e92ad982227d..f47f34c74844e 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-ASN.tgz and
b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-ASN.tgz differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-City.mmdb b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-City.mmdb index 0809201619b59..67dcd4a8e5a17 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-City.mmdb and b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-City.mmdb differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-City.tgz b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-City.tgz index a86f548d6e33e..44e0135bbfedc 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-City.tgz and b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-City.tgz differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-Country.mmdb b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-Country.mmdb index aa81cbe8a2f0e..8b1436b0c387f 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-Country.mmdb and b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-Country.mmdb differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-Country.tgz b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-Country.tgz index 2d000ce65ef2f..cf4fe0721b59c 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-Country.tgz and b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/GeoLite2-Country.tgz differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/MyCustomGeoLite2-City.mmdb b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/MyCustomGeoLite2-City.mmdb index 0809201619b59..67dcd4a8e5a17 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/MyCustomGeoLite2-City.mmdb and b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/MyCustomGeoLite2-City.mmdb differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/MyCustomGeoLite2-City.tgz b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/MyCustomGeoLite2-City.tgz index ffe71bc1d23cd..64db9c0a17e7a 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/MyCustomGeoLite2-City.tgz and b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/MyCustomGeoLite2-City.tgz differ diff --git a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/data.json b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/data.json index 3ce4ba88dae01..b58b6f0e29e46 100644 --- a/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/data.json +++ b/test/fixtures/geoip-fixture/src/main/resources/geoip-fixture/data.json @@ -1,24 +1,24 @@ [ { - "md5_hash": "da5bb1c00c74e3f5a34ca1ec0022c550", + "md5_hash": "ec117f3c605b4998365e45fb2c382396", "name": "GeoLite2-City.tgz", "url": "db/GeoLite2-City.tgz", "provider": "maxmind" }, { - "md5_hash": "61c38f0fcec4a7b0b359201f124004df", + "md5_hash": "60a8f72665e59fe34420a148f97d4e2c", "name": "GeoLite2-ASN.tgz", "url": "db/GeoLite2-ASN.tgz", "provider": "maxmind" }, { - "md5_hash": "8f3229d6158f85adef296f8781f7ab49", + "md5_hash": "2363fb6c277679b242b320282b21bf04", "name": "GeoLite2-Country.tgz", "url": "db/GeoLite2-Country.tgz", "provider": "maxmind" }, { - "md5_hash": 
"77dcf272548942520d1b549957065a9f", + "md5_hash": "102662f11359a3074dd756ab474495e8", "name": "MyCustomGeoLite2-City.tgz", "url": "db/MyCustomGeoLite2-City.tgz", "provider": "maxmind" diff --git a/test/fixtures/geoip-fixture/src/main/resources/ipinfo-fixture/ip_asn_sample.mmdb b/test/fixtures/geoip-fixture/src/main/resources/ipinfo-fixture/ip_asn_sample.mmdb index 3e1fc49ba48a5..fb52368827f6c 100644 Binary files a/test/fixtures/geoip-fixture/src/main/resources/ipinfo-fixture/ip_asn_sample.mmdb and b/test/fixtures/geoip-fixture/src/main/resources/ipinfo-fixture/ip_asn_sample.mmdb differ diff --git a/test/framework/build.gradle b/test/framework/build.gradle index c7e08eb3cdfa9..a5bff2d0166cc 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -25,14 +25,15 @@ dependencies { api "commons-codec:commons-codec:${versions.commonscodec}" // mockito - api 'org.mockito:mockito-core:5.11.0' - api 'org.mockito:mockito-subclass:5.11.0' - api 'net.bytebuddy:byte-buddy:1.14.12' + api 'org.mockito:mockito-core:5.15.2' + api 'org.mockito:mockito-subclass:5.15.2' + api 'net.bytebuddy:byte-buddy:1.15.11' api 'org.objenesis:objenesis:3.3' api "org.elasticsearch:mocksocket:${versions.mocksocket}" testImplementation project(':x-pack:plugin:mapper-unsigned-long') + testImplementation project(':x-pack:plugin:mapper-counted-keyword') testImplementation project(":modules:mapper-extras") } @@ -64,6 +65,7 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.log4j.Priority', // mockito + 'net.bytebuddy.agent.Installer', 'net.bytebuddy.agent.ByteBuddyAgent', 'org.mockito.internal.creation.bytebuddy.inject.MockMethodDispatcher', 'org.opentest4j.AssertionFailedError', diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java index b49d10ba9c402..c3384ede3a1a7 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java @@ -94,7 +94,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { } public TestFileStore getTestFileStore(String nodeName) { - return fileSystemProvider.getTestFileStore(internalCluster().getInstance(Environment.class, nodeName).dataFiles()[0]); + return fileSystemProvider.getTestFileStore(internalCluster().getInstance(Environment.class, nodeName).dataDirs()[0]); } protected static class TestFileStore extends FilterFileStore { diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index ddfa61b53a0af..fbda65edee248 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -971,9 +971,7 @@ public final class ClusterNode { private AckedFakeThreadPoolMasterService masterService; private DisruptableClusterApplierService clusterApplierService; private ClusterService clusterService; - private FeatureService featureService; TransportService transportService; - private MasterHistoryService masterHistoryService; CoordinationDiagnosticsService coordinationDiagnosticsService; StableMasterHealthIndicatorService stableMasterHealthIndicatorService; private DisruptableMockTransport mockTransport; @@ 
-1134,8 +1132,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { threadPool ); clusterService = new ClusterService(settings, clusterSettings, masterService, clusterApplierService); - featureService = new FeatureService(List.of()); - masterHistoryService = new MasterHistoryService(transportService, threadPool, clusterService); + MasterHistoryService masterHistoryService = new MasterHistoryService(transportService, threadPool, clusterService); clusterService.setNodeConnectionsService( new NodeConnectionsService(clusterService.getSettings(), threadPool, transportService) ); @@ -1173,7 +1170,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { coordinationServices.getLeaderHeartbeatService(), coordinationServices.getPreVoteCollectorFactory(), CompatibilityVersionsUtils.staticCurrent(), - featureService + new FeatureService(List.of()) ); coordinationDiagnosticsService = new CoordinationDiagnosticsService( clusterService, diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index c3ce32d4ce333..845536792343d 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -662,7 +662,7 @@ public static MetadataRolloverService getMetadataRolloverService( ).build(MapperBuilderContext.root(false, true)); ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool); Environment env = mock(Environment.class); - when(env.sharedDataFile()).thenReturn(null); + when(env.sharedDataDir()).thenReturn(null); AllocationService allocationService = mock(AllocationService.class); when(allocationService.reroute(any(ClusterState.class), any(String.class), any())).then(i -> i.getArguments()[0]); when(allocationService.getShardRoutingRoleStrategy()).thenReturn(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY); diff --git a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java index 4f559a5f3eaef..8aea7a5713cf1 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java @@ -9,7 +9,9 @@ package org.elasticsearch.index; -import java.util.List; +import java.util.Collections; +import java.util.NavigableSet; +import java.util.TreeSet; /** * Provides access to all known index versions */ @@ -18,10 +20,12 @@ public class KnownIndexVersions { /** * A sorted list of all known index versions */ - public static final List<IndexVersion> ALL_VERSIONS = List.copyOf(IndexVersions.getAllVersions()); + public static final NavigableSet<IndexVersion> ALL_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(IndexVersions.getAllVersions()) + ); /** * A sorted list of all known index versions that can be written to */ - public static final List<IndexVersion> ALL_WRITE_VERSIONS = List.copyOf(IndexVersions.getAllWriteVersions()); + public static final NavigableSet<IndexVersion> ALL_WRITE_VERSIONS = ALL_VERSIONS.tailSet(IndexVersions.MINIMUM_COMPATIBLE, true); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java index 8f5478e1181f1..a7595cf52297b 100644 ---
a/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java @@ -13,82 +13,171 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification; -import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.DocumentGenerator; import org.elasticsearch.logsdb.datageneration.FieldType; import org.elasticsearch.logsdb.datageneration.MappingGenerator; import org.elasticsearch.logsdb.datageneration.Template; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceHandler; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; +import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.search.fetch.StoredFieldsSpec; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Stream; public abstract class BlockLoaderTestCase extends MapperServiceTestCase { + private final FieldType fieldType; private final String fieldName; - private final Template template; private final MappingGenerator mappingGenerator; - private final FieldDataGenerator generator; + private final DocumentGenerator documentGenerator; protected BlockLoaderTestCase(FieldType fieldType) { + this.fieldType = fieldType; this.fieldName = randomAlphaOfLengthBetween(5, 10); - // Disable all dynamic mapping var specification = DataGeneratorSpecification.builder() .withFullyDynamicMapping(false) + // Disable dynamic mapping and disabled objects .withDataSourceHandlers(List.of(new DataSourceHandler() { @Override public DataSourceResponse.DynamicMappingGenerator handle(DataSourceRequest.DynamicMappingGenerator request) { return new DataSourceResponse.DynamicMappingGenerator(isObject -> false); } + + @Override + public DataSourceResponse.ObjectMappingParametersGenerator handle( + DataSourceRequest.ObjectMappingParametersGenerator request + ) { + return new DataSourceResponse.ObjectMappingParametersGenerator(HashMap::new); // just defaults + } })) .build(); - this.template = new Template(Map.of(fieldName, new Template.Leaf(fieldName, fieldType))); this.mappingGenerator = new MappingGenerator(specification); - this.generator = fieldType.generator(fieldName, specification.dataSource()); + this.documentGenerator = new DocumentGenerator(specification); } public void testBlockLoader() throws IOException { + var template = new Template(Map.of(fieldName, new Template.Leaf(fieldName, fieldType))); + runTest(template, fieldName); + } + + public void testBlockLoaderForFieldInObject() throws IOException { + int depth = randomIntBetween(0, 3); + + Map<String, Template.Entry> currentLevel = new HashMap<>(); + Map<String, Template.Entry> top = Map.of("top", new Template.Object("top", false, currentLevel)); + + var fullFieldName = new StringBuilder("top"); + int currentDepth = 0; + while (currentDepth++ <
depth) { + fullFieldName.append('.').append("level").append(currentDepth); + + Map<String, Template.Entry> nextLevel = new HashMap<>(); + currentLevel.put("level" + currentDepth, new Template.Object("level" + currentDepth, false, nextLevel)); + currentLevel = nextLevel; + } + + fullFieldName.append('.').append(fieldName); + currentLevel.put(fieldName, new Template.Leaf(fieldName, fieldType)); + var template = new Template(top); + runTest(template, fullFieldName.toString()); + } + + private void runTest(Template template, String fieldName) throws IOException { var mapping = mappingGenerator.generate(template); var mappingXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(mapping.raw()); var syntheticSource = randomBoolean(); var mapperService = syntheticSource ? createSytheticSourceMapperService(mappingXContent) : createMapperService(mappingXContent); - var fieldValue = generator.generateValue(); + var document = documentGenerator.generate(template, mapping); + var documentXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(document); - Object blockLoaderResult = setupAndInvokeBlockLoader(mapperService, fieldValue); - Object expected = expected(mapping.lookup().get(fieldName), fieldValue, syntheticSource); + Object blockLoaderResult = setupAndInvokeBlockLoader(mapperService, documentXContent, fieldName); + Object expected = expected(mapping.lookup().get(fieldName), getFieldValue(document, fieldName), syntheticSource); assertEquals(expected, blockLoaderResult); } protected abstract Object expected(Map<String, Object> fieldMapping, Object value, boolean syntheticSource); - private Object setupAndInvokeBlockLoader(MapperService mapperService, Object fieldValue) throws IOException { + private Object getFieldValue(Map<String, Object> document, String fieldName) { + var rawValues = new ArrayList<>(); + processLevel(document, fieldName, rawValues); + + if (rawValues.size() == 1) { + return rawValues.get(0); + } + + return rawValues.stream().flatMap(v -> v instanceof List<?> l ?
l.stream() : Stream.of(v)).toList(); + } + + @SuppressWarnings("unchecked") + private void processLevel(Map<String, Object> level, String field, ArrayList<Object> values) { + if (field.contains(".") == false) { + var value = level.get(field); + values.add(value); + return; + } + + var nameInLevel = field.split("\\.")[0]; + var entry = level.get(nameInLevel); + if (entry instanceof Map<?, ?> m) { + processLevel((Map<String, Object>) m, field.substring(field.indexOf('.') + 1), values); + } + if (entry instanceof List<?> l) { + for (var object : l) { + processLevel((Map<String, Object>) object, field.substring(field.indexOf('.') + 1), values); + } + } + } + + protected static Object maybeFoldList(List<?> list) { + if (list.isEmpty()) { + return null; + } + + if (list.size() == 1) { + return list.get(0); + } + + return list; + } + + private Object setupAndInvokeBlockLoader(MapperService mapperService, XContentBuilder document, String fieldName) throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter iw = new RandomIndexWriter(random(), directory); - LuceneDocument doc = mapperService.documentMapper().parse(source(b -> { - b.field(fieldName); - b.value(fieldValue); - })).rootDoc(); + var source = new SourceToParse( + "1", + BytesReference.bytes(document), + XContentType.JSON, + null, + Map.of(), + true, + XContentMeteringParserDecorator.NOOP + ); + LuceneDocument doc = mapperService.documentMapper().parse(source).rootDoc(); iw.addDocument(doc); iw.close(); try (DirectoryReader reader = DirectoryReader.open(directory)) { LeafReaderContext context = reader.leaves().get(0); - return load(createBlockLoader(mapperService), context, mapperService); + return load(createBlockLoader(mapperService, fieldName), context, mapperService); } } } @@ -98,6 +187,9 @@ private Object load(BlockLoader blockLoader, LeafReaderContext context, MapperSe var columnAtATimeReader = blockLoader.columnAtATimeReader(context); if (columnAtATimeReader != null) { var block = (TestBlock) columnAtATimeReader.read(TestBlock.factory(context.reader().numDocs()), TestBlock.docs(0)); + if (block.size() == 0) { + return null; + } return block.get(0); } @@ -119,10 +211,13 @@ private Object load(BlockLoader blockLoader, LeafReaderContext context, MapperSe BlockLoader.Builder builder = blockLoader.builder(TestBlock.factory(context.reader().numDocs()), 1); blockLoader.rowStrideReader(context).read(0, storedFieldsLoader, builder); var block = (TestBlock) builder.build(); + if (block.size() == 0) { + return null; + } return block.get(0); } - private BlockLoader createBlockLoader(MapperService mapperService) { + private BlockLoader createBlockLoader(MapperService mapperService, String fieldName) { SearchLookup searchLookup = new SearchLookup(mapperService.mappingLookup().fieldTypesLookup()::get, null, null); return mapperService.fieldType(fieldName).blockLoader(new MappedFieldType.BlockLoaderContext() { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 459480d1d7316..b62e400826836 100--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -884,8 +884,11 @@ protected void validateRoundTripReader(String syntheticSource, DirectoryReader r throws IOException { assertReaderEquals( "round trip " + syntheticSource, - new FieldMaskingReader(SourceFieldMapper.RECOVERY_SOURCE_NAME, reader), - new
FieldMaskingReader(SourceFieldMapper.RECOVERY_SOURCE_NAME, roundTripReader) + new FieldMaskingReader(Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME), reader), + new FieldMaskingReader( + Set.of(SourceFieldMapper.RECOVERY_SOURCE_NAME, SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME), + roundTripReader + ) ); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java index 2c53fa782db85..14beb979b96cf 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java @@ -147,6 +147,11 @@ public SingletonOrdsBuilder appendOrd(int value) { } return new SingletonOrdsBuilder(); } + + @Override + public BlockLoader.AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count) { + return null; + } }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DocumentGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DocumentGenerator.java index 9b2878ff7bfc8..cfdec40bf9190 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DocumentGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/DocumentGenerator.java @@ -40,7 +40,10 @@ public DocumentGenerator(DataGeneratorSpecification specification) { public Map<String, Object> generate(Template template, Mapping mapping) { var documentMap = new TreeMap<String, Object>(); for (var predefinedField : specification.predefinedFields()) { - documentMap.put(predefinedField.name(), predefinedField.generator(specification.dataSource()).generateValue()); + documentMap.put( + predefinedField.name(), + predefinedField.generator(specification.dataSource()).generateValue(predefinedField.mapping()) + ); } generateFields(documentMap, template.template(), new Context("", mapping.lookup())); @@ -53,16 +56,18 @@ private void generateFields(Map document, Map arrayLength = objectArrayGenerator.lengthGenerator().get(); diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldDataGenerator.java index 7e28a0a0fab25..1d84c5adc4eda 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldDataGenerator.java @@ -9,11 +9,13 @@ package org.elasticsearch.logsdb.datageneration; +import java.util.Map; + /** * Entity responsible for generating a valid value for a field. * * Generator is expected to produce a different value on every call.
*/ public interface FieldDataGenerator { - Object generateValue(); + Object generateValue(Map<String, Object> fieldMapping); } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java index 07744851aba3e..4bf65fcf6ecf6 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java @@ -11,11 +11,14 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.CountedKeywordFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.HalfFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.IntegerFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.KeywordFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.LongFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.ScaledFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.ShortFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.UnsignedLongFieldDataGenerator; @@ -30,7 +33,10 @@ public enum FieldType { SHORT("short"), BYTE("byte"), DOUBLE("double"), - FLOAT("float"); + FLOAT("float"), + HALF_FLOAT("half_float"), + SCALED_FLOAT("scaled_float"), + COUNTED_KEYWORD("counted_keyword"); private final String name; @@ -48,6 +54,9 @@ public FieldDataGenerator generator(String fieldName, DataSource dataSource) { case BYTE -> new ByteFieldDataGenerator(fieldName, dataSource); case DOUBLE -> new DoubleFieldDataGenerator(fieldName, dataSource); case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource); + case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource); + case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource); + case COUNTED_KEYWORD -> new CountedKeywordFieldDataGenerator(fieldName, dataSource); }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java index df28282fca407..beef9fb4dd799 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java @@ -54,6 +54,14 @@ default DataSourceResponse.ArrayWrapper handle(DataSourceRequest.ArrayWrapper re return null; } + default DataSourceResponse.RepeatingWrapper handle(DataSourceRequest.RepeatingWrapper request) { + return null; + } + + default DataSourceResponse.MalformedWrapper handle(DataSourceRequest.MalformedWrapper request) { + return null; + } + default DataSourceResponse.ChildFieldGenerator handle(DataSourceRequest.ChildFieldGenerator request) { return null; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java index
d77925f097b5a..0e6e796ff6d54 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java @@ -15,6 +15,7 @@ import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping; import java.util.Set; +import java.util.function.Supplier; public interface DataSourceRequest<TResponse extends DataSourceResponse> { TResponse accept(DataSourceHandler handler); @@ -85,6 +86,18 @@ public DataSourceResponse.ArrayWrapper accept(DataSourceHandler handler) { } } + record RepeatingWrapper() implements DataSourceRequest<DataSourceResponse.RepeatingWrapper> { + public DataSourceResponse.RepeatingWrapper accept(DataSourceHandler handler) { + return handler.handle(this); + } + } + + record MalformedWrapper(Supplier<Object> malformedValues) implements DataSourceRequest<DataSourceResponse.MalformedWrapper> { + public DataSourceResponse.MalformedWrapper accept(DataSourceHandler handler) { + return handler.handle(this); + } + } + record ChildFieldGenerator(DataGeneratorSpecification specification) implements DataSourceRequest<DataSourceResponse.ChildFieldGenerator> { diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java index fa8f56b3e071b..e9f1adb98d248 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java @@ -39,6 +39,10 @@ record NullWrapper(Function<Supplier<Object>, Supplier<Object>> wrapper) impleme record ArrayWrapper(Function<Supplier<Object>, Supplier<Object>> wrapper) implements DataSourceResponse {} + record RepeatingWrapper(Function<Supplier<Object>, Supplier<Object>> wrapper) implements DataSourceResponse {} + + record MalformedWrapper(Function<Supplier<Object>, Supplier<Object>> wrapper) implements DataSourceResponse {} + interface ChildFieldGenerator extends DataSourceResponse { int generateChildFieldCount(); diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index db13867fe71ad..bf99ab71d0149 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -11,7 +11,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.ObjectMapper; -import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping; +import org.elasticsearch.logsdb.datageneration.FieldType; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -32,7 +32,9 @@ public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReques return new DataSourceResponse.LeafMappingParametersGenerator(switch (request.fieldType()) { case KEYWORD -> keywordMapping(request, map); - case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, UNSIGNED_LONG -> plain(map); + case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> numberMapping(map, request.fieldType()); + case SCALED_FLOAT -> scaledFloatMapping(map); + case COUNTED_KEYWORD -> plain(Map.of("index", ESTestCase.randomBoolean())); }); } @@ -40,6 +42,30 @@ private Supplier<Map<String, Object>> plain(Map<String, Object> injected) { return () -> injected; } + private Supplier<Map<String, Object>> numberMapping(Map<String, Object> injected, FieldType fieldType) { +
return () -> { + if (ESTestCase.randomBoolean()) { + injected.put("ignore_malformed", ESTestCase.randomBoolean()); + } + if (ESTestCase.randomDouble() <= 0.2) { + Number value = switch (fieldType) { + case LONG -> ESTestCase.randomLong(); + case UNSIGNED_LONG -> ESTestCase.randomNonNegativeLong(); + case INTEGER -> ESTestCase.randomInt(); + case SHORT -> ESTestCase.randomShort(); + case BYTE -> ESTestCase.randomByte(); + case DOUBLE -> ESTestCase.randomDouble(); + case FLOAT, HALF_FLOAT -> ESTestCase.randomFloat(); + default -> throw new IllegalStateException("Unexpected field type"); + }; + + injected.put("null_value", value); + } + + return injected; + }; + } + private Supplier<Map<String, Object>> keywordMapping( DataSourceRequest.LeafMappingParametersGenerator request, Map<String, Object> injected @@ -49,11 +75,7 @@ private Supplier<Map<String, Object>> keywordMapping( // We only add copy_to to keywords because we get into trouble with numeric fields that are copied to dynamic fields. // If first copied value is numeric, dynamic field is created with numeric field type and then copy of text values fail. // Actual value being copied does not influence the core logic of copy_to anyway. - // - // TODO - // We don't use copy_to on fields that are inside an object with dynamic: strict - // because we'll hit https://github.com/elastic/elasticsearch/issues/113049. - if (request.dynamicMapping() != DynamicMapping.FORBIDDEN && ESTestCase.randomDouble() <= 0.05) { + if (ESTestCase.randomDouble() <= 0.05) { var options = request.eligibleCopyToFields() .stream() .filter(f -> f.equals(request.fieldName()) == false) @@ -67,6 +89,9 @@ private Supplier<Map<String, Object>> keywordMapping( if (ESTestCase.randomDouble() <= 0.2) { injected.put("ignore_above", ESTestCase.randomIntBetween(1, 100)); } + if (ESTestCase.randomDouble() <= 0.2) { + injected.put("null_value", ESTestCase.randomAlphaOfLengthBetween(0, 10)); + } return injected; }; @@ -75,6 +100,15 @@ private Supplier<Map<String, Object>> scaledFloatMapping(Map<String, Object> injected) { return () -> { injected.put("scaling_factor", ESTestCase.randomFrom(10, 1000, 100000, 100.5)); + + if (ESTestCase.randomDouble() <= 0.2) { + injected.put("null_value", ESTestCase.randomFloat()); + } + + if (ESTestCase.randomBoolean()) { + injected.put("ignore_malformed", ESTestCase.randomBoolean()); + } + return injected; }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultWrappersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultWrappersHandler.java index 8af26c28ef5b3..ac686e0201327 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultWrappersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultWrappersHandler.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.ESTestCase; +import java.util.HashSet; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.IntStream; @@ -26,6 +27,16 @@ public DataSourceResponse.ArrayWrapper handle(DataSourceRequest.ArrayWrapper ign return new DataSourceResponse.ArrayWrapper(wrapInArray()); } + + @Override + public DataSourceResponse.RepeatingWrapper handle(DataSourceRequest.RepeatingWrapper ignored) { + return new DataSourceResponse.RepeatingWrapper(repeatValues()); + } + + @Override + public DataSourceResponse.MalformedWrapper handle(DataSourceRequest.MalformedWrapper request) { + return new
DataSourceResponse.MalformedWrapper(injectMalformed(request.malformedValues())); + } + private static Function<Supplier<Object>, Supplier<Object>> injectNulls() { // Inject some nulls but majority of data should be non-null (as it likely is in reality). return (values) -> () -> ESTestCase.randomDouble() <= 0.05 ? null : values.get(); @@ -41,4 +52,23 @@ private static Function<Supplier<Object>, Supplier<Object>> wrapInArray() { return values.get(); }; } + + private static Function<Supplier<Object>, Supplier<Object>> repeatValues() { + return (values) -> { + HashSet<Object> previousValues = new HashSet<>(); + return () -> { + if (previousValues.size() > 0 && ESTestCase.randomBoolean()) { + return ESTestCase.randomFrom(previousValues); + } else { + var value = values.get(); + previousValues.add(value); + return value; + } + }; + }; + } + + private static Function<Supplier<Object>, Supplier<Object>> injectMalformed(Supplier<Object> malformedValues) { + return (values) -> () -> ESTestCase.randomDouble() <= 0.1 ? malformedValues.get() : values.get(); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ByteFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ByteFieldDataGenerator.java index 4ead8ffd0b718..809354c01e94c 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ByteFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ByteFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class ByteFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public ByteFieldDataGenerator(String fieldName, DataSource dataSource) { - var bytes = dataSource.get(new DataSourceRequest.ByteGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var bytes = dataSource.get(new DataSourceRequest.ByteGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> bytes.generator().get()); + this.valueGenerator = Wrappers.defaults(bytes::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed = Wrappers.defaultsWithMalformed(bytes::get, strings::get, dataSource); } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return valueGeneratorWithMalformed.get(); + } + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/CountedKeywordFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/CountedKeywordFieldDataGenerator.java new file mode 100644 index 0000000000000..64a40704a64ce --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/CountedKeywordFieldDataGenerator.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.logsdb.datageneration.fields.leaf; + +import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.datasource.DataSource; +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +public class CountedKeywordFieldDataGenerator implements FieldDataGenerator { + private final Supplier<Object> valueGenerator; + private final Set<Object> previousStrings = new HashSet<>(); + + public CountedKeywordFieldDataGenerator(String fieldName, DataSource dataSource) { + var strings = dataSource.get(new DataSourceRequest.StringGenerator()); + var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); + var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var repeats = dataSource.get(new DataSourceRequest.RepeatingWrapper()); + + this.valueGenerator = arrays.wrapper().compose(nulls.wrapper().compose(repeats.wrapper())).apply(() -> strings.generator().get()); + } + + @Override + public Object generateValue(Map<String, Object> fieldMapping) { + return valueGenerator.get(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DoubleFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DoubleFieldDataGenerator.java index cf2c4f6abdbf4..8cd72c93cb6d1 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DoubleFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DoubleFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class DoubleFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public DoubleFieldDataGenerator(String fieldName, DataSource dataSource) { - var doubles = dataSource.get(new DataSourceRequest.DoubleGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var doubles = dataSource.get(new DataSourceRequest.DoubleGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> doubles.generator().get()); + this.valueGenerator = Wrappers.defaults(doubles::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed = Wrappers.defaultsWithMalformed(doubles::get, strings::get, dataSource); } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return valueGeneratorWithMalformed.get(); + } + return valueGenerator.get(); } } diff --git
a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/FloatFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/FloatFieldDataGenerator.java index b59d5ceabb188..651c1e40e4100 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/FloatFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/FloatFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class FloatFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public FloatFieldDataGenerator(String fieldName, DataSource dataSource) { - var floats = dataSource.get(new DataSourceRequest.FloatGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var floats = dataSource.get(new DataSourceRequest.FloatGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> floats.generator().get()); + this.valueGenerator = Wrappers.defaults(floats::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed = Wrappers.defaultsWithMalformed(floats::get, strings::get, dataSource); } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return valueGeneratorWithMalformed.get(); + } + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/HalfFloatFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/HalfFloatFieldDataGenerator.java index e2ed299f1a4dc..35de92b093084 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/HalfFloatFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/HalfFloatFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class HalfFloatFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public HalfFloatFieldDataGenerator(String fieldName, DataSource dataSource) { - var halfFloats = dataSource.get(new DataSourceRequest.HalfFloatGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var halfFloats = dataSource.get(new DataSourceRequest.HalfFloatGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> halfFloats.generator().get()); + this.valueGenerator = Wrappers.defaults(halfFloats::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed = Wrappers.defaultsWithMalformed(halfFloats::get,
strings::get, dataSource); } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return valueGeneratorWithMalformed.get(); + } + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/IntegerFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/IntegerFieldDataGenerator.java index f2fe8ed8362e5..0d7d8194c4001 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/IntegerFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/IntegerFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class IntegerFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public IntegerFieldDataGenerator(String fieldName, DataSource dataSource) { - var ints = dataSource.get(new DataSourceRequest.IntegerGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var ints = dataSource.get(new DataSourceRequest.IntegerGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> ints.generator().get()); + this.valueGenerator = Wrappers.defaults(ints::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed = Wrappers.defaultsWithMalformed(ints::get, strings::get, dataSource); } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return valueGeneratorWithMalformed.get(); + } + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java index 8dc4d8b8767c4..4dbbc6b740dc2 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/KeywordFieldDataGenerator.java @@ -13,6 +13,7 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class KeywordFieldDataGenerator implements FieldDataGenerator { @@ -27,7 +28,7 @@ public KeywordFieldDataGenerator(String fieldName, DataSource dataSource) { } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java index f17610e501ed7..bd22d58af16a7 100644 ---
a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/LongFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class LongFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public LongFieldDataGenerator(String fieldName, DataSource dataSource) { - var longs = dataSource.get(new DataSourceRequest.LongGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var longs = dataSource.get(new DataSourceRequest.LongGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> longs.generator().get()); + this.valueGenerator = Wrappers.defaults(longs::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed = Wrappers.defaultsWithMalformed(longs::get, strings::get, dataSource); } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return valueGeneratorWithMalformed.get(); + } + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ScaledFloatFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ScaledFloatFieldDataGenerator.java index 008dd04179dcd..117682e739461 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ScaledFloatFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ScaledFloatFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class ScaledFloatFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public ScaledFloatFieldDataGenerator(String fieldName, DataSource dataSource) { - var doubles = dataSource.get(new DataSourceRequest.DoubleGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var doubles = dataSource.get(new DataSourceRequest.DoubleGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> doubles.generator().get()); + this.valueGenerator = Wrappers.defaults(doubles::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed = Wrappers.defaultsWithMalformed(doubles::get, strings::get, dataSource); } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return valueGeneratorWithMalformed.get(); + } + return valueGenerator.get();
} } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ShortFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ShortFieldDataGenerator.java index 85bff2c85e538..a7e50ffc6d0f5 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ShortFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/ShortFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class ShortFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public ShortFieldDataGenerator(String fieldName, DataSource dataSource) { - var shorts = dataSource.get(new DataSourceRequest.ShortGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var shorts = dataSource.get(new DataSourceRequest.ShortGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> shorts.generator().get()); + this.valueGenerator = Wrappers.defaults(shorts::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed = Wrappers.defaultsWithMalformed(shorts::get, strings::get, dataSource); } @Override - public Object generateValue() { + public Object generateValue(Map<String, Object> fieldMapping) { + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return valueGeneratorWithMalformed.get(); + } + return valueGenerator.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/UnsignedLongFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/UnsignedLongFieldDataGenerator.java index 329f684bef70d..18dd61f0c8ff1 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/UnsignedLongFieldDataGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/UnsignedLongFieldDataGenerator.java @@ -13,21 +13,28 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; +import java.util.Map; import java.util.function.Supplier; public class UnsignedLongFieldDataGenerator implements FieldDataGenerator { private final Supplier<Object> valueGenerator; + private final Supplier<Object> valueGeneratorWithMalformed; public UnsignedLongFieldDataGenerator(String fieldName, DataSource dataSource) { - var unsignedLongs = dataSource.get(new DataSourceRequest.UnsignedLongGenerator()); - var nulls = dataSource.get(new DataSourceRequest.NullWrapper()); - var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper()); + var unsignedLongs = dataSource.get(new DataSourceRequest.UnsignedLongGenerator()).generator(); - this.valueGenerator = arrays.wrapper().compose(nulls.wrapper()).apply(() -> unsignedLongs.generator().get()); + this.valueGenerator = Wrappers.defaults(unsignedLongs::get, dataSource); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.valueGeneratorWithMalformed =
Wrappers.defaultsWithMalformed(unsignedLongs::get, strings::get, dataSource);
     }
 
     @Override
-    public Object generateValue() {
+    public Object generateValue(Map<String, Object> fieldMapping) {
+        if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) {
+            return valueGeneratorWithMalformed.get();
+        }
+
         return valueGenerator.get();
     }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/Wrappers.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/Wrappers.java
new file mode 100644
index 0000000000000..74be106620d46
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/Wrappers.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.logsdb.datageneration.fields.leaf;
+
+import org.elasticsearch.logsdb.datageneration.datasource.DataSource;
+import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest;
+
+import java.util.function.Supplier;
+
+public class Wrappers {
+    /**
+     * Applies default wrappers for raw values - adds nulls and wraps values in arrays.
+     * @return a supplier of raw values decorated with nulls and arrays
+     */
+    static Supplier<Object> defaults(Supplier<Object> rawValues, DataSource dataSource) {
+        var nulls = dataSource.get(new DataSourceRequest.NullWrapper());
+        var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper());
+
+        return arrays.wrapper().compose(nulls.wrapper()).apply(rawValues::get);
+    }
+
+    /**
+     * Applies default wrappers for raw values and also adds malformed values.
+     * @return a supplier of raw values decorated with malformed values, nulls and arrays
+     */
+    static Supplier<Object> defaultsWithMalformed(Supplier<Object> rawValues, Supplier<Object> malformedValues, DataSource dataSource) {
+        var nulls = dataSource.get(new DataSourceRequest.NullWrapper());
+        var malformed = dataSource.get(new DataSourceRequest.MalformedWrapper(malformedValues));
+        var arrays = dataSource.get(new DataSourceRequest.ArrayWrapper());
+
+        return arrays.wrapper().compose(nulls.wrapper()).compose(malformed.wrapper()).apply(rawValues::get);
+    }
+}
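To make the wrapper composition above concrete, here is a minimal, self-contained sketch of how the composed supplier behaves. The wrapper implementations and the probabilities are illustrative stand-ins, not the real DataSource handlers:

import java.util.Arrays;
import java.util.Random;
import java.util.function.Function;
import java.util.function.Supplier;

class WrapperCompositionSketch {
    public static void main(String[] args) {
        Random random = new Random();
        Supplier<Object> rawValues = random::nextLong;

        // Stand-ins for the DataSource-provided wrappers; the probabilities are made up.
        Function<Supplier<Object>, Supplier<Object>> malformed = s -> () -> random.nextInt(10) == 0 ? "not-a-long" : s.get();
        Function<Supplier<Object>, Supplier<Object>> nulls = s -> () -> random.nextInt(10) == 0 ? null : s.get();
        Function<Supplier<Object>, Supplier<Object>> arrays = s -> () -> random.nextBoolean() ? Arrays.asList(s.get(), s.get()) : s.get();

        // compose() applies right to left: the malformed wrapper decorates the raw supplier first,
        // then nulls, then arrays - mirroring defaultsWithMalformed above.
        Supplier<Object> composed = arrays.compose(nulls).compose(malformed).apply(rawValues);
        System.out.println(composed.get()); // e.g. 42, null, "not-a-long", or [7, null]
    }
}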
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/DynamicFieldMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/DynamicFieldMatcher.java
index 5bcf53cfa5c9a..7957ed1eebe4d 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/DynamicFieldMatcher.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/DynamicFieldMatcher.java
@@ -10,6 +10,7 @@
 package org.elasticsearch.logsdb.datageneration.matchers.source;
 
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.logsdb.datageneration.matchers.ListEqualMatcher;
 import org.elasticsearch.logsdb.datageneration.matchers.MatchResult;
 import org.elasticsearch.xcontent.XContentBuilder;
 
@@ -46,9 +47,12 @@ class DynamicFieldMatcher {
      * @return {@link MatchResult} of comparing the field values. Values that don't need special
      * treatment by this matcher are delegated to the generic list matcher.
      */
-    public Optional<MatchResult> match(List<Object> actual, List<Object> expected) {
+    public MatchResult match(List<Object> actual, List<Object> expected) {
         if (expected == null) {
-            return Optional.empty();
+            expected = List.of();
+        }
+        if (actual == null) {
+            actual = List.of();
         }
 
         // Floating point values are always mapped as float with dynamic mapping.
@@ -59,7 +63,7 @@ public MatchResult match(List<Object> actual, List<Object> expected) {
             var normalizedActual = normalizeDoubles(actual);
             var normalizedExpected = normalizeDoubles(expected);
 
-            var matchResult = normalizedActual.equals(normalizedExpected)
+            return normalizedActual.equals(normalizedExpected)
                 ? MatchResult.match()
                 : MatchResult.noMatch(
                     formatErrorMessage(
@@ -71,10 +75,9 @@ public MatchResult match(List<Object> actual, List<Object> expected) {
                         + prettyPrintCollections(normalizedActual, normalizedExpected)
                     )
                 );
-            return Optional.of(matchResult);
         }
 
-        return Optional.empty();
+        return matchWithGenericMatcher(actual, expected);
     }
 
     private static Set<Float> normalizeDoubles(List<Object> values) {
@@ -85,4 +88,18 @@ private static Set<Float> normalizeDoubles(List<Object> values) {
         Function<Object, Float> toFloat = (o) -> o instanceof Number n ? n.floatValue() : Float.parseFloat((String) o);
         return values.stream().filter(Objects::nonNull).map(toFloat).collect(Collectors.toSet());
     }
+
+    private MatchResult matchWithGenericMatcher(List<Object> actualValues, List<Object> expectedValues) {
+        var genericListMatcher = new ListEqualMatcher(
+            actualMappings,
+            actualSettings,
+            expectedMappings,
+            expectedSettings,
+            SourceTransforms.normalizeValues(actualValues),
+            SourceTransforms.normalizeValues(expectedValues),
+            true
+        );
+
+        return genericListMatcher.match();
+    }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java
index 960cc38e55c82..e2acea4ad91de 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java
@@ -19,7 +19,7 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.function.Function;
+import java.util.TreeMap;
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage;
@@ -28,13 +28,13 @@
 interface FieldSpecificMatcher {
     MatchResult match(List<Object> actual, List<Object> expected, Map<String, Object> actualMapping, Map<String, Object> expectedMapping);
 
-    class HalfFloatMatcher implements FieldSpecificMatcher {
+    class CountedKeywordMatcher implements FieldSpecificMatcher {
         private final XContentBuilder actualMappings;
         private final Settings.Builder actualSettings;
         private final XContentBuilder expectedMappings;
         private final Settings.Builder expectedSettings;
 
-        HalfFloatMatcher(
+        CountedKeywordMatcher(
             XContentBuilder actualMappings,
             Settings.Builder actualSettings,
             XContentBuilder expectedMappings,
@@ -53,35 +53,83 @@ public MatchResult match(
             Map<String, Object> actualMapping,
             Map<String, Object> expectedMapping
         ) {
-            var actualHalfFloatBytes = normalize(actual);
-            var expectedHalfFloatBytes = normalize(expected);
+            var actualNormalized = normalize(actual);
+            var expectedNormalized = normalize(expected);
 
-            return actualHalfFloatBytes.equals(expectedHalfFloatBytes)
-                ?
MatchResult.match() - : MatchResult.noMatch( + Map counts = new TreeMap<>(); + for (String value : actualNormalized) { + counts.put(value, counts.getOrDefault(value, 0) + 1); + } + for (String value : expectedNormalized) { + int newCount = counts.getOrDefault(value, 0) - 1; + if (newCount == 0) { + counts.remove(value); + } else { + counts.put(value, newCount); + } + } + + if (counts.isEmpty() == false) { + var extraValuesMessage = new StringBuilder("extra values: "); + for (var entry : counts.entrySet()) { + extraValuesMessage.append('\n').append(entry.getKey()).append(": ").append(entry.getValue()); + } + + return MatchResult.noMatch( formatErrorMessage( actualMappings, actualSettings, expectedMappings, expectedSettings, - "Values of type [half_float] don't match after normalization, normalized " - + prettyPrintCollections(actualHalfFloatBytes, expectedHalfFloatBytes) + "Values of type [counted_keyword] don't match, " + + extraValuesMessage + + ".\n" + + prettyPrintCollections(actualNormalized, expectedNormalized) ) ); + } + + return MatchResult.match(); } - private static Set normalize(List values) { - if (values == null) { - return Set.of(); - } + private static List normalize(List values) { + return values.stream().filter(Objects::nonNull).map(it -> (String) it).toList(); + } + } - Function toFloat = (o) -> o instanceof Number n ? n.floatValue() : Float.parseFloat((String) o); - return values.stream() - .filter(Objects::nonNull) - .map(toFloat) - // Based on logic in NumberFieldMapper - .map(HalfFloatPoint::halfFloatToSortableShort) - .collect(Collectors.toSet()); + class HalfFloatMatcher extends GenericMappingAwareMatcher { + HalfFloatMatcher( + XContentBuilder actualMappings, + Settings.Builder actualSettings, + XContentBuilder expectedMappings, + Settings.Builder expectedSettings + ) { + super("half_float", actualMappings, actualSettings, expectedMappings, expectedSettings); + } + + @Override + Object convert(Object value, Object nullValue) { + var nullValueShort = nullValue != null ? HalfFloatPoint.halfFloatToSortableShort(((Number) nullValue).floatValue()) : null; + + return switch (value) { + case null -> nullValueShort; + case Number n -> HalfFloatPoint.halfFloatToSortableShort(n.floatValue()); + case String s -> { + // Special case for number coercion from strings + if (s.isEmpty()) { + yield nullValueShort; + } + + try { + var f = Float.parseFloat(s); + yield HalfFloatPoint.halfFloatToSortableShort(f); + } catch (NumberFormatException e) { + // Malformed, leave it be and match as is + yield s; + } + } + default -> value; + }; } } @@ -110,23 +158,23 @@ public MatchResult match( Map actualMapping, Map expectedMapping ) { - var scalingFactor = actualMapping.get("scaling_factor"); - var expectedScalingFactor = expectedMapping.get("scaling_factor"); - if (Objects.equals(scalingFactor, expectedScalingFactor) == false) { - throw new IllegalStateException("Scaling factor for scaled_float field does not match between actual and expected mapping"); - } + var scalingFactor = FieldSpecificMatcher.getMappingParameter("scaling_factor", actualMapping, expectedMapping); assert scalingFactor instanceof Number; double scalingFactorDouble = ((Number) scalingFactor).doubleValue(); + + var nullValue = (Number) FieldSpecificMatcher.getNullValue(actualMapping, expectedMapping); + // It is possible that we receive a mix of reduced precision values and original values. // F.e. 
in case of `synthetic_source_keep: "arrays"` in nested objects only arrays are preserved as is // and therefore any singleton values have reduced precision. // Therefore, we need to match either an exact value or a normalized value. - var expectedNormalized = normalizeValues(expected); - var actualNormalized = normalizeValues(actual); - for (var expectedValue : expectedNormalized) { - if (actualNormalized.contains(expectedValue) == false - && actualNormalized.contains(encodeDecodeWithPrecisionLoss(expectedValue, scalingFactorDouble)) == false) { + var expectedNumbers = numbers(expected, nullValue); + var actualNumbers = numbers(actual, nullValue); + + for (var expectedValue : expectedNumbers) { + if (actualNumbers.contains(expectedValue) == false + && actualNumbers.contains(encodeDecodeWithPrecisionLoss(expectedValue, scalingFactorDouble)) == false) { return MatchResult.noMatch( formatErrorMessage( actualMappings, @@ -134,7 +182,24 @@ public MatchResult match( expectedMappings, expectedSettings, "Values of type [scaled_float] don't match after normalization, normalized " - + prettyPrintCollections(actualNormalized, expectedNormalized) + + prettyPrintCollections(actualNumbers, expectedNumbers) + ) + ); + } + } + + var expectedNotNumbers = notNumbers(expected); + var actualNotNumbers = notNumbers(actual); + for (var expectedValue : expectedNotNumbers) { + if (actualNotNumbers.contains(expectedValue) == false) { + return MatchResult.noMatch( + formatErrorMessage( + actualMappings, + actualSettings, + expectedMappings, + expectedSettings, + "Malformed values of [scaled_float] field don't match, values:" + + prettyPrintCollections(actualNotNumbers, expectedNotNumbers) ) ); } @@ -143,18 +208,49 @@ public MatchResult match( return MatchResult.match(); } - private Double encodeDecodeWithPrecisionLoss(double value, double scalingFactor) { - // Based on logic in ScaledFloatFieldMapper - var encoded = Math.round(value * scalingFactor); - return encoded / scalingFactor; + private Set numbers(List values, Number nullValue) { + if (values == null) { + return Set.of(); + } + + return values.stream() + .map(v -> convertNumber(v, nullValue)) + .filter(Objects::nonNull) + .map(ScaledFloatMatcher::toDouble) + .collect(Collectors.toSet()); } - private static Set normalizeValues(List values) { + private static Object convertNumber(Object value, Number nullValue) { + if (value == null) { + return nullValue; + } + // Special case for number coercion from strings + if (value instanceof String s && s.isEmpty()) { + return nullValue; + } + if (value instanceof Number n) { + return n; + } + + return null; + } + + private Set notNumbers(List values) { if (values == null) { return Set.of(); } - return values.stream().filter(Objects::nonNull).map(ScaledFloatMatcher::toDouble).collect(Collectors.toSet()); + return values.stream() + .filter(Objects::nonNull) + .filter(v -> v instanceof Number == false) + .filter(v -> v instanceof String == false || ((String) v).isEmpty() == false) + .collect(Collectors.toSet()); + } + + private Double encodeDecodeWithPrecisionLoss(double value, double scalingFactor) { + // Based on logic in ScaledFloatFieldMapper + var encoded = Math.round(value * scalingFactor); + return encoded / scalingFactor; } private static double toDouble(Object value) { @@ -162,18 +258,118 @@ private static double toDouble(Object value) { } } - class UnsignedLongMatcher implements FieldSpecificMatcher { + class UnsignedLongMatcher extends GenericMappingAwareMatcher { + UnsignedLongMatcher( + XContentBuilder 
actualMappings, + Settings.Builder actualSettings, + XContentBuilder expectedMappings, + Settings.Builder expectedSettings + ) { + super("unsigned_long", actualMappings, actualSettings, expectedMappings, expectedSettings); + } + + @Override + Object convert(Object value, Object nullValue) { + var nullValueBigInt = nullValue != null ? BigInteger.valueOf(((Number) nullValue).longValue()) : null; + + return switch (value) { + case null -> nullValueBigInt; + case String s -> { + // Special case for number coercion from strings + if (s.isEmpty()) { + yield nullValueBigInt; + } + + yield s; + } + case Long l -> BigInteger.valueOf(l); + default -> value; + }; + + } + } + + class KeywordMatcher extends GenericMappingAwareMatcher { + KeywordMatcher( + XContentBuilder actualMappings, + Settings.Builder actualSettings, + XContentBuilder expectedMappings, + Settings.Builder expectedSettings + ) { + super("keyword", actualMappings, actualSettings, expectedMappings, expectedSettings); + } + + @Override + Object convert(Object value, Object nullValue) { + if (value == null) { + return nullValue; + } + + return value; + } + } + + class NumberMatcher extends GenericMappingAwareMatcher { + NumberMatcher( + String fieldType, + XContentBuilder actualMappings, + Settings.Builder actualSettings, + XContentBuilder expectedMappings, + Settings.Builder expectedSettings + ) { + super(fieldType, actualMappings, actualSettings, expectedMappings, expectedSettings); + } + + @Override + Object convert(Object value, Object nullValue) { + if (value == null) { + return nullValue; + } + // Special case for number coercion from strings + if (value instanceof String s && s.isEmpty()) { + return nullValue; + } + + return value; + } + } + + // TODO basic implementation only right now + class DateMatcher extends GenericMappingAwareMatcher { + DateMatcher( + XContentBuilder actualMappings, + Settings.Builder actualSettings, + XContentBuilder expectedMappings, + Settings.Builder expectedSettings + ) { + super("date", actualMappings, actualSettings, expectedMappings, expectedSettings); + } + + @Override + Object convert(Object value, Object nullValue) { + return value; + } + } + + /** + * Generic matcher that supports common matching logic like null values. + */ + abstract class GenericMappingAwareMatcher implements FieldSpecificMatcher { + private final String fieldType; + private final XContentBuilder actualMappings; private final Settings.Builder actualSettings; private final XContentBuilder expectedMappings; private final Settings.Builder expectedSettings; - UnsignedLongMatcher( + GenericMappingAwareMatcher( + String fieldType, XContentBuilder actualMappings, Settings.Builder actualSettings, XContentBuilder expectedMappings, Settings.Builder expectedSettings ) { + this.fieldType = fieldType; this.actualMappings = actualMappings; this.actualSettings = actualSettings; this.expectedMappings = expectedMappings; @@ -181,14 +377,17 @@ class UnsignedLongMatcher implements FieldSpecificMatcher { } @Override + @SuppressWarnings("unchecked") public MatchResult match( List actual, List expected, Map actualMapping, Map expectedMapping ) { - var expectedNormalized = normalize(expected); - var actualNormalized = normalize(actual); + var nullValue = getNullValue(actualMapping, expectedMapping); + + var expectedNormalized = normalize(expected, nullValue); + var actualNormalized = normalize(actual, nullValue); return actualNormalized.equals(expectedNormalized) ? 
MatchResult.match() @@ -198,29 +397,35 @@ public MatchResult match( actualSettings, expectedMappings, expectedSettings, - "Values of type [unsigned_long] don't match after normalization, normalized " + "Values of type [" + + fieldType + + "] don't match after normalization, normalized " + prettyPrintCollections(actualNormalized, expectedNormalized) ) ); } - private static Set normalize(List values) { + private Set normalize(List values, Object nullValue) { if (values == null) { return Set.of(); } - return values.stream().filter(Objects::nonNull).map(UnsignedLongMatcher::toBigInteger).collect(Collectors.toSet()); + return values.stream().map(v -> convert(v, nullValue)).filter(Objects::nonNull).collect(Collectors.toSet()); } - private static BigInteger toBigInteger(Object value) { - if (value instanceof String s) { - return new BigInteger(s, 10); - } - if (value instanceof Long l) { - return BigInteger.valueOf(l); - } + abstract Object convert(Object value, Object nullValue); + } + + private static Object getNullValue(Map actualMapping, Map expectedMapping) { + return getMappingParameter("null_value", actualMapping, expectedMapping); + } - return (BigInteger) value; + private static Object getMappingParameter(String name, Map actualMapping, Map expectedMapping) { + var actualValue = actualMapping.get(name); + var expectedValue = expectedMapping.get(name); + if (Objects.equals(actualValue, expectedValue) == false) { + throw new IllegalStateException("[" + name + "] parameter does not match between actual and expected mapping"); } + return actualValue; } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java index eb62598712f03..7390f846b017a 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java @@ -13,14 +13,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.logsdb.datageneration.matchers.GenericEqualsMatcher; -import org.elasticsearch.logsdb.datageneration.matchers.ListEqualMatcher; import org.elasticsearch.logsdb.datageneration.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; import static org.elasticsearch.logsdb.datageneration.matchers.Messages.prettyPrintCollections; @@ -51,14 +50,52 @@ public SourceMatcher( .v2(); this.expectedNormalizedMapping = MappingTransforms.normalizeMapping(expectedMappingAsMap); - this.fieldSpecificMatchers = Map.of( - "half_float", - new FieldSpecificMatcher.HalfFloatMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings), - "scaled_float", - new FieldSpecificMatcher.ScaledFloatMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings), - "unsigned_long", - new FieldSpecificMatcher.UnsignedLongMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings) - ); + this.fieldSpecificMatchers = new HashMap<>() { + { + put("keyword", new FieldSpecificMatcher.KeywordMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings)); + put("date", 
new FieldSpecificMatcher.DateMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings)); + put( + "long", + new FieldSpecificMatcher.NumberMatcher("long", actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "unsigned_long", + new FieldSpecificMatcher.UnsignedLongMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "integer", + new FieldSpecificMatcher.NumberMatcher("integer", actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "short", + new FieldSpecificMatcher.NumberMatcher("short", actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "byte", + new FieldSpecificMatcher.NumberMatcher("byte", actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "double", + new FieldSpecificMatcher.NumberMatcher("double", actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "float", + new FieldSpecificMatcher.NumberMatcher("float", actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "half_float", + new FieldSpecificMatcher.HalfFloatMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "scaled_float", + new FieldSpecificMatcher.ScaledFloatMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + put( + "counted_keyword", + new FieldSpecificMatcher.CountedKeywordMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings) + ); + } + }; this.dynamicFieldMatcher = new DynamicFieldMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings); } @@ -100,18 +137,7 @@ private MatchResult compareSource(Map> actual, Map> actual, Map matchWithFieldSpecificMatcher(String fieldName, List actualValues, List expectedValues) { + private MatchResult matchWithFieldSpecificMatcher(String fieldName, List actualValues, List expectedValues) { var actualFieldMapping = actualNormalizedMapping.get(fieldName); if (actualFieldMapping == null) { if (expectedNormalizedMapping.get(fieldName) != null @@ -159,42 +185,14 @@ private Optional matchWithFieldSpecificMatcher(String fieldName, Li } } - if (sourceMatchesExactly(expectedFieldMapping, expectedValues)) { - return Optional.empty(); - } - var fieldSpecificMatcher = fieldSpecificMatchers.get(actualFieldType); - if (fieldSpecificMatcher == null) { - return Optional.empty(); - } + assert fieldSpecificMatcher != null : "Missing matcher for field type [" + actualFieldType + "]"; - MatchResult matched = fieldSpecificMatcher.match( + return fieldSpecificMatcher.match( actualValues, expectedValues, actualFieldMapping.mappingParameters(), expectedFieldMapping.mappingParameters() ); - return Optional.of(matched); - } - - // Checks for scenarios when source is stored exactly and therefore can be compared without special logic. 
-    private boolean sourceMatchesExactly(MappingTransforms.FieldMapping mapping, List<Object> expectedValues) {
-        return mapping.parentMappingParameters().stream().anyMatch(m -> m.getOrDefault("enabled", "true").equals("false"))
-            || mapping.mappingParameters().getOrDefault("synthetic_source_keep", "none").equals("all")
-            || expectedValues.size() > 1 && mapping.mappingParameters().getOrDefault("synthetic_source_keep", "none").equals("arrays");
-    }
-
-    private MatchResult matchWithGenericMatcher(List<Object> actualValues, List<Object> expectedValues) {
-        var genericListMatcher = new ListEqualMatcher(
-            actualMappings,
-            actualSettings,
-            expectedMappings,
-            expectedSettings,
-            SourceTransforms.normalizeValues(actualValues),
-            SourceTransforms.normalizeValues(expectedValues),
-            true
-        );
-
-        return genericListMatcher.match();
-    }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java
index 0a4c99eb8b52a..74db1147f23b8 100644
--- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java
+++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java
@@ -42,16 +42,12 @@ public class MockPluginsService extends PluginsService {
      * @param classpathPlugins Plugins that exist in the classpath which should be loaded
      */
     public MockPluginsService(Settings settings, Environment environment, Collection<Class<? extends Plugin>> classpathPlugins) {
-        super(
-            settings,
-            environment.configFile(),
-            new PluginsLoader(Collections.emptySet(), Collections.emptySet(), Collections.emptyMap())
-        );
+        super(settings, environment.configDir(), new PluginsLoader(Collections.emptySet(), Collections.emptySet(), Collections.emptyMap()));
 
         List<LoadedPlugin> pluginsLoaded = new ArrayList<>();
 
         for (Class<? extends Plugin> pluginClass : classpathPlugins) {
-            Plugin plugin = loadPlugin(pluginClass, settings, environment.configFile());
+            Plugin plugin = loadPlugin(pluginClass, settings, environment.configDir());
             PluginDescriptor pluginInfo = new PluginDescriptor(
                 pluginClass.getName(),
                 "classpath plugin",
diff --git a/test/framework/src/main/java/org/elasticsearch/search/ErrorTraceHelper.java b/test/framework/src/main/java/org/elasticsearch/search/ErrorTraceHelper.java
new file mode 100644
index 0000000000000..a9fa5ba36fde0
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/search/ErrorTraceHelper.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.search;
+
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.test.InternalTestCluster;
+import org.elasticsearch.transport.TransportMessageListener;
+import org.elasticsearch.transport.TransportService;
+
+import java.util.Optional;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.BooleanSupplier;
+
+/**
+ * Utilities around testing the `error_trace` message header in search.
+ */
+public enum ErrorTraceHelper {
+    ;
+
+    public static BooleanSupplier setupErrorTraceListener(InternalTestCluster internalCluster) {
+        final AtomicBoolean transportMessageHasStackTrace = new AtomicBoolean(false);
+        internalCluster.getDataNodeInstances(TransportService.class).forEach(ts -> ts.addMessageListener(new TransportMessageListener() {
+            @Override
+            public void onResponseSent(long requestId, String action, Exception error) {
+                TransportMessageListener.super.onResponseSent(requestId, action, error);
+                if (action.startsWith("indices:data/read/search")) {
+                    Optional<Throwable> throwable = ExceptionsHelper.unwrapCausesAndSuppressed(error, t -> t.getStackTrace().length > 0);
+                    transportMessageHasStackTrace.set(throwable.isPresent());
+                }
+            }
+        }));
+        return transportMessageHasStackTrace::get;
+    }
+}
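For context, a sketch of how this helper might be used from an ESIntegTestCase subclass. The test name, index name, failing search, and final assertion are illustrative assumptions, not part of this change:

// Hypothetical integration test: verifies that, without error_trace, data nodes
// strip stack traces from search transport responses.
public void testStackTraceIsAbsentByDefault() {
    BooleanSupplier transportMessageHasStackTrace = ErrorTraceHelper.setupErrorTraceListener(internalCluster());

    // Run a search that fails on the data nodes (index setup elided), without sending error_trace.
    expectThrows(SearchPhaseExecutionException.class, () -> prepareSearch("test").get());

    assertFalse(transportMessageHasStackTrace.getAsBoolean());
}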
diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
index 79c61cacb58eb..42b11173a3b19 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
@@ -9,7 +9,6 @@
 
 package org.elasticsearch.search;
 
-import org.elasticsearch.action.search.SearchShardTask;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.core.TimeValue;
@@ -23,6 +22,7 @@
 import org.elasticsearch.search.internal.ReaderContext;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchRequest;
+import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.telemetry.tracing.Tracer;
 import org.elasticsearch.threadpool.ThreadPool;
 
@@ -46,7 +46,7 @@ public static class TestPlugin extends Plugin {}
 
     private Consumer<SearchContext> onCreateSearchContext = context -> {};
 
-    private Function<SearchShardTask, SearchShardTask> onCheckCancelled = Function.identity();
+    private Function<CancellableTask, CancellableTask> onCheckCancelled = Function.identity();
 
     /** Throw an {@link AssertionError} if there are still in-flight contexts.
*/
     public static void assertNoInFlightContext() {
@@ -132,7 +132,7 @@ public void setOnCreateSearchContext(Consumer<SearchContext> onCreateSearchConte
     protected SearchContext createContext(
         ReaderContext readerContext,
         ShardSearchRequest request,
-        SearchShardTask task,
+        CancellableTask task,
         ResultsType resultsType,
         boolean includeAggregations
     ) throws IOException {
@@ -154,12 +154,12 @@ public SearchContext createSearchContext(ShardSearchRequest request, TimeValue t
         return searchContext;
     }
 
-    public void setOnCheckCancelled(Function<SearchShardTask, SearchShardTask> onCheckCancelled) {
+    public void setOnCheckCancelled(Function<CancellableTask, CancellableTask> onCheckCancelled) {
         this.onCheckCancelled = onCheckCancelled;
     }
 
     @Override
-    protected void checkCancelled(SearchShardTask task) {
+    protected void checkCancelled(CancellableTask task) {
         super.checkCancelled(onCheckCancelled.apply(task));
     }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
index 9e2dee4d94212..d034e6e6679c1 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
@@ -129,6 +129,7 @@
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer;
+import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
 import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
 import org.elasticsearch.search.aggregations.metrics.MultiValueAggregation;
@@ -149,6 +150,7 @@
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchRequest;
 import org.elasticsearch.search.internal.SubSearchContext;
+import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.InternalAggregationTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
@@ -251,29 +253,12 @@ protected List<SearchPlugin> getSearchPlugins() {
         return List.of();
     }
 
-    /**
-     * Deprecated - this will be made private in a future update
-     */
-    @Deprecated
-    protected <A extends Aggregator> A createAggregator(
-        AggregationBuilder aggregationBuilder,
-        IndexReader indexReader,
-        MappedFieldType...
fieldTypes - ) throws IOException { - return createAggregator(aggregationBuilder, createAggregationContext(indexReader, new MatchAllDocsQuery(), fieldTypes)); - } - protected A createAggregator(AggregationBuilder aggregationBuilder, AggregationContext context) throws IOException { return createAggregator(new AggregatorFactories.Builder().addAggregator(aggregationBuilder), context); } - /** - * Deprecated - this will be made private in a future update - */ - @Deprecated - protected A createAggregator(AggregatorFactories.Builder builder, AggregationContext context) - throws IOException { + private A createAggregator(AggregatorFactories.Builder builder, AggregationContext context) throws IOException { Aggregator[] aggregators = builder.build(context, null).createTopLevelAggregators(); assertThat(aggregators.length, equalTo(1)); @SuppressWarnings("unchecked") @@ -310,10 +295,7 @@ protected AggregationContext createAggregationContext(IndexReader indexReader, Q * While {@linkplain AggregationContext} is {@link Releasable} the caller is * not responsible for releasing it. Instead, it is released automatically in * in {@link #cleanupReleasables()}. - * - * Deprecated - this will be made private in a future update */ - @Deprecated protected AggregationContext createAggregationContext( IndexReader indexReader, IndexSettings indexSettings, @@ -346,6 +328,56 @@ private AggregationContext createAggregationContext( int maxBucket, boolean isInSortOrderExecutionRequired, MappedFieldType... fieldTypes + ) { + return createAggregationContext( + searcher, + indexSettings, + query, + breakerService, + bytesToPreallocate, + maxBucket, + isInSortOrderExecutionRequired, + () -> false, + fieldTypes + ); + } + + /** + * Creates an aggregation context that will randomly report that the query has been cancelled + */ + private AggregationContext createCancellingAggregationContext( + IndexSearcher searcher, + IndexSettings indexSettings, + Query query, + CircuitBreakerService breakerService, + long bytesToPreallocate, + int maxBucket, + boolean isInSortOrderExecutionRequired, + MappedFieldType... fieldTypes + ) { + return createAggregationContext( + searcher, + indexSettings, + query, + breakerService, + bytesToPreallocate, + maxBucket, + isInSortOrderExecutionRequired, + () -> ESTestCase.random().nextInt(20) == 0, + fieldTypes + ); + } + + private AggregationContext createAggregationContext( + IndexSearcher searcher, + IndexSettings indexSettings, + Query query, + CircuitBreakerService breakerService, + long bytesToPreallocate, + int maxBucket, + boolean isInSortOrderExecutionRequired, + Supplier isCancelled, + MappedFieldType... 
fieldTypes
     ) {
         MappingLookup mappingLookup = MappingLookup.fromMappers(
             Mapping.EMPTY,
@@ -409,7 +441,7 @@ public Iterable dimensionFields() {
             bitsetFilterCache,
             randomInt(),
             () -> 0L,
-            () -> false,
+            isCancelled,
             q -> q,
             true,
             isInSortOrderExecutionRequired
@@ -536,9 +568,11 @@ protected <A extends InternalAggregation> A searchAndReduce(IndexReader reader,
         IndexSettings indexSettings = createIndexSettings();
         // First run it to find circuit breaker leaks on the aggregator
         runWithCrankyCircuitBreaker(indexSettings, searcher, aggTestConfig);
-        // Second run it to the end
         CircuitBreakerService breakerService = new NoneCircuitBreakerService();
-        return searchAndReduce(indexSettings, searcher, breakerService, aggTestConfig);
+        // Next, try with random cancellations, again looking for leaks
+        runWithCancellingConfig(indexSettings, searcher, breakerService, aggTestConfig);
+        // Finally, run it to the end
+        return searchAndReduce(indexSettings, searcher, breakerService, aggTestConfig, this::createAggregationContext);
     }
 
     /**
@@ -552,7 +586,7 @@ private void runWithCrankyCircuitBreaker(IndexSettings indexSettings, IndexSearc
         CircuitBreakerService crankyService = new CrankyCircuitBreakerService();
         for (int i = 0; i < 5; i++) {
             try {
-                searchAndReduce(indexSettings, searcher, crankyService, aggTestConfig);
+                searchAndReduce(indexSettings, searcher, crankyService, aggTestConfig, this::createAggregationContext);
             } catch (CircuitBreakingException e) {
                 // Circuit breaks from the cranky breaker are expected - it randomly fails, after all
                 assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
@@ -560,12 +594,43 @@ private void runWithCrankyCircuitBreaker(IndexSettings indexSettings, IndexSearc
             }
         }
 
+    private void runWithCancellingConfig(
+        IndexSettings indexSettings,
+        IndexSearcher searcher,
+        CircuitBreakerService breakerService,
+        AggTestConfig aggTestConfig
+    ) throws IOException {
+        for (int i = 0; i < 5; i++) {
+            try {
+                searchAndReduce(indexSettings, searcher, breakerService, aggTestConfig, this::createCancellingAggregationContext);
+            } catch (TaskCancelledException e) {
+                // We don't use expectThrows here because the randomizer might never report cancellation,
+                // but throwing is also a perfectly normal outcome.
+            }
+        }
+    }
+
+    @FunctionalInterface
+    public interface AggregationContextSupplier {
+        AggregationContext get(
+            IndexSearcher searcher,
+            IndexSettings indexSettings,
+            Query query,
+            CircuitBreakerService breakerService,
+            long bytesToPreallocate,
+            int maxBucket,
+            boolean isInSortOrderExecutionRequired,
+            MappedFieldType... fieldTypes
+        );
+    }
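The two private factory methods above satisfy this interface via method references. As an illustration, a test that wants a context that always reports cancellation could, hypothetically, pass a lambda instead (this exact call site is a sketch, not part of this change):

// Hypothetical: force every context to report cancellation, e.g. to assert that an
// aggregator aborts promptly. Mirrors this::createCancellingAggregationContext with a fixed answer.
AggregationContextSupplier alwaysCancelled = (
    searcher,
    indexSettings,
    query,
    breakerService,
    bytesToPreallocate,
    maxBucket,
    isInSortOrderExecutionRequired,
    fieldTypes
) -> createAggregationContext(
    searcher,
    indexSettings,
    query,
    breakerService,
    bytesToPreallocate,
    maxBucket,
    isInSortOrderExecutionRequired,
    () -> true, // always cancelled
    fieldTypes
);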
+
     @SuppressWarnings("unchecked")
     private <A extends InternalAggregation> A searchAndReduce(
         IndexSettings indexSettings,
         IndexSearcher searcher,
         CircuitBreakerService breakerService,
-        AggTestConfig aggTestConfig
+        AggTestConfig aggTestConfig,
+        AggregationContextSupplier contextSupplier
     ) throws IOException {
         Query query = aggTestConfig.query();
         AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(aggTestConfig.builder());
@@ -591,7 +656,7 @@ private <A extends InternalAggregation> A searchAndReduce(
                 subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX);
             }
             for (ShardSearcher subSearcher : subSearchers) {
-                AggregationContext context = createAggregationContext(
+                AggregationContext context = contextSupplier.get(
                     subSearcher,
                     indexSettings,
                     query,
@@ -620,7 +685,7 @@ private <A extends InternalAggregation> A searchAndReduce(
                 }
             }
         } else {
-            AggregationContext context = createAggregationContext(
+            AggregationContext context = contextSupplier.get(
                 searcher,
                 indexSettings,
                 query,
@@ -688,8 +753,41 @@ private <A extends InternalAggregation> A searchAndReduce(
                 assertRoundTrip(internalAggregation.copyResults());
             }
         }
+        /* Verify that cancellation during final reduce correctly throws.
+         * We check reduce time cancellation only when consuming buckets.
+         */
+        if (aggTestConfig.testReductionCancellation()) {
+            try {
+                // I can't remember if we mutate the InternalAggregations list, so make a defensive copy
+                List<InternalAggregation> internalAggsCopy = new ArrayList<>(internalAggs);
+                A internalAgg = doFinalReduce(maxBucket, bigArraysForReduction, builder, internalAggsCopy, true);
+                if (internalAgg instanceof MultiBucketsAggregation mb) {
+                    // Empty multi-bucket aggs are expected to return before even getting to the cancellation check
+                    assertEquals("Got non-empty result for a cancelled reduction", 0, mb.getBuckets().size());
+                } // other cases?
+            } catch (TaskCancelledException e) {
+                /* We may not always honor cancellation in reduce, for example if we are returning no results, so we can't
+                 * just expectThrows here.
+                 */
+            }
+        }
+
+        // now do the final reduce
+        A internalAgg = doFinalReduce(maxBucket, bigArraysForReduction, builder, internalAggs, false);
+        assertRoundTrip(internalAgg);
+        if (aggTestConfig.builder instanceof ValuesSourceAggregationBuilder.MetricsAggregationBuilder) {
+            verifyMetricNames((ValuesSourceAggregationBuilder.MetricsAggregationBuilder) aggTestConfig.builder, internalAgg);
+        }
+        return internalAgg;
+    }
+
+    private <A extends InternalAggregation> A doFinalReduce(
+        int maxBucket,
+        BigArrays bigArraysForReduction,
+        Builder builder,
+        List<InternalAggregation> internalAggs,
+        boolean cancelled
+    ) throws IOException {
         MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer(
             maxBucket,
             new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)
@@ -697,7 +795,7 @@ private <A extends InternalAggregation> A searchAndReduce(
         AggregationReduceContext reduceContext = new AggregationReduceContext.ForFinal(
             bigArraysForReduction,
             getMockScriptService(),
-            () -> false,
+            () -> cancelled,
             builder,
             reduceBucketConsumer
         );
@@ -707,10 +805,6 @@ private <A extends InternalAggregation> A searchAndReduce(
         assertRoundTrip(internalAgg);
 
         doAssertReducedMultiBucketConsumer(internalAgg, reduceBucketConsumer);
-        assertRoundTrip(internalAgg);
-        if (aggTestConfig.builder instanceof ValuesSourceAggregationBuilder.MetricsAggregationBuilder) {
-            verifyMetricNames((ValuesSourceAggregationBuilder.MetricsAggregationBuilder) aggTestConfig.builder, internalAgg);
-        }
         return internalAgg;
     }
 
@@ -1601,11 +1695,12 @@ public record AggTestConfig(
         boolean incrementalReduce,
 
         boolean useLogDocMergePolicy,
+        boolean testReductionCancellation,
         MappedFieldType...
fieldTypes ) { public AggTestConfig(AggregationBuilder builder, MappedFieldType... fieldTypes) { - this(new MatchAllDocsQuery(), builder, DEFAULT_MAX_BUCKETS, randomBoolean(), true, randomBoolean(), false, fieldTypes); + this(new MatchAllDocsQuery(), builder, DEFAULT_MAX_BUCKETS, randomBoolean(), true, randomBoolean(), false, true, fieldTypes); } public AggTestConfig withQuery(Query query) { @@ -1617,6 +1712,7 @@ public AggTestConfig withQuery(Query query) { shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1630,6 +1726,7 @@ public AggTestConfig withSplitLeavesIntoSeperateAggregators(boolean splitLeavesI shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1643,6 +1740,7 @@ public AggTestConfig withShouldBeCached(boolean shouldBeCached) { shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1656,6 +1754,7 @@ public AggTestConfig withMaxBuckets(int maxBuckets) { shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1669,6 +1768,7 @@ public AggTestConfig withIncrementalReduce(boolean incrementalReduce) { shouldBeCached, incrementalReduce, useLogDocMergePolicy, + testReductionCancellation, fieldTypes ); } @@ -1682,6 +1782,21 @@ public AggTestConfig withLogDocMergePolicy() { shouldBeCached, incrementalReduce, true, + testReductionCancellation, + fieldTypes + ); + } + + public AggTestConfig noReductionCancellation() { + return new AggTestConfig( + query, + builder, + maxBuckets, + splitLeavesIntoSeparateAggregators, + shouldBeCached, + incrementalReduce, + useLogDocMergePolicy, + false, fieldTypes ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java index d931340365cd6..22044e079018b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java @@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.List; +import java.util.Collection; import static org.elasticsearch.test.BWCVersions.DEFAULT_BWC_VERSIONS; @@ -28,7 +28,7 @@ public abstract class AbstractBWCSerializationTestCase bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index cc35f63d289eb..39b0f2b60662e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -145,8 +145,21 @@ private XContentTester( public void test() throws IOException { for (int runs = 0; runs < numberOfTestRuns; runs++) { XContentType xContentType = randomFrom(XContentType.values()).canonical(); - T testInstance = instanceSupplier.apply(xContentType); + T testInstance = null; try { + if (xContentType.equals(XContentType.YAML)) { + testInstance = randomValueOtherThanMany(instance -> { + // unicode character U+0085 (NEXT LINE (NEL)) doesn't survive YAML round trip tests (see #97716) + // get a new random instance if we detect this character in the xContent output + try { + return 
toXContent.apply(instance, xContentType).utf8ToString().contains("\u0085"); + } catch (IOException e) { + throw new AssertionError(e); + } + }, () -> instanceSupplier.apply(xContentType)); + } else { + testInstance = instanceSupplier.apply(xContentType); + } BytesReference originalXContent = toXContent.apply(testInstance, xContentType); BytesReference shuffledContent = insertRandomFieldsAndShuffle( originalXContent, @@ -173,7 +186,9 @@ public void test() throws IOException { dispose.accept(parsed); } } finally { - dispose.accept(testInstance); + if (testInstance != null) { + dispose.accept(testInstance); + } } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java b/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java index 49859071b03cf..1cd0d0ddc4cd2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java @@ -12,17 +12,14 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; public final class BWCVersions { private BWCVersions() {} - public static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + public static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - public static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + public static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index caa66e928827a..a92706e83ba99 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequest; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse; import org.elasticsearch.action.admin.cluster.allocation.TransportClusterAllocationExplainAction; @@ -1753,7 +1754,8 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean ma logger.info("Index [{}] docs async: [{}] bulk: [{}]", builders.size(), true, false); for (IndexRequestBuilder indexRequestBuilder : builders) { indexRequestBuilder.execute( - new LatchedActionListener(newLatch(inFlightAsyncOperations)).delegateResponse((l, e) -> fail(e)) + new LatchedActionListener(ActionListener.noop(), newLatch(inFlightAsyncOperations)) + .delegateResponse((l, e) -> fail(e)) ); postIndexAsyncActions(indicesArray, inFlightAsyncOperations, maybeFlush); } @@ -1845,17 +1847,17 @@ private void postIndexAsyncActions(String[] indices, List inFlig if (rarely()) { indicesAdmin().prepareRefresh(indices) .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .execute(new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); + .execute(new 
LatchedActionListener<>(ActionListener.noop(), newLatch(inFlightAsyncOperations)));
         } else if (maybeFlush && rarely()) {
             indicesAdmin().prepareFlush(indices)
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())
-                .execute(new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
+                .execute(new LatchedActionListener<>(ActionListener.noop(), newLatch(inFlightAsyncOperations)));
         } else if (rarely()) {
             indicesAdmin().prepareForceMerge(indices)
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                 .setMaxNumSegments(between(1, 10))
                 .setFlush(maybeFlush && randomBoolean())
-                .execute(new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
+                .execute(new LatchedActionListener<>(ActionListener.noop(), newLatch(inFlightAsyncOperations)));
         }
     }
     while (inFlightAsyncOperations.size() > MAX_IN_FLIGHT_ASYNC_INDEXES) {
@@ -1939,32 +1941,6 @@ public enum Scope {
         int numClientNodes() default InternalTestCluster.DEFAULT_NUM_CLIENT_NODES;
     }
 
-    private class LatchedActionListener<Response> implements ActionListener<Response> {
-        private final CountDownLatch latch;
-
-        LatchedActionListener(CountDownLatch latch) {
-            this.latch = latch;
-        }
-
-        @Override
-        public final void onResponse(Response response) {
-            latch.countDown();
-        }
-
-        @Override
-        public final void onFailure(Exception t) {
-            try {
-                logger.info("Action Failed", t);
-                addError(t);
-            } finally {
-                latch.countDown();
-            }
-        }
-
-        protected void addError(Exception e) {}
-
-    }
-
     /**
      * Clears the given scroll Ids
      */
@@ -2287,7 +2263,7 @@ public static Path randomRepoPath() {
      */
     public static Path randomRepoPath(Settings settings) {
         Environment environment = TestEnvironment.newEnvironment(settings);
-        Path[] repoFiles = environment.repoFiles();
+        Path[] repoFiles = environment.repoDirs();
         assert repoFiles.length > 0;
         Path path;
         do {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index a271c999a2ba7..227d7ca3046f8 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -2659,6 +2659,15 @@ public static <T extends Throwable> T expectThrows(Class<T> expectedType, Reques
         );
     }
 
+    /**
+     * Checks that an exception of the expected type, with a message satisfying the given matcher, is thrown by the
+     * given runnable, and returns it.
+     */
+    public static <T extends Throwable> T expectThrows(Class<T> expectedType, Matcher<String> messageMatcher, ThrowingRunnable runnable) {
+        var e = expectThrows(expectedType, runnable);
+        assertThat(e.getMessage(), messageMatcher);
+        return e;
+    }
+
     /**
      * Same as {@link #runInParallel(int, IntConsumer)} but also attempts to start all tasks at the same time by blocking execution on a
      * barrier until all threads are started and ready to execute their task.
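A usage sketch for the expectThrows overload added above, built from APIs that appear elsewhere in this change (TransportVersionUtils); the specific misuse being asserted is illustrative:

// randomVersionBetween throws when maxVersion < minVersion, so swapping the bounds
// lets us assert both the exception type and its message in a single call.
TransportVersion older = TransportVersionUtils.getFirstVersion();
TransportVersion newer = TransportVersion.current();
IllegalArgumentException e = expectThrows(
    IllegalArgumentException.class,
    containsString("cannot be less than"), // org.hamcrest.Matchers.containsString
    () -> TransportVersionUtils.randomVersionBetween(random(), newer, older)
);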
diff --git a/test/framework/src/main/java/org/elasticsearch/test/FailingFieldPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/FailingFieldPlugin.java index 64c90826fda85..670191676726a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/FailingFieldPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/FailingFieldPlugin.java @@ -24,12 +24,14 @@ public class FailingFieldPlugin extends Plugin implements ScriptPlugin { + public static final String FAILING_FIELD_LANG = "failing_field"; + @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { return new ScriptEngine() { @Override public String getType() { - return "failing_field"; + return FAILING_FIELD_LANG; } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index ba3d801bf9d13..ae79636c6b14c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1808,7 +1808,7 @@ private void rebuildUnicastHostFiles(List newNodes) { .distinct() .collect(Collectors.toList()); Set configPaths = Stream.concat(currentNodes.stream(), newNodes.stream()) - .map(nac -> nac.node.getEnvironment().configFile()) + .map(nac -> nac.node.getEnvironment().configDir()) .collect(Collectors.toSet()); logger.debug("configuring discovery with {} at {}", discoveryFileContents, configPaths); for (final Path configPath : configPaths) { @@ -1822,7 +1822,7 @@ private void rebuildUnicastHostFiles(List newNodes) { } public Collection configPaths() { - return nodes.values().stream().map(nac -> nac.node.getEnvironment().configFile()).toList(); + return nodes.values().stream().map(nac -> nac.node.getEnvironment().configDir()).toList(); } private void stopNodesAndClient(NodeAndClient nodeAndClient) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java index dd2e8e4ec5506..a329b5fbaebb2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java +++ b/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java @@ -23,58 +23,63 @@ public class LambdaMatchers { - private static class TransformMatcher extends BaseMatcher { + private static class TransformMatcher extends TypeSafeMatcher { + private final String transformDescription; private final Matcher matcher; private final Function transform; - private TransformMatcher(Matcher matcher, Function transform) { + private TransformMatcher(String transformDescription, Matcher matcher, Function transform) { + this.transformDescription = transformDescription; this.matcher = matcher; this.transform = transform; } @Override - @SuppressWarnings("unchecked") - public boolean matches(Object actual) { + protected boolean matchesSafely(T item) { U u; try { - u = transform.apply((T) actual); + u = transform.apply(item); } catch (ClassCastException e) { throw new AssertionError(e); } - return matcher.matches(u); } @Override - @SuppressWarnings("unchecked") - public void describeMismatch(Object item, Description description) { + protected void describeMismatchSafely(T item, Description description) { U u; try { - u = transform.apply((T) item); + u = transform.apply(item); } catch (ClassCastException e) { description.appendValue(item).appendText(" is not of the correct type 
(").appendText(e.getMessage()).appendText(")"); return; } - description.appendText("transformed value "); + description.appendText(transformDescription).appendText(" "); matcher.describeMismatch(u, description); } @Override public void describeTo(Description description) { - description.appendText("transformed to match ").appendDescriptionOf(matcher); + description.appendText(transformDescription).appendText(" matches ").appendDescriptionOf(matcher); } } public static Matcher transformedMatch(Function function, Matcher matcher) { - return new TransformMatcher<>(matcher, function); + return new TransformMatcher<>("transformed value", matcher, function); + } + + public static Matcher transformedMatch(String description, Function function, Matcher matcher) { + return new TransformMatcher<>(description, matcher, function); } private static class ListTransformMatcher extends TypeSafeMatcher> { + private final String transformDescription; private final Matcher> matcher; private final Function transform; - private ListTransformMatcher(Matcher> matcher, Function transform) { + private ListTransformMatcher(String transformDescription, Matcher> matcher, Function transform) { + this.transformDescription = transformDescription; this.matcher = matcher; this.transform = transform; } @@ -110,25 +115,35 @@ protected void describeMismatchSafely(Iterable item, Description description) } } - description.appendText("transformed item "); + description.appendText(transformDescription).appendText(" "); matcher.describeMismatch(us, description); } @Override public void describeTo(Description description) { - description.appendText("iterable with transformed items to match ").appendDescriptionOf(matcher); + description.appendText("iterable with ").appendText(transformDescription).appendText(" matching ").appendDescriptionOf(matcher); } } public static Matcher> transformedItemsMatch(Function function, Matcher> matcher) { - return new ListTransformMatcher<>(matcher, function); + return new ListTransformMatcher<>("transformed items", matcher, function); + } + + public static Matcher> transformedItemsMatch( + String transformDescription, + Function function, + Matcher> matcher + ) { + return new ListTransformMatcher<>(transformDescription, matcher, function); } private static class ArrayTransformMatcher extends TypeSafeMatcher { + private final String transformDescription; private final Matcher matcher; private final Function transform; - private ArrayTransformMatcher(Matcher matcher, Function transform) { + private ArrayTransformMatcher(String transformDescription, Matcher matcher, Function transform) { + this.transformDescription = transformDescription; this.matcher = matcher; this.transform = transform; } @@ -177,18 +192,26 @@ protected void describeMismatchSafely(T[] item, Description description) { us[i] = u; } - description.appendText("transformed item "); + description.appendText(transformDescription).appendText(" "); matcher.describeMismatch(us, description); } @Override public void describeTo(Description description) { - description.appendText("array with transformed items to match ").appendDescriptionOf(matcher); + description.appendText("array with ").appendText(transformDescription).appendText(" matching ").appendDescriptionOf(matcher); } } public static Matcher transformedArrayItemsMatch(Function function, Matcher matcher) { - return new ArrayTransformMatcher<>(matcher, function); + return new ArrayTransformMatcher<>("transformed items", matcher, function); + } + + public static Matcher 
transformedArrayItemsMatch( + String transformDescription, + Function<T, U> function, + Matcher<U[]> matcher + ) { + return new ArrayTransformMatcher<>(transformDescription, matcher, function); } private static class PredicateMatcher<T> extends BaseMatcher<Predicate<T>> { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 103cf1c15abc1..c46442485ff9e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Query; import org.apache.lucene.search.TotalHits; -import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexService; @@ -49,6 +48,7 @@ import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; +import org.elasticsearch.tasks.CancellableTask; import java.util.Collections; import java.util.HashMap; @@ -67,7 +67,7 @@ public class TestSearchContext extends SearchContext { ParsedQuery postFilter; Query query; Float minScore; - SearchShardTask task; + CancellableTask task; SortAndFormats sort; boolean trackScores = false; int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO; @@ -506,12 +506,12 @@ public SearchExecutionContext getSearchExecutionContext() { } @Override - public void setTask(SearchShardTask task) { + public void setTask(CancellableTask task) { this.task = task; } @Override - public SearchShardTask getTask() { + public CancellableTask getTask() { return task; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java index 0c7274a36b49a..9c7114425b8db 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java @@ -14,15 +14,23 @@ import org.elasticsearch.core.Nullable; import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import java.util.Random; import java.util.Set; +import java.util.TreeSet; import java.util.stream.Collectors; +import static org.apache.lucene.tests.util.LuceneTestCase.random; + public class TransportVersionUtils { + + private static final NavigableSet<TransportVersion> RELEASED_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(TransportVersion.getAllVersions()) + ); + /** Returns all released versions */ - public static List<TransportVersion> allReleasedVersions() { - return TransportVersion.getAllVersions(); + public static NavigableSet<TransportVersion> allReleasedVersions() { + return RELEASED_VERSIONS; } /** Returns the oldest known {@link TransportVersion} */ @@ -32,7 +40,7 @@ public static TransportVersion getFirstVersion() {
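A quick usage sketch of the description-aware overloads introduced above (the Person record and the values here are hypothetical, purely illustrative): the extra String argument replaces the generic "transformed value" wording in both the expectation and the mismatch report.

    import static org.elasticsearch.test.LambdaMatchers.transformedMatch;
    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.equalTo;

    record Person(String name) {}

    // Passes; on failure the report would read: Expected: person name matches "alice"
    // but: person name was "bob" (instead of the old "transformed to match ..." /
    // "transformed value ..." phrasing).
    assertThat(new Person("alice"), transformedMatch("person name", Person::name, equalTo("alice")));

/** Returns a random {@link TransportVersion} from all available versions.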
*/ public static TransportVersion randomVersion() { - return ESTestCase.randomFrom(allReleasedVersions()); + return VersionUtils.randomFrom(random(), allReleasedVersions(), TransportVersion::fromId); } /** Returns a random {@link TransportVersion} from all available versions without the ignore set */ @@ -42,7 +50,7 @@ public static TransportVersion randomVersion(Set ignore) { /** Returns a random {@link TransportVersion} from all available versions. */ public static TransportVersion randomVersion(Random random) { - return allReleasedVersions().get(random.nextInt(allReleasedVersions().size())); + return VersionUtils.randomFrom(random, allReleasedVersions(), TransportVersion::fromId); } /** Returns a random {@link TransportVersion} between minVersion and maxVersion (inclusive). */ @@ -55,24 +63,21 @@ public static TransportVersion randomVersionBetween( throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); } - int minVersionIndex = 0; - List allReleasedVersions = allReleasedVersions(); + NavigableSet versions = allReleasedVersions(); if (minVersion != null) { - minVersionIndex = Collections.binarySearch(allReleasedVersions, minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = allReleasedVersions.size() - 1; if (maxVersion != null) { - maxVersionIndex = Collections.binarySearch(allReleasedVersions, maxVersion); - } - if (minVersionIndex < 0) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex < 0) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return allReleasedVersions.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } + + return VersionUtils.randomFrom(random, versions, TransportVersion::fromId); } public static TransportVersion getPreviousVersion() { @@ -82,16 +87,11 @@ public static TransportVersion getPreviousVersion() { } public static TransportVersion getPreviousVersion(TransportVersion version) { - int place = Collections.binarySearch(allReleasedVersions(), version); - if (place < 0) { - // version does not exist - need the item before the index this version should be inserted - place = -(place + 1); - } - - if (place < 1) { + TransportVersion lower = allReleasedVersions().lower(version); + if (lower == null) { throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); } - return allReleasedVersions().get(place - 1); + return lower; } public static TransportVersion getNextVersion(TransportVersion version) { @@ -99,17 +99,8 @@ public static TransportVersion getNextVersion(TransportVersion version) { } public static TransportVersion getNextVersion(TransportVersion version, boolean createIfNecessary) { - List allReleasedVersions = allReleasedVersions(); - int place = Collections.binarySearch(allReleasedVersions, version); - if (place < 0) { - // version does not exist - need the item at the index this version should be inserted - place = -(place + 1); - } else { - // need the *next* 
version - place++; - } - - if (place < 0 || place >= allReleasedVersions.size()) { + TransportVersion higher = allReleasedVersions().higher(version); + if (higher == null) { if (createIfNecessary) { // create a new transport version one greater than specified return new TransportVersion(version.id() + 1); @@ -117,7 +108,7 @@ public static TransportVersion getNextVersion(TransportVersion version, boolean throw new IllegalArgumentException("couldn't find any released versions after [" + version + "]"); } } - return allReleasedVersions.get(place); + return higher; } /** Returns a random {@code TransportVersion} that is compatible with {@link TransportVersion#current()} */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java index 8b7ab620774b9..311f032088f74 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java @@ -9,23 +9,31 @@ package org.elasticsearch.test; +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + import org.elasticsearch.Build; import org.elasticsearch.Version; +import org.elasticsearch.common.VersionId; import org.elasticsearch.core.Nullable; +import java.util.Collections; import java.util.List; -import java.util.Optional; +import java.util.NavigableSet; import java.util.Random; +import java.util.TreeSet; +import java.util.function.IntFunction; /** Utilities for selecting versions in tests */ public class VersionUtils { - private static final List ALL_VERSIONS = Version.getDeclaredVersions(Version.class); + private static final NavigableSet ALL_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(Version.getDeclaredVersions(Version.class)) + ); /** * Returns an immutable, sorted list containing all versions, both released and unreleased. */ - public static List allVersions() { + public static NavigableSet allVersions() { return ALL_VERSIONS; } @@ -33,13 +41,11 @@ public static List allVersions() { * Get the version before {@code version}. */ public static Version getPreviousVersion(Version version) { - for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) { - Version v = ALL_VERSIONS.get(i); - if (v.before(version)) { - return v; - } + var versions = ALL_VERSIONS.headSet(version, false); + if (versions.isEmpty()) { + throw new IllegalArgumentException("couldn't find any versions before [" + version + "]"); } - throw new IllegalArgumentException("couldn't find any versions before [" + version + "]"); + return versions.getLast(); } /** @@ -56,8 +62,7 @@ public static Version getPreviousVersion() { * where the minor version is less than the currents minor version. */ public static Version getPreviousMinorVersion() { - for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) { - Version v = ALL_VERSIONS.get(i); + for (Version v : ALL_VERSIONS.descendingSet()) { if (v.minor < Version.CURRENT.minor || v.major < Version.CURRENT.major) { return v; } @@ -67,12 +72,12 @@ public static Version getPreviousMinorVersion() { /** Returns the oldest {@link Version} */ public static Version getFirstVersion() { - return ALL_VERSIONS.get(0); + return ALL_VERSIONS.getFirst(); } /** Returns a random {@link Version} from all available versions. 
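The rewrite above trades Collections.binarySearch index arithmetic for java.util.NavigableSet navigation. A self-contained sketch of the four lookups involved, with plain Integers standing in for version objects:

    import java.util.NavigableSet;
    import java.util.Set;
    import java.util.TreeSet;

    class NavigableSetDemo {
        public static void main(String[] args) {
            NavigableSet<Integer> versions = new TreeSet<>(Set.of(1, 2, 4));
            assert versions.lower(4) == 2;   // greatest element strictly below 4 (cf. getPreviousVersion)
            assert versions.higher(2) == 4;  // least element strictly above 2 (cf. getNextVersion)
            assert versions.floor(3) == 2;   // greatest element <= 3; null when none exists
            assert versions.ceiling(3) == 4; // least element >= 3; null when none exists
        }
    }

Each call is an O(log n) tree walk, and a null return replaces the old negative-insertion-point bookkeeping.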
*/ public static Version randomVersion(Random random) { - return ALL_VERSIONS.get(random.nextInt(ALL_VERSIONS.size())); + return randomFrom(random, ALL_VERSIONS, Version::fromId); } /** Returns a random {@link Version} from all available versions, that is compatible with the given version. */ @@ -83,38 +88,42 @@ public static Version randomCompatibleVersion(Random random, Version version) { /** Returns a random {@link Version} between minVersion and maxVersion (inclusive). */ public static Version randomVersionBetween(Random random, @Nullable Version minVersion, @Nullable Version maxVersion) { - int minVersionIndex = 0; + if (minVersion != null && maxVersion != null && maxVersion.before(minVersion)) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); + } + + NavigableSet versions = ALL_VERSIONS; if (minVersion != null) { - minVersionIndex = ALL_VERSIONS.indexOf(minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = ALL_VERSIONS.size() - 1; if (maxVersion != null) { - maxVersionIndex = ALL_VERSIONS.indexOf(maxVersion); - } - if (minVersionIndex == -1) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex == -1) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else if (minVersionIndex > maxVersionIndex) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return ALL_VERSIONS.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } - } - /** returns the first future compatible version */ - public static Version compatibleFutureVersion(Version version) { - final Optional opt = ALL_VERSIONS.stream().filter(version::before).filter(v -> v.isCompatible(version)).findAny(); - assert opt.isPresent() : "no future compatible version for " + version; - return opt.get(); + return randomFrom(random, versions, Version::fromId); } /** Returns the maximum {@link Version} that is compatible with the given version. */ public static Version maxCompatibleVersion(Version version) { - final List compatible = ALL_VERSIONS.stream().filter(version::isCompatible).filter(version::onOrBefore).toList(); - assert compatible.size() > 0; - return compatible.get(compatible.size() - 1); + return ALL_VERSIONS.tailSet(version, true).descendingSet().stream().filter(version::isCompatible).findFirst().orElseThrow(); + } + + public static > T randomFrom(Random random, NavigableSet set, IntFunction ctor) { + // get the first and last id, pick a random id in the middle, then find that id in the set in O(nlogn) time + // this assumes the id numbers are reasonably evenly distributed in the set + assert set.isEmpty() == false; + int lowest = set.getFirst().id(); + int highest = set.getLast().id(); + + T randomId = ctor.apply(RandomNumbers.randomIntBetween(random, lowest, highest)); + // try to find the id below, then the id above. 
We're just looking for *some* item in the set that is close to randomId + T found = set.floor(randomId); + return found != null ? found : set.ceiling(randomId); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 40aee8eed4235..b0d64a87c4d36 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -257,7 +257,7 @@ public static void assertBlocked(@Nullable final Integer expectedBlockId, Except assertThat( "Request should have been blocked by [" + expectedBlockId + "] instead of " + e.blocks(), e.blocks(), - hasItem(transformedMatch(ClusterBlock::id, equalTo(expectedBlockId))) + hasItem(transformedMatch("ClusterBlock id", ClusterBlock::id, equalTo(expectedBlockId))) ); } } @@ -764,33 +764,39 @@ public static void assertSuggestion(Suggest searchSuggest, int entry, String key * Assert that an index template is missing */ public static void assertIndexTemplateMissing(GetIndexTemplatesResponse templatesResponse, String name) { - assertThat(templatesResponse.getIndexTemplates(), not(hasItem(transformedMatch(IndexTemplateMetadata::name, equalTo(name))))); + assertThat( + templatesResponse.getIndexTemplates(), + not(hasItem(transformedMatch("IndexTemplateMetadata name", IndexTemplateMetadata::name, equalTo(name)))) + ); } /** * Assert that an index template exists */ public static void assertIndexTemplateExists(GetIndexTemplatesResponse templatesResponse, String name) { - assertThat(templatesResponse.getIndexTemplates(), hasItem(transformedMatch(IndexTemplateMetadata::name, equalTo(name)))); + assertThat( + templatesResponse.getIndexTemplates(), + hasItem(transformedMatch("IndexTemplateMetadata name", IndexTemplateMetadata::name, equalTo(name))) + ); } /* * matchers */ public static Matcher hasId(final String id) { - return transformedMatch(SearchHit::getId, equalTo(id)); + return transformedMatch("SearchHit id", SearchHit::getId, equalTo(id)); } public static Matcher hasIndex(final String index) { - return transformedMatch(SearchHit::getIndex, equalTo(index)); + return transformedMatch("SearchHit index", SearchHit::getIndex, equalTo(index)); } public static Matcher hasScore(final float score) { - return transformedMatch(SearchHit::getScore, equalTo(score)); + return transformedMatch("SearchHit score", SearchHit::getScore, equalTo(score)); } public static Matcher hasRank(final int rank) { - return transformedMatch(SearchHit::getRank, equalTo(rank)); + return transformedMatch("SearchHit rank", SearchHit::getRank, equalTo(rank)); } public static T assertBooleanSubQuery(Query query, Class subqueryType, int i) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java index 667149e4bdd3e..5bf20b18abc72 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java @@ -14,41 +14,43 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.KnownIndexVersions; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import java.util.Random; 
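The randomFrom helper above avoids copying the set into a random-access list: it draws a uniformly random id from the numeric id range and snaps it to a neighbouring member with floor()/ceiling(). A minimal sketch of the same trick, assuming ids fit comfortably in an int and, as the comment warns, giving only a roughly uniform pick when the ids are evenly spread:

    import java.util.NavigableSet;
    import java.util.Random;
    import java.util.Set;
    import java.util.TreeSet;

    class RandomFromDemo {
        static int randomFrom(Random random, NavigableSet<Integer> set) {
            int lowest = set.first();
            int highest = set.last();
            int candidate = lowest + random.nextInt(highest - lowest + 1); // uniform over the id range
            Integer found = set.floor(candidate);                // nearest member at or below the draw
            return found != null ? found : set.ceiling(candidate); // defensive; floor is null only below the first id
        }

        public static void main(String[] args) {
            System.out.println(randomFrom(new Random(), new TreeSet<>(Set.of(10, 20, 30, 47, 90))));
        }
    }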
import java.util.Set; import java.util.stream.Collectors; +import static org.apache.lucene.tests.util.LuceneTestCase.random; + public class IndexVersionUtils { - private static final List ALL_VERSIONS = KnownIndexVersions.ALL_VERSIONS; - private static final List ALL_WRITE_VERSIONS = KnownIndexVersions.ALL_WRITE_VERSIONS; + private static final NavigableSet ALL_VERSIONS = KnownIndexVersions.ALL_VERSIONS; + private static final NavigableSet ALL_WRITE_VERSIONS = KnownIndexVersions.ALL_WRITE_VERSIONS; /** Returns all released versions */ - public static List allReleasedVersions() { + public static NavigableSet allReleasedVersions() { return ALL_VERSIONS; } /** Returns the oldest known {@link IndexVersion}. This version can only be read from and not written to */ public static IndexVersion getLowestReadCompatibleVersion() { - return ALL_VERSIONS.get(0); + return ALL_VERSIONS.getFirst(); } /** Returns the oldest known {@link IndexVersion} that can be written to */ public static IndexVersion getLowestWriteCompatibleVersion() { - return ALL_WRITE_VERSIONS.get(0); + return ALL_WRITE_VERSIONS.getFirst(); } /** Returns a random {@link IndexVersion} from all available versions. */ public static IndexVersion randomVersion() { - return ESTestCase.randomFrom(ALL_VERSIONS); + return VersionUtils.randomFrom(random(), ALL_VERSIONS, IndexVersion::fromId); } /** Returns a random {@link IndexVersion} from all versions that can be written to. */ public static IndexVersion randomWriteVersion() { - return ESTestCase.randomFrom(ALL_WRITE_VERSIONS); + return VersionUtils.randomFrom(random(), ALL_WRITE_VERSIONS, IndexVersion::fromId); } /** Returns a random {@link IndexVersion} from all available versions without the ignore set */ @@ -62,23 +64,21 @@ public static IndexVersion randomVersionBetween(Random random, @Nullable IndexVe throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); } - int minVersionIndex = 0; + NavigableSet versions = allReleasedVersions(); if (minVersion != null) { - minVersionIndex = Collections.binarySearch(ALL_VERSIONS, minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = ALL_VERSIONS.size() - 1; if (maxVersion != null) { - maxVersionIndex = Collections.binarySearch(ALL_VERSIONS, maxVersion); - } - if (minVersionIndex < 0) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex < 0) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return ALL_VERSIONS.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } + + return VersionUtils.randomFrom(random, versions, IndexVersion::fromId); } public static IndexVersion getPreviousVersion() { @@ -88,16 +88,11 @@ public static IndexVersion getPreviousVersion() { } public static IndexVersion getPreviousVersion(IndexVersion version) { - int place = Collections.binarySearch(ALL_VERSIONS, version); - if (place < 0) { - // version does not exist - need the item before the index this version should be inserted 
- place = -(place + 1); - } - - if (place < 1) { + IndexVersion lower = allReleasedVersions().lower(version); + if (lower == null) { throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); } - return ALL_VERSIONS.get(place - 1); + return lower; } public static IndexVersion getPreviousMajorVersion(IndexVersion version) { @@ -105,19 +100,11 @@ public static IndexVersion getPreviousMajorVersion(IndexVersion version) { } public static IndexVersion getNextVersion(IndexVersion version) { - int place = Collections.binarySearch(ALL_VERSIONS, version); - if (place < 0) { - // version does not exist - need the item at the index this version should be inserted - place = -(place + 1); - } else { - // need the *next* version - place++; - } - - if (place < 0 || place >= ALL_VERSIONS.size()) { + IndexVersion higher = allReleasedVersions().higher(version); + if (higher == null) { throw new IllegalArgumentException("couldn't find any released versions after [" + version + "]"); } - return ALL_VERSIONS.get(place); + return higher; } /** Returns a random {@code IndexVersion} that is compatible with {@link IndexVersion#current()} */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 2647e21d34bc5..677924a553ec7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -166,6 +166,22 @@ public abstract class ESRestTestCase extends ESTestCase { private static final Logger SUITE_LOGGER = LogManager.getLogger(ESRestTestCase.class); + private static final String EXPECTED_ROLLUP_WARNING_MESSAGE = + "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information."; + public static final RequestOptions.Builder ROLLUP_REQUESTS_OPTIONS = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> { + if (warnings.isEmpty()) { + return false; + } else { + // Sometimes multiple rollup deprecation warnings. Transport actions can be invoked multiple time on different nodes. + for (String warning : warnings) { + if (EXPECTED_ROLLUP_WARNING_MESSAGE.equals(warning) == false) { + return true; + } + } + return false; + } + }); + /** * Convert the entity from a {@link Response} into a map of maps. * Consumes the underlying HttpEntity, releasing any resources it may be holding. 
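One note on the warnings handler defined above, for readers who do not have the low-level REST client contract memorized: WarningsHandler#warningsShouldFailRequest returns true when the response should be rejected, so these options accept a response only if every warning it carries equals the expected rollup deprecation message (which may legitimately appear more than once, since the transport action can run on several nodes). Wiring it into a request is then a one-liner:

    Request request = new Request("GET", "/_rollup/job/_all");
    request.setOptions(ROLLUP_REQUESTS_OPTIONS); // tolerate the rollup deprecation warning, fail on anything else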
@@ -998,14 +1014,21 @@ private void wipeCluster() throws Exception { private void waitForClusterUpdates() throws Exception { logger.info("Waiting for all cluster updates up to this moment to be processed"); + try { assertOK(adminClient().performRequest(new Request("GET", "_cluster/health?wait_for_events=languid"))); } catch (ResponseException e) { if (e.getResponse().getStatusLine().getStatusCode() == HttpStatus.SC_REQUEST_TIMEOUT) { + StringBuilder logMessage = new StringBuilder("Timed out waiting for cluster updates to be processed."); final var pendingTasks = getPendingClusterStateTasks(); if (pendingTasks != null) { - logger.error("Timed out waiting for cluster updates to be processed, {}", pendingTasks); + logMessage.append('\n').append(pendingTasks); } + final var hotThreads = getHotThreads(); + if (hotThreads != null) { + logMessage.append("\nHot threads: ").append(hotThreads); + } + logger.error(logMessage.toString()); } throw e; } @@ -1015,8 +1038,8 @@ private static String getPendingClusterStateTasks() { try { Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks")); List tasks = (List) entityAsMap(response).get("tasks"); - if (false == tasks.isEmpty()) { - StringBuilder message = new StringBuilder("there are still running tasks:"); + if (tasks.isEmpty() == false) { + StringBuilder message = new StringBuilder("There are still running tasks:"); for (Object task : tasks) { message.append('\n').append(task.toString()); } @@ -1028,6 +1051,18 @@ private static String getPendingClusterStateTasks() { return null; } + private String getHotThreads() { + try { + Response response = adminClient().performRequest( + new Request("GET", "/_nodes/hot_threads?ignore_idle_threads=false&threads=9999") + ); + return EntityUtils.toString(response.getEntity()); + } catch (IOException e) { + logger.error("Failed to retrieve hot threads in the cluster during cleanup", e); + } + return null; + } + /** * This method checks whether ILM policies or templates get recreated after they have been deleted. If so, we are probably deleting * them unnecessarily, potentially causing test performance problems. 
This could happen for example if someone adds a new standard ILM @@ -1305,7 +1340,9 @@ private static void wipeClusterSettings() throws IOException { private void wipeRollupJobs() throws IOException { final Response response; try { - response = adminClient().performRequest(new Request("GET", "/_rollup/job/_all")); + var request = new Request("GET", "/_rollup/job/_all"); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); + response = adminClient().performRequest(request); } catch (ResponseException e) { // If we don't see the rollup endpoint (possibly because of running against an older ES version) we just bail if (e.getResponse().getStatusLine().getStatusCode() == RestStatus.NOT_FOUND.getStatus()) { @@ -1325,6 +1362,7 @@ private void wipeRollupJobs() throws IOException { @SuppressWarnings("unchecked") String jobId = (String) ((Map) jobConfig.get("config")).get("id"); Request request = new Request("POST", "/_rollup/job/" + jobId + "/_stop"); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); setIgnoredErrorResponseCodes(request, RestStatus.NOT_FOUND); request.addParameter("wait_for_completion", "true"); request.addParameter("timeout", "10s"); @@ -1336,6 +1374,7 @@ private void wipeRollupJobs() throws IOException { @SuppressWarnings("unchecked") String jobId = (String) ((Map) jobConfig.get("config")).get("id"); Request request = new Request("DELETE", "/_rollup/job/" + jobId); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); setIgnoredErrorResponseCodes(request, RestStatus.NOT_FOUND); // 404s imply someone was racing us to delete this logger.debug("deleting rollup job [{}]", jobId); adminClient().performRequest(request); @@ -1929,7 +1968,7 @@ protected static Map getIndexSettings(String index, boolean incl } @SuppressWarnings("unchecked") - protected Map getIndexSettingsAsMap(String index) throws IOException { + protected static Map getIndexSettingsAsMap(String index) throws IOException { Map indexSettings = getIndexSettings(index); return (Map) ((Map) indexSettings.get(index)).get("settings"); } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 4595fbf286077..0df978fe4937e 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -2758,8 +2758,8 @@ public void handleException(TransportException exp) { TransportStats transportStats = serviceC.transport.getStats(); // we did a single round-trip to do the initial handshake assertEquals(1, transportStats.getRxCount()); assertEquals(1, transportStats.getTxCount()); - assertEquals(29, transportStats.getRxSize().getBytes()); - assertEquals(55, transportStats.getTxSize().getBytes()); + assertEquals(35, transportStats.getRxSize().getBytes()); + assertEquals(60, transportStats.getTxSize().getBytes()); }); serviceC.sendRequest( connection, @@ -2773,16 +2773,16 @@ public void handleException(TransportException exp) { TransportStats transportStats = serviceC.transport.getStats(); // request has been send assertEquals(1, transportStats.getRxCount()); assertEquals(2, transportStats.getTxCount()); - assertEquals(29, transportStats.getRxSize().getBytes()); - assertEquals(114, transportStats.getTxSize().getBytes()); + assertEquals(35, transportStats.getRxSize().getBytes()); + assertEquals(119, transportStats.getTxSize().getBytes()); }); 
sendResponseLatch.countDown(); responseLatch.await(); stats = serviceC.transport.getStats(); // response has been received assertEquals(2, stats.getRxCount()); assertEquals(2, stats.getTxCount()); - assertEquals(54, stats.getRxSize().getBytes()); - assertEquals(114, stats.getTxSize().getBytes()); + assertEquals(60, stats.getRxSize().getBytes()); + assertEquals(119, stats.getTxSize().getBytes()); } finally { serviceC.close(); } @@ -2873,8 +2873,8 @@ public void handleException(TransportException exp) { TransportStats transportStats = serviceC.transport.getStats(); // request has been sent assertEquals(1, transportStats.getRxCount()); assertEquals(1, transportStats.getTxCount()); - assertEquals(29, transportStats.getRxSize().getBytes()); - assertEquals(55, transportStats.getTxSize().getBytes()); + assertEquals(35, transportStats.getRxSize().getBytes()); + assertEquals(60, transportStats.getTxSize().getBytes()); }); serviceC.sendRequest( connection, @@ -2888,8 +2888,8 @@ public void handleException(TransportException exp) { TransportStats transportStats = serviceC.transport.getStats(); // request has been sent assertEquals(1, transportStats.getRxCount()); assertEquals(2, transportStats.getTxCount()); - assertEquals(29, transportStats.getRxSize().getBytes()); - assertEquals(114, transportStats.getTxSize().getBytes()); + assertEquals(35, transportStats.getRxSize().getBytes()); + assertEquals(119, transportStats.getTxSize().getBytes()); }); sendResponseLatch.countDown(); responseLatch.await(); @@ -2904,8 +2904,8 @@ public void handleException(TransportException exp) { String failedMessage = "Unexpected read bytes size. The transport exception that was received=" + exception; // 57 bytes are the non-exception message bytes that have been received. It should include the initial // handshake message and the header, version, etc bytes in the exception message. 
- assertEquals(failedMessage, 57 + streamOutput.bytes().length(), stats.getRxSize().getBytes()); - assertEquals(114, stats.getTxSize().getBytes()); + assertEquals(failedMessage, 63 + streamOutput.bytes().length(), stats.getRxSize().getBytes()); + assertEquals(119, stats.getTxSize().getBytes()); } finally { serviceC.close(); } diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationTests.java index d9750328ff3fa..f5ba8bd02fa88 100644 --- a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationTests.java +++ b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/DataGenerationTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin; import org.elasticsearch.xpack.unsignedlong.UnsignedLongMapperPlugin; import java.io.IOException; @@ -110,7 +111,7 @@ public DataSourceResponse.FieldTypeGenerator handle(DataSourceRequest.FieldTypeG var mappingService = new MapperServiceTestCase() { @Override protected Collection getPlugins() { - return List.of(new UnsignedLongMapperPlugin(), new MapperExtrasPlugin()); + return List.of(new UnsignedLongMapperPlugin(), new MapperExtrasPlugin(), new CountedKeywordMapperPlugin()); } }.createMapperService(mappingXContent); diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/SourceMatcherTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/SourceMatcherTests.java index 74f70bae4d0c1..aba5cbc9878e1 100644 --- a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/SourceMatcherTests.java +++ b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/SourceMatcherTests.java @@ -62,8 +62,8 @@ public void testDynamicMismatch() throws IOException { public void testMappedMatch() throws IOException { List> values = List.of( - Map.of("aaa", 124, "bbb", false, "ccc", 12.34), - Map.of("aaa", 124, "bbb", false, "ccc", 12.34) + Map.of("aaa", 124, "bbb", "hey", "ccc", 12.34), + Map.of("aaa", 124, "bbb", "yeh", "ccc", 12.34) ); var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); @@ -71,7 +71,7 @@ public void testMappedMatch() throws IOException { mapping.startObject("_doc"); { mapping.startObject("aaa").field("type", "long").endObject(); - mapping.startObject("bbb").field("type", "boolean").endObject(); + mapping.startObject("bbb").field("type", "keyword").endObject(); mapping.startObject("ccc").field("type", "half_float").endObject(); } mapping.endObject(); @@ -83,12 +83,12 @@ public void testMappedMatch() throws IOException { public void testMappedMismatch() throws IOException { List> actual = List.of( - Map.of("aaa", 124, "bbb", false, "ccc", 12.34), - Map.of("aaa", 124, "bbb", false, "ccc", 12.34) + Map.of("aaa", 124, "bbb", "hey", "ccc", 12.34), + Map.of("aaa", 124, "bbb", "yeh", "ccc", 12.34) ); List> expected = List.of( - Map.of("aaa", 124, "bbb", false, "ccc", 12.34), - Map.of("aaa", 124, "bbb", false, "ccc", 12.35) + Map.of("aaa", 124, "bbb", "hey", "ccc", 12.34), + Map.of("aaa", 124, "bbb", "yeh", "ccc", 12.35) ); var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); @@ -96,7 +96,7 @@ public void testMappedMismatch() throws IOException { mapping.startObject("_doc"); { 
mapping.startObject("aaa").field("type", "long").endObject(); - mapping.startObject("bbb").field("type", "boolean").endObject(); + mapping.startObject("bbb").field("type", "keyword").endObject(); mapping.startObject("ccc").field("type", "half_float").endObject(); } mapping.endObject(); @@ -105,4 +105,38 @@ public void testMappedMismatch() throws IOException { var sut = new SourceMatcher(mapping, Settings.builder(), mapping, Settings.builder(), actual, expected, false); assertFalse(sut.match().isMatch()); } + + public void testCountedKeywordMatch() throws IOException { + List> actual = List.of(Map.of("field", List.of("a", "b", "a", "c", "b", "a"))); + List> expected = List.of(Map.of("field", List.of("a", "b", "a", "c", "b", "a"))); + + var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); + mapping.startObject(); + mapping.startObject("_doc"); + { + mapping.startObject("field").field("type", "counted_keyword").endObject(); + } + mapping.endObject(); + mapping.endObject(); + + var sut = new SourceMatcher(mapping, Settings.builder(), mapping, Settings.builder(), actual, expected, false); + assertTrue(sut.match().isMatch()); + } + + public void testCountedKeywordMismatch() throws IOException { + List> actual = List.of(Map.of("field", List.of("a", "b", "a", "c", "b", "a"))); + List> expected = List.of(Map.of("field", List.of("a", "b", "c", "a"))); + + var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); + mapping.startObject(); + mapping.startObject("_doc"); + { + mapping.startObject("field").field("type", "counted_keyword").endObject(); + } + mapping.endObject(); + mapping.endObject(); + + var sut = new SourceMatcher(mapping, Settings.builder(), mapping, Settings.builder(), actual, expected, false); + assertFalse(sut.match().isMatch()); + } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java index b8f4dcb399ec7..e3cc3bba94a5c 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java @@ -12,11 +12,13 @@ import com.carrotsearch.randomizedtesting.RandomizedContext; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import java.io.IOException; import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -49,4 +51,42 @@ public void testInsertRandomFieldsAndShuffle() throws Exception { assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field"))); } } + + private record TestToXContent(String field, String value) implements ToXContentFragment { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.field(field, value); + } + } + + public void testYamlXContentRoundtripSanitization() throws Exception { + var test = new AbstractXContentTestCase() { + + @Override + protected TestToXContent createTestInstance() { + // we need to randomly create both a "problematic" and an okay version in order to ensure that the sanitization code + // can draw at least one okay version if polled often enough + return randomBoolean() ? 
new TestToXContent("a\u0085b", "def") : new TestToXContent("a b", "def"); + } + + @Override + protected TestToXContent doParseInstance(XContentParser parser) throws IOException { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + String name = parser.currentName(); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + String value = parser.text(); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return new TestToXContent(name, value); + }; + + @Override + protected boolean supportsUnknownFields() { + return false; + } + }; + // testFromXContent runs 20 repetitions, enough to hit a YAML xcontent version very likely + test.testFromXContent(); + } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java b/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java index f7ab2349ec1ce..c97a369a9853e 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java @@ -19,11 +19,13 @@ import static org.elasticsearch.test.LambdaMatchers.transformedItemsMatch; import static org.elasticsearch.test.LambdaMatchers.transformedMatch; import static org.elasticsearch.test.LambdaMatchers.trueWith; +import static org.hamcrest.Matchers.anything; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; public class LambdaMatchersTests extends ESTestCase { @@ -56,11 +58,13 @@ public void testTransformMatcher() { assertThat(new A("1"), transformedMatch(a -> a.str, equalTo("1"))); assertThat(new B("1"), transformedMatch((A a) -> a.str, equalTo("1"))); + assertMismatch((A) null, transformedMatch(A::toString, anything()), is("was null")); assertMismatch(new A("1"), transformedMatch(a -> a.str, emptyString()), equalTo("transformed value was \"1\"")); } public void testTransformDescription() { - assertDescribeTo(transformedMatch((A a) -> a.str, emptyString()), equalTo("transformed to match an empty string")); + assertDescribeTo(transformedMatch((A a) -> a.str, emptyString()), equalTo("transformed value matches an empty string")); + assertDescribeTo(transformedMatch("str field", (A a) -> a.str, emptyString()), equalTo("str field matches an empty string")); } public void testListTransformMatcher() { @@ -71,14 +75,23 @@ public void testListTransformMatcher() { assertMismatch( as, transformedItemsMatch(a -> a.str, containsInAnyOrder("1", "2", "4")), - equalTo("transformed item not matched: \"3\"") + equalTo("transformed items not matched: \"3\"") + ); + assertMismatch( + as, + transformedItemsMatch("str field", a -> a.str, containsInAnyOrder("1", "2", "4")), + equalTo("str field not matched: \"3\"") ); } public void testListTransformDescription() { assertDescribeTo( transformedItemsMatch((A a) -> a.str, containsInAnyOrder("1")), - equalTo("iterable with transformed items to match iterable with items [\"1\"] in any order") + equalTo("iterable with transformed items matching iterable with items [\"1\"] in any order") + ); + assertDescribeTo( + transformedItemsMatch("str field", (A a) -> a.str, containsInAnyOrder("1")), + equalTo("iterable with str 
field matching iterable with items [\"1\"] in any order") ); } @@ -89,14 +102,23 @@ public void testArrayTransformMatcher() { assertMismatch( as, transformedArrayItemsMatch(a -> a.str, arrayContainingInAnyOrder("1", "2", "4")), - equalTo("transformed item not matched: \"3\"") + equalTo("transformed items not matched: \"3\"") + ); + assertMismatch( + as, + transformedArrayItemsMatch("str field", a -> a.str, arrayContainingInAnyOrder("1", "2", "4")), + equalTo("str field not matched: \"3\"") ); } public void testArrayTransformDescription() { assertDescribeTo( transformedArrayItemsMatch((A a) -> a.str, arrayContainingInAnyOrder("1")), - equalTo("array with transformed items to match [\"1\"] in any order") + equalTo("array with transformed items matching [\"1\"] in any order") + ); + assertDescribeTo( + transformedArrayItemsMatch("str field", (A a) -> a.str, arrayContainingInAnyOrder("1")), + equalTo("array with str field matching [\"1\"] in any order") ); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java index 5ae7e5640fc91..9951878289d48 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java @@ -21,13 +21,6 @@ */ public class VersionUtilsTests extends ESTestCase { - public void testAllVersionsSorted() { - List allVersions = VersionUtils.allVersions(); - for (int i = 0, j = 1; j < allVersions.size(); ++i, ++j) { - assertTrue(allVersions.get(i).before(allVersions.get(j))); - } - } - public void testRandomVersionBetween() { // TODO: rework this test to use a dummy Version class so these don't need to change with each release // full range @@ -50,9 +43,9 @@ public void testRandomVersionBetween() { got = VersionUtils.randomVersionBetween(random(), null, fromId(7000099)); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); assertTrue(got.onOrBefore(fromId(7000099))); - got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().get(0)); + got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().getFirst()); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); - assertTrue(got.onOrBefore(VersionUtils.allVersions().get(0))); + assertTrue(got.onOrBefore(VersionUtils.allVersions().getFirst())); // unbounded upper got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), null); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 5630c33ad559c..013a502d93cef 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -19,7 +19,11 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - INFERENCE_UNIFIED_API_ENABLED("es.inference_unified_feature_flag_enabled=true", Version.fromString("8.18.0"), null); + INDEX_RECOVERY_USE_SYNTHETIC_SOURCE( + "es.index_recovery_use_synthetic_source_feature_flag_enabled=true", + Version.fromString("8.18.0"), + null + ); public 
final String systemProperty; public final Version from; diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java index 63dbd98da3730..548a372964887 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java @@ -14,7 +14,8 @@ public enum LogType { SERVER_JSON("%s_server.json"), AUDIT("%s_audit.json"), SEARCH_SLOW("%s_index_search_slowlog.json"), - INDEXING_SLOW("%s_index_indexing_slowlog.json"); + INDEXING_SLOW("%s_index_indexing_slowlog.json"), + DEPRECATION("%s_deprecation.json"); private final String filenameFormat; diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java index c7007ac60fe57..c2d274bb0b3eb 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java @@ -350,15 +350,7 @@ private void initializeWorkingDirectory(boolean preserveWorkingDirectory) { IOUtils.deleteWithRetry(distributionDir); } - try { - IOUtils.syncWithLinks(distributionDescriptor.getDistributionDir(), distributionDir); - } catch (IOUtils.LinkCreationException e) { - // Note does not work for network drives, e.g. Vagrant - LOGGER.info("Failed to create working dir using hard links. Falling back to copy", e); - // ensure we get a clean copy - IOUtils.deleteWithRetry(distributionDir); - IOUtils.syncWithCopy(distributionDescriptor.getDistributionDir(), distributionDir); - } + IOUtils.syncMaybeWithLinks(distributionDescriptor.getDistributionDir(), distributionDir); } Files.createDirectories(repoDir); Files.createDirectories(dataDir); @@ -773,7 +765,13 @@ private void installModule(String moduleName, DefaultPluginInstallSpec installSp }); - IOUtils.syncWithCopy(modulePath, destination); + // If we aren't overriding anything we can use links here, otherwise do a full copy + if (installSpec.entitlementsOverride == null && installSpec.propertiesOverride == null) { + IOUtils.syncMaybeWithLinks(modulePath, destination); + } else { + IOUtils.syncWithCopy(modulePath, destination); + } + try { if (installSpec.entitlementsOverride != null) { Path entitlementsFile = modulePath.resolve(ENTITLEMENT_POLICY_YAML); @@ -801,7 +799,9 @@ private void installModule(String moduleName, DefaultPluginInstallSpec installSp if (extendedProperty != null) { String[] extendedModules = extendedProperty.split(","); for (String module : extendedModules) { - installModule(module, new DefaultPluginInstallSpec(), modulePaths); + if (spec.getModules().containsKey(module) == false) { + installModule(module, new DefaultPluginInstallSpec(), modulePaths); + } } } } catch (IOException e) { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java index 048a3c49fcade..a84211674f8c6 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java +++ 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java @@ -38,7 +38,8 @@ public DistributionDescriptor resolve(Version version, DistributionType type) { } // Snapshot distributions are never release builds and always use the default distribution - return new DefaultDistributionDescriptor(version, true, distributionDir, DistributionType.DEFAULT); + Version realVersion = Version.fromString(System.getProperty("tests.bwc.main.version", version.toString())); + return new DefaultDistributionDescriptor(realVersion, true, distributionDir, DistributionType.DEFAULT); } return delegate.resolve(version, type); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/IOUtils.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/IOUtils.java index 43034e502fbfb..b1e2175205594 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/IOUtils.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/IOUtils.java @@ -9,6 +9,9 @@ package org.elasticsearch.test.cluster.util; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; @@ -20,6 +23,7 @@ import java.util.stream.Stream; public final class IOUtils { + private static final Logger LOGGER = LogManager.getLogger(IOUtils.class); private static final int RETRY_DELETE_MILLIS = OS.current() == OS.WINDOWS ? 500 : 0; private static final int MAX_RETRY_DELETE_TIMES = OS.current() == OS.WINDOWS ? 15 : 0; @@ -51,6 +55,30 @@ public static void uncheckedDeleteWithRetry(Path path) { } } + /** + * Attempts to do a copy via linking, falling back to a normal copy if an exception is encountered. + * + * @see #syncWithLinks(Path, Path) + * @see #syncWithCopy(Path, Path) + * @param sourceRoot where to copy from + * @param destinationRoot destination to link to + */ + public static void syncMaybeWithLinks(Path sourceRoot, Path destinationRoot) { + try { + syncWithLinks(sourceRoot, destinationRoot); + } catch (LinkCreationException e) { + // Note does not work for network drives, e.g. Vagrant + LOGGER.info("Failed to sync using hard links. Falling back to copy.", e); + // ensure we get a clean copy + try { + deleteWithRetry(destinationRoot); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + syncWithCopy(sourceRoot, destinationRoot); + } + } + /** * Does the equivalent of `cp -lr` and `chmod -r a-w` to save space and improve speed. * We remove write permissions to make sure files are not mistakenly edited (e.g.
the config file ) and changes diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle index 9a21f40a4c4a9..7aaaaaf668643 100644 --- a/x-pack/plugin/analytics/build.gradle +++ b/x-pack/plugin/analytics/build.gradle @@ -7,6 +7,7 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { name = 'x-pack-analytics' @@ -18,6 +19,10 @@ base { archivesName = 'x-pack-analytics' } +tasks.named('javaRestTest') { + usesDefaultDistribution() +} + dependencies { api 'org.apache.commons:commons-math3:3.6.1' compileOnly project(path: xpackModule('core')) diff --git a/x-pack/plugin/analytics/src/javaRestTest/java/org/elasticsearch/multiterms/AggsTimeoutIT.java b/x-pack/plugin/analytics/src/javaRestTest/java/org/elasticsearch/multiterms/AggsTimeoutIT.java new file mode 100644 index 0000000000000..6ca7d38d87842 --- /dev/null +++ b/x-pack/plugin/analytics/src/javaRestTest/java/org/elasticsearch/multiterms/AggsTimeoutIT.java @@ -0,0 +1,321 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.multiterms; + +import org.apache.http.client.config.RequestConfig; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.client.Request; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.test.ListMatcher; +import org.elasticsearch.test.MapMatcher; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.net.SocketTimeoutException; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.any; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; + +/** + * Runs slow aggregations with a timeout and asserts that they timeout and + * cancel the queries. 
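The test class below is built around one pattern: give the HTTP client a socket timeout shorter than the runtime of a deliberately expensive aggregation, treat SocketTimeoutException as the expected outcome, and then verify that the server side actually cancelled the search task. Stripped to a sketch (reusing names from the class that follows):

    try {
        Map<String, Object> response = responseAsMap(client().performRequest(request));
        assertMap("not expected to finish", response, matchesMap()); // fails the test if the search completed
    } catch (SocketTimeoutException timeout) {
        assertNoSearchesRunning(); // the timeout is the success path: prove server-side cancellation happened
    }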
+ */ +public class AggsTimeoutIT extends ESRestTestCase { + private static final int DEPTH = 10; + private static final int VALUE_COUNT = 4; + private static final int TOTAL_DOCS = Math.toIntExact((long) Math.pow(VALUE_COUNT, DEPTH)); + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(1); + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.watcher.enabled", "false") + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.enabled", "false") + .setting("xpack.security.transport.ssl.enabled", "false") + .setting("xpack.security.http.ssl.enabled", "false") + .jvmArg("-Xmx1g") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testTerms() throws Exception { + Request request = new Request("POST", "/deep/_search"); + XContentBuilder body = JsonXContent.contentBuilder().prettyPrint().startObject(); + body.field("size", 0); + agg(body, "terms", 10); + request.setJsonEntity(Strings.toString(body.endObject())); + setTimeout(request); + try { + Map response = responseAsMap(client().performRequest(request)); + assertMap("not expected to finish", response, matchesMap()); + } catch (SocketTimeoutException timeout) { + logger.info("timed out"); + assertNoSearchesRunning(); + } + } + + private void agg(XContentBuilder body, String type, int depth) throws IOException { + if (depth == 0) { + return; + } + body.startObject("aggs").startObject(field("agg", depth)); + { + body.startObject(type); + body.field("field", field("kwd", depth - 1)); + body.endObject(); + } + agg(body, type, depth - 1); + body.endObject().endObject(); + } + + public void testMultiTerms() throws Exception { + Request request = new Request("POST", "/deep/_search"); + XContentBuilder body = JsonXContent.contentBuilder().prettyPrint().startObject(); + body.field("size", 0); + autoDateInMultiTerms(body, b -> { + for (int i = 0; i < DEPTH; i++) { + b.startObject().field("field", field("kwd", i)).endObject(); + } + }); + request.setJsonEntity(Strings.toString(body.endObject())); + setTimeout(request); + try { + Map response = responseAsMap(client().performRequest(request)); + ListMatcher buckets = matchesList(); + for (int i = 0; i < 10; i++) { + buckets = buckets.item( + matchesMap().entry("key_as_string", any(String.class)) + .entry("key", hasSize(10)) + .entry("doc_count", 1) + .entry("adh", matchesMap().entry("buckets", hasSize(1)).entry("interval", "1s")) + ); + } + MapMatcher agg = matchesMap().entry("buckets", buckets) + .entry("doc_count_error_upper_bound", 0) + .entry("sum_other_doc_count", greaterThan(0)); + assertMap(response, matchesMap().extraOk().entry("aggregations", matchesMap().entry("multi", agg))); + } catch (SocketTimeoutException timeout) { + logger.info("timed out"); + assertNoSearchesRunning(); + } + } + + public void testMultiTermWithTimestamp() throws Exception { + Request request = new Request("POST", "/deep/_search"); + XContentBuilder body = JsonXContent.contentBuilder().prettyPrint().startObject(); + body.field("size", 0); + autoDateInMultiTerms(body, b -> { + b.startObject().field("field", field("kwd", 0)).endObject(); + b.startObject().field("field", "@timestamp").endObject(); + }); + request.setJsonEntity(Strings.toString(body.endObject())); + setTimeout(request); + try { + Map response = responseAsMap(client().performRequest(request)); + ListMatcher buckets = matchesList(); + for (int i = 0; i < 10; i++) { + 
buckets = buckets.item( + matchesMap().entry("key_as_string", any(String.class)) + .entry("key", hasSize(10)) + .entry("doc_count", 1) + .entry("adh", matchesMap().entry("buckets", hasSize(1)).entry("interval", "1s")) + ); + } + MapMatcher agg = matchesMap().entry("buckets", buckets) + .entry("doc_count_error_upper_bound", 0) + .entry("sum_other_doc_count", greaterThan(0)); + assertMap(response, matchesMap().extraOk().entry("aggregations", matchesMap().entry("multi", agg))); + } catch (SocketTimeoutException timeout) { + logger.info("timed out"); + assertNoSearchesRunning(); + } + } + + private void autoDateInMultiTerms(XContentBuilder body, CheckedConsumer terms) throws IOException { + body.startObject("aggs").startObject("multi"); + { + body.startObject("multi_terms"); + { + body.startArray("terms"); + terms.accept(body); + body.endArray(); + body.startArray("order"); + { + body.startObject().field("_count", "desc").endObject(); + body.startObject().field("_key", "asc").endObject(); + } + body.endArray(); + } + body.endObject(); + body.startObject("aggs").startObject("adh").startObject("auto_date_histogram"); + { + body.field("field", "@timestamp"); + body.field("buckets", 1); + } + body.endObject().endObject().endObject(); + } + body.endObject().endObject(); + } + + @Before + public void createDeep() throws IOException { + if (indexExists("deep")) { + return; + } + logger.info("creating deep index"); + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("properties"); + mapping.startObject("@timestamp").field("type", "date").endObject(); + for (int f = 0; f < DEPTH; f++) { + mapping.startObject(field("kwd", f)).field("type", "keyword").endObject(); + } + CreateIndexResponse createIndexResponse = createIndex( + "deep", + Settings.builder().put("index.number_of_replicas", 0).build(), + Strings.toString(mapping.endObject().endObject()) + ); + assertThat(createIndexResponse.isAcknowledged(), equalTo(true)); + Bulk bulk = new Bulk(); + bulk.doc(new StringBuilder("{"), 0); + bulk.flush(); + + MapMatcher shardsOk = matchesMap().entry("total", 1).entry("failed", 0).entry("successful", 1); + logger.info("refreshing deep index"); + Map refresh = responseAsMap(client().performRequest(new Request("POST", "/_refresh"))); + assertMap(refresh, matchesMap().entry("_shards", shardsOk)); + + logger.info("double checking deep index count"); + Map count = responseAsMap(client().performRequest(new Request("POST", "/deep/_count"))); + assertMap(count, matchesMap().entry("_shards", shardsOk.entry("skipped", 0)).entry("count", TOTAL_DOCS)); + + logger.info("deep index ready for test"); + } + + private String field(String prefix, int field) { + return String.format(Locale.ROOT, "%s%03d", prefix, field); + } + + @Override + protected boolean preserveClusterUponCompletion() { + return true; + } + + class Bulk { + private static final int BULK_SIZE = Math.toIntExact(ByteSizeValue.ofMb(2).getBytes()); + + StringBuilder bulk = new StringBuilder(); + int current = 0; + int total = 0; + long timestamp = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2025-01-01T00:00:00Z"); + + void doc(StringBuilder doc, int field) throws IOException { + if (field != 0) { + doc.append(','); + } + int len = doc.length(); + for (int value = 0; value < VALUE_COUNT; value++) { + doc.append('"').append(field("kwd", field)).append("\":\"").append(value).append('"'); + if (field == DEPTH - 1) { + doc.append(",\"@timestamp\":").append(timestamp).append('}'); + timestamp += 
TimeValue.timeValueMinutes(1).millis(); + addToBulk(doc); + } else { + doc(doc, field + 1); + } + doc.setLength(len); + } + } + + void addToBulk(StringBuilder doc) throws IOException { + current++; + total++; + bulk.append("{\"index\":{}}\n"); + bulk.append(doc).append('\n'); + if (bulk.length() > BULK_SIZE) { + flush(); + } + } + + void flush() throws IOException { + logger.info( + "Flushing to deep {} docs/{}. Total {}% {}/{}", + current, + ByteSizeValue.ofBytes(bulk.length()), + String.format(Locale.ROOT, "%04.1f", 100.0 * total / TOTAL_DOCS), + total, + TOTAL_DOCS + ); + Request request = new Request("POST", "/deep/_bulk"); + request.setJsonEntity(bulk.toString()); + Map response = responseAsMap(client().performRequest(request)); + assertMap(response, matchesMap().extraOk().entry("errors", false)); + bulk.setLength(0); + current = 0; + } + } + + private void setTimeout(Request request) { + RequestConfig.Builder config = RequestConfig.custom(); + config.setSocketTimeout(Math.toIntExact(TIMEOUT.millis())); + request.setOptions(request.getOptions().toBuilder().setRequestConfig(config.build())); + } + + /** + * Asserts that within a minute the _search has left the _tasks api. + *

+ * It'd sure be more convenient if, whenever the _search has returned + * back to us, the _tasks API didn't contain the _search. But sometimes + * it still does. So long as it stops eventually, that's + * still indicative of the interrupt code working.

+ */ + private void assertNoSearchesRunning() throws Exception { + assertBusy(() -> { + Request tasks = new Request("GET", "/_tasks"); + tasks.addParameter("actions", "*search"); + tasks.addParameter("detailed", ""); + assertBusy(() -> { + Map response = responseAsMap(client().performRequest(tasks)); + // If there are running searches the map in `nodes` is non-empty. + if (response.isEmpty() == false) { + logger.warn("search still running, hot threads:\n{}", hotThreads()); + } + assertMap(response, matchesMap().entry("nodes", matchesMap())); + }); + }, 1, TimeUnit.MINUTES); + } + + private String hotThreads() throws IOException { + Request tasks = new Request("GET", "/_nodes/hot_threads"); + return EntityUtils.toString(client().performRequest(tasks).getEntity()); + } +} diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index e684092099948..563828527afb3 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -210,7 +210,8 @@ private MappedFieldType counterField(String name) { null, false, TimeSeriesParams.MetricType.COUNTER, - IndexMode.TIME_SERIES + IndexMode.TIME_SERIES, + false ); } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchErrorTraceIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchErrorTraceIT.java index 39a6fa1e4b34f..8583844e76aec 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchErrorTraceIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchErrorTraceIT.java @@ -7,15 +7,13 @@ package org.elasticsearch.xpack.search; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.ErrorTraceHelper; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.transport.TransportMessageListener; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentType; import org.junit.Before; @@ -23,8 +21,7 @@ import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Optional; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.BooleanSupplier; public class AsyncSearchErrorTraceIT extends ESIntegTestCase { @@ -38,25 +35,11 @@ protected Collection> nodePlugins() { return List.of(AsyncSearch.class); } - private AtomicBoolean transportMessageHasStackTrace; + private BooleanSupplier transportMessageHasStackTrace; @Before - private void setupMessageListener() { - internalCluster().getDataNodeInstances(TransportService.class).forEach(ts -> { - ts.addMessageListener(new TransportMessageListener() { - @Override - public void onResponseSent(long requestId, String action, Exception error) { - TransportMessageListener.super.onResponseSent(requestId, action, error); - if (action.startsWith("indices:data/read/search")) { - Optional throwable = 
ExceptionsHelper.unwrapCausesAndSuppressed( - error, - t -> t.getStackTrace().length > 0 - ); - transportMessageHasStackTrace.set(throwable.isPresent()); - } - } - }); - }); + public void setupMessageListener() { + transportMessageHasStackTrace = ErrorTraceHelper.setupErrorTraceListener(internalCluster()); } private void setupIndexWithDocs() { @@ -70,7 +53,6 @@ private void setupIndexWithDocs() { } public void testAsyncSearchFailingQueryErrorTraceDefault() throws IOException, InterruptedException { - transportMessageHasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); Request searchRequest = new Request("POST", "/_async_search"); @@ -93,11 +75,10 @@ public void testAsyncSearchFailingQueryErrorTraceDefault() throws IOException, I responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); } // check that the stack trace was not sent from the data node to the coordinating node - assertFalse(transportMessageHasStackTrace.get()); + assertFalse(transportMessageHasStackTrace.getAsBoolean()); } public void testAsyncSearchFailingQueryErrorTraceTrue() throws IOException, InterruptedException { - transportMessageHasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); Request searchRequest = new Request("POST", "/_async_search"); @@ -122,11 +103,10 @@ public void testAsyncSearchFailingQueryErrorTraceTrue() throws IOException, Inte responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); } // check that the stack trace was sent from the data node to the coordinating node - assertTrue(transportMessageHasStackTrace.get()); + assertTrue(transportMessageHasStackTrace.getAsBoolean()); } public void testAsyncSearchFailingQueryErrorTraceFalse() throws IOException, InterruptedException { - transportMessageHasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); Request searchRequest = new Request("POST", "/_async_search"); @@ -151,11 +131,10 @@ public void testAsyncSearchFailingQueryErrorTraceFalse() throws IOException, Int responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); } // check that the stack trace was not sent from the data node to the coordinating node - assertFalse(transportMessageHasStackTrace.get()); + assertFalse(transportMessageHasStackTrace.getAsBoolean()); } public void testAsyncSearchFailingQueryErrorTraceFalseOnSubmitAndTrueOnGet() throws IOException, InterruptedException { - transportMessageHasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); Request searchRequest = new Request("POST", "/_async_search"); @@ -180,11 +159,10 @@ public void testAsyncSearchFailingQueryErrorTraceFalseOnSubmitAndTrueOnGet() thr responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); } // check that the stack trace was not sent from the data node to the coordinating node - assertFalse(transportMessageHasStackTrace.get()); + assertFalse(transportMessageHasStackTrace.getAsBoolean()); } public void testAsyncSearchFailingQueryErrorTraceTrueOnSubmitAndFalseOnGet() throws IOException, InterruptedException { - transportMessageHasStackTrace = new AtomicBoolean(); setupIndexWithDocs(); Request searchRequest = new Request("POST", "/_async_search"); @@ -209,7 +187,7 @@ public void testAsyncSearchFailingQueryErrorTraceTrueOnSubmitAndFalseOnGet() thr responseEntity = performRequestAndGetResponseEntityAfterDelay(request, TimeValue.timeValueSeconds(1L)); } // check that the stack trace was sent from the data node to the 
coordinating node - assertTrue(transportMessageHasStackTrace.get()); + assertTrue(transportMessageHasStackTrace.getAsBoolean()); } private Map performRequestAndGetResponseEntityAfterDelay(Request r, TimeValue sleep) throws IOException, diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 850dd4bbf0c59..d3052cb191a06 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -102,6 +102,13 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("esql/190_lookup_join/alias-pattern-multiple", "LOOKUP JOIN does not support index aliases for now") task.skipTest("esql/190_lookup_join/alias-pattern-single", "LOOKUP JOIN does not support index aliases for now") task.skipTest("esql/180_match_operator/match with disjunctions", "Disjunctions in full text functions work now") + // Expected deprecation warning to compat yaml tests: + task.addAllowedWarningRegex(".*rollup functionality will be removed in Elasticsearch.*") + task.skipTest("esql/40_tsdb/from doc with aggregate_metric_double", "TODO: support for subset of metric fields") + task.skipTest("esql/40_tsdb/stats on aggregate_metric_double", "TODO: support for subset of metric fields") + task.skipTest("esql/40_tsdb/from index pattern unsupported counter", "TODO: support for subset of metric fields") + task.skipTest("esql/40_unsupported_types/unsupported", "TODO: support for subset of metric fields") + task.skipTest("esql/40_unsupported_types/unsupported with sort", "TODO: support for subset of metric fields") }) tasks.named('yamlRestCompatTest').configure { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java index 431210264ebbf..f126a546ae850 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.PlainShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ -141,8 +140,7 @@ protected ShardsIterator shards( final ForgetFollowerAction.Request request, final String[] concreteIndices ) { - final GroupShardsIterator activePrimaryShards = clusterState.routingTable() - .activePrimaryShardsGrouped(concreteIndices, false); + final List activePrimaryShards = clusterState.routingTable().activePrimaryShardsGrouped(concreteIndices, false); final List shardRoutings = new ArrayList<>(); final Iterator it = activePrimaryShards.iterator(); while (it.hasNext()) { diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index 55b3428907c40..ca5f8406fc97c 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -66,7 +66,6 @@ exports org.elasticsearch.xpack.core.esql; exports org.elasticsearch.xpack.core.esql.action; exports org.elasticsearch.xpack.core.esql.action.internal; // TODO: qualify to esql when modularized - exports org.elasticsearch.xpack.core.frozen.action; exports 
org.elasticsearch.xpack.core.frozen; exports org.elasticsearch.xpack.core.graph.action; exports org.elasticsearch.xpack.core.graph; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/FrozenEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/FrozenEngine.java index 3b242ca94ac61..998603d8f4bc8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/FrozenEngine.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/FrozenEngine.java @@ -17,7 +17,6 @@ import org.apache.lucene.store.Directory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.index.engine.Engine; @@ -55,12 +54,7 @@ * stats in order to obtain the number of reopens. */ public final class FrozenEngine extends ReadOnlyEngine { - public static final Setting INDEX_FROZEN = Setting.boolSetting( - "index.frozen", - false, - Setting.Property.IndexScope, - Setting.Property.PrivateIndex - ); + private final SegmentsStats segmentsStats; private final DocsStats docsStats; private volatile ElasticsearchDirectoryReader lastOpenedReader; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 421a306babf29..731ab15001414 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -9,6 +9,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -216,9 +217,7 @@ private SegmentCommitInfo syncSegment( Codec codec = si.getCodec(); Directory sourceDir = si.dir; if (si.getUseCompoundFile()) { - sourceDir = new LinkedFilesDirectory.CloseMePleaseWrapper( - codec.compoundFormat().getCompoundReader(sourceDir, si, IOContext.DEFAULT) - ); + sourceDir = new LinkedFilesDirectory.CloseMePleaseWrapper(codec.compoundFormat().getCompoundReader(sourceDir, si)); toClose = sourceDir; } final String segmentSuffix = ""; @@ -254,7 +253,7 @@ private SegmentCommitInfo syncSegment( false, IndexOptions.NONE, DocValuesType.NONE, - fieldInfo.docValuesSkipIndexType(), + DocValuesSkipIndexType.NONE, -1, fieldInfo.attributes(), 0, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index cf5ebc8adc56d..3ade1a0eb1d47 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -411,9 +411,9 @@ public List getRestHandlers( } public static Path resolveConfigFile(Environment env, String name) { - Path config = env.configFile().resolve(name); + Path config = env.configDir().resolve(name); if (Files.exists(config) == false) { - Path legacyConfig = env.configFile().resolve("x-pack").resolve(name); + Path legacyConfig = 
env.configDir().resolve("x-pack").resolve(name); if (Files.exists(legacyConfig)) { deprecationLogger.warn( DeprecationCategory.OTHER, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 6aef618288fd2..c2d157e512a21 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -240,7 +240,7 @@ public Iterator> settings() { public static final List DEFAULT_CIPHERS = JDK12_CIPHERS; - public static final Setting PASSWORD_HASHING_ALGORITHM = defaultStoredHashAlgorithmSetting( + public static final Setting PASSWORD_HASHING_ALGORITHM = defaultStoredPasswordHashAlgorithmSetting( "xpack.security.authc.password_hashing.algorithm", (s) -> { if (XPackSettings.FIPS_MODE_ENABLED.get(s)) { @@ -251,7 +251,7 @@ public Iterator> settings() { } ); - public static final Setting SERVICE_TOKEN_HASHING_ALGORITHM = defaultStoredHashAlgorithmSetting( + public static final Setting SERVICE_TOKEN_HASHING_ALGORITHM = defaultStoredPasswordHashAlgorithmSetting( "xpack.security.authc.service_token_hashing.algorithm", (s) -> Hasher.PBKDF2_STRETCH.name() ); @@ -259,11 +259,17 @@ public Iterator> settings() { /* * Do not allow insecure hashing algorithms to be used for password hashing */ - public static Setting defaultStoredHashAlgorithmSetting(String key, Function defaultHashingAlgorithm) { + public static Setting defaultStoredPasswordHashAlgorithmSetting( + String key, + Function defaultHashingAlgorithm + ) { return new Setting<>(key, defaultHashingAlgorithm, Function.identity(), v -> { - if (Hasher.getAvailableAlgoStoredHash().contains(v.toLowerCase(Locale.ROOT)) == false) { + if (Hasher.getAvailableAlgoStoredPasswordHash().contains(v.toLowerCase(Locale.ROOT)) == false) { throw new IllegalArgumentException( - "Invalid algorithm: " + v + ". Valid values for password hashing are " + Hasher.getAvailableAlgoStoredHash().toString() + "Invalid algorithm: " + + v + + ". Valid values for password hashing are " + + Hasher.getAvailableAlgoStoredPasswordHash().toString() ); } else if (v.regionMatches(true, 0, "pbkdf2", 0, "pbkdf2".length())) { try { @@ -280,7 +286,38 @@ public static Setting defaultStoredHashAlgorithmSetting(String key, Func }, Property.NodeScope); } - public static final List DEFAULT_SUPPORTED_PROTOCOLS = Arrays.asList("TLSv1.3", "TLSv1.2", "TLSv1.1"); + /** + * Similar to {@link #defaultStoredPasswordHashAlgorithmSetting(String, Function)} but for secure, high-entropy tokens so salted secure + * hashing algorithms are allowed, in addition to algorithms that are suitable for password hashing. + */ + public static Setting defaultStoredSecureTokenHashAlgorithmSetting( + String key, + Function defaultHashingAlgorithm + ) { + return new Setting<>(key, defaultHashingAlgorithm, Function.identity(), v -> { + if (Hasher.getAvailableAlgoStoredSecureTokenHash().contains(v.toLowerCase(Locale.ROOT)) == false) { + throw new IllegalArgumentException( + "Invalid algorithm: " + + v + + ". 
Valid values for secure token hashing are " + + Hasher.getAvailableAlgoStoredSecureTokenHash().toString() + ); + } else if (v.regionMatches(true, 0, "pbkdf2", 0, "pbkdf2".length())) { + try { + SecretKeyFactory.getInstance("PBKDF2withHMACSHA512"); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException( + "Support for PBKDF2WithHMACSHA512 must be available in order to use any of the PBKDF2 algorithms for the [" + + key + + "] setting.", + e + ); + } + } + }, Property.NodeScope); + } + + public static final List DEFAULT_SUPPORTED_PROTOCOLS = Arrays.asList("TLSv1.3", "TLSv1.2"); public static final SslClientAuthenticationMode CLIENT_AUTH_DEFAULT = SslClientAuthenticationMode.REQUIRED; public static final SslClientAuthenticationMode HTTP_CLIENT_AUTH_DEFAULT = SslClientAuthenticationMode.NONE; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java index fe970bef87145..1993545075979 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java @@ -391,6 +391,22 @@ public T getTaskAndCheckAuthentication( TaskManager taskManager, AsyncExecutionId asyncExecutionId, Class tClass + ) throws IOException { + return getTaskAndCheckAuthentication(taskManager, security, asyncExecutionId, tClass); + } + + /** + * Returns the {@link AsyncTask} if the provided asyncTaskId + * is registered in the task manager, null otherwise. + * + * This method throws a {@link ResourceNotFoundException} if the authenticated user + * is not the creator of the original task. 
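A usage sketch of the new static overload (the caller context, wrapper class, and the exact `AsyncSearchSecurity` package are assumptions; the null/`ResourceNotFoundException` contract is the one documented above):

```java
import java.io.IOException;

import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.xpack.core.async.AsyncExecutionId;
import org.elasticsearch.xpack.core.async.AsyncSearchSecurity;
import org.elasticsearch.xpack.core.async.AsyncTaskIndexService;
import org.elasticsearch.xpack.search.AsyncSearchTask;

final class GetAsyncTaskSketch {
    // Returns the in-flight task, or null if it has already completed and was
    // unregistered from the task manager; throws ResourceNotFoundException if
    // the authenticated user did not create the task.
    static AsyncSearchTask lookup(TaskManager taskManager, AsyncSearchSecurity security, String encodedId) throws IOException {
        AsyncExecutionId executionId = AsyncExecutionId.decode(encodedId);
        return AsyncTaskIndexService.getTaskAndCheckAuthentication(taskManager, security, executionId, AsyncSearchTask.class);
    }
}
```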
+ */ + public static T getTaskAndCheckAuthentication( + TaskManager taskManager, + AsyncSearchSecurity security, + AsyncExecutionId asyncExecutionId, + Class tClass ) throws IOException { T asyncTask = getTask(taskManager, asyncExecutionId, tClass); if (asyncTask == null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java index d02fb85f46b1e..26ad24d80e14d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java @@ -8,31 +8,29 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.OriginSettingClient; -import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; -import org.elasticsearch.xpack.core.template.IndexTemplateConfig; import java.io.IOException; import java.util.Date; import java.util.Objects; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Supplier; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -43,59 +41,39 @@ public abstract class AbstractAuditor { private static final Logger logger = LogManager.getLogger(AbstractAuditor.class); static final int MAX_BUFFER_SIZE = 1000; - static final TimeValue MASTER_TIMEOUT = TimeValue.timeValueMinutes(1); + protected static final TimeValue MASTER_TIMEOUT = TimeValue.timeValueMinutes(1); private final OriginSettingClient client; private final String nodeName; - private final String auditIndex; - private final String templateName; - private final Supplier templateSupplier; + private final String auditIndexWriteAlias; private final AbstractAuditMessageFactory messageFactory; - private final AtomicBoolean hasLatestTemplate; - - private Queue backlog; private final ClusterService clusterService; - private final AtomicBoolean putTemplateInProgress; - - protected AbstractAuditor( - OriginSettingClient client, - String auditIndex, - IndexTemplateConfig templateConfig, - String nodeName, - AbstractAuditMessageFactory messageFactory, - ClusterService clusterService - ) { + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final 
AtomicBoolean indexAndAliasCreated; - this(client, auditIndex, templateConfig.getTemplateName(), () -> { - try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, templateConfig.loadBytes())) { - return new TransportPutComposableIndexTemplateAction.Request(templateConfig.getTemplateName()).indexTemplate( - ComposableIndexTemplate.parse(parser) - ).masterNodeTimeout(MASTER_TIMEOUT); - } catch (IOException e) { - throw new ElasticsearchParseException("unable to parse composable template " + templateConfig.getTemplateName(), e); - } - }, nodeName, messageFactory, clusterService); - } + private Queue backlog; + private final AtomicBoolean indexAndAliasCreationInProgress; + private final ExecutorService executorService; protected AbstractAuditor( OriginSettingClient client, - String auditIndex, - String templateName, - Supplier templateSupplier, + String auditIndexWriteAlias, String nodeName, AbstractAuditMessageFactory messageFactory, - ClusterService clusterService + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver, + ExecutorService executorService ) { this.client = Objects.requireNonNull(client); - this.auditIndex = Objects.requireNonNull(auditIndex); - this.templateName = Objects.requireNonNull(templateName); - this.templateSupplier = Objects.requireNonNull(templateSupplier); + this.auditIndexWriteAlias = Objects.requireNonNull(auditIndexWriteAlias); this.messageFactory = Objects.requireNonNull(messageFactory); - this.clusterService = Objects.requireNonNull(clusterService); this.nodeName = Objects.requireNonNull(nodeName); + this.clusterService = Objects.requireNonNull(clusterService); + this.indexNameExpressionResolver = Objects.requireNonNull(indexNameExpressionResolver); this.backlog = new ConcurrentLinkedQueue<>(); - this.hasLatestTemplate = new AtomicBoolean(); - this.putTemplateInProgress = new AtomicBoolean(); + this.indexAndAliasCreated = new AtomicBoolean(); + this.indexAndAliasCreationInProgress = new AtomicBoolean(); + this.executorService = executorService; } public void audit(Level level, String resourceId, String message) { @@ -114,6 +92,19 @@ public void error(String resourceId, String message) { audit(Level.ERROR, resourceId, message); } + /** + * Calling reset will cause the auditor to check the required + * index and alias exist and recreate if necessary + */ + public void reset() { + indexAndAliasCreated.set(false); + if (backlog == null) { + // create a new backlog in case documents need + // to be temporarily stored when the new index/alias is created + backlog = new ConcurrentLinkedQueue<>(); + } + } + private static void onIndexResponse(DocWriteResponse response) { logger.trace("Successfully wrote audit message"); } @@ -123,35 +114,24 @@ private static void onIndexFailure(Exception exception) { } protected void indexDoc(ToXContent toXContent) { - if (hasLatestTemplate.get()) { + if (indexAndAliasCreated.get()) { writeDoc(toXContent); return; } - if (MlIndexAndAlias.hasIndexTemplate(clusterService.state(), templateName)) { + // install template & create index with alias + var createListener = ActionListener.wrap(success -> { + indexAndAliasCreationInProgress.set(false); synchronized (this) { - // synchronized so nothing can be added to backlog while this value changes - hasLatestTemplate.set(true); + // synchronized so nothing can be added to backlog while writing it + indexAndAliasCreated.set(true); + writeBacklog(); } - writeDoc(toXContent); - return; - } - ActionListener putTemplateListener 
= ActionListener.wrap(r -> { - synchronized (this) { - // synchronized so nothing can be added to backlog while this value changes - hasLatestTemplate.set(true); - } - logger.info("Auditor template [{}] successfully installed", templateName); - putTemplateInProgress.set(false); - writeBacklog(); - }, e -> { - logger.warn(Strings.format("Error putting latest template [%s]", templateName), e); - putTemplateInProgress.set(false); - }); + }, e -> { indexAndAliasCreationInProgress.set(false); }); synchronized (this) { - if (hasLatestTemplate.get() == false) { + if (indexAndAliasCreated.get() == false) { // synchronized so that hasLatestTemplate does not change value // between the read and adding to the backlog assert backlog != null; @@ -165,29 +145,31 @@ protected void indexDoc(ToXContent toXContent) { } // stop multiple invocations - if (putTemplateInProgress.compareAndSet(false, true)) { - MlIndexAndAlias.installIndexTemplateIfRequired( - clusterService.state(), - client, - templateSupplier.get(), - putTemplateListener - ); + if (indexAndAliasCreationInProgress.compareAndSet(false, true)) { + installTemplateAndCreateIndex(createListener); } - return; } } - - indexDoc(toXContent); } private void writeDoc(ToXContent toXContent) { - client.index(indexRequest(toXContent), ActionListener.wrap(AbstractAuditor::onIndexResponse, AbstractAuditor::onIndexFailure)); + client.index(indexRequest(toXContent), ActionListener.wrap(AbstractAuditor::onIndexResponse, e -> { + if (e instanceof IndexNotFoundException) { + executorService.execute(() -> { + reset(); + indexDoc(toXContent); + }); + } else { + onIndexFailure(e); + } + })); } private IndexRequest indexRequest(ToXContent toXContent) { - IndexRequest indexRequest = new IndexRequest(auditIndex); + IndexRequest indexRequest = new IndexRequest(auditIndexWriteAlias); indexRequest.source(toXContentBuilder(toXContent)); indexRequest.timeout(TimeValue.timeValueSeconds(5)); + indexRequest.setRequireAlias(true); return indexRequest; } @@ -206,7 +188,7 @@ protected void clearBacklog() { protected void writeBacklog() { assert backlog != null; if (backlog == null) { - logger.error("Message back log has already been written"); + logger.debug("Message back log has already been written"); return; } @@ -221,7 +203,7 @@ protected void writeBacklog() { if (bulkItemResponses.hasFailures()) { logger.warn("Failures bulk indexing the message back log: {}", bulkItemResponses.buildFailureMessage()); } else { - logger.trace("Successfully wrote audit message backlog after upgrading template"); + logger.trace("Successfully wrote audit message backlog"); } backlog = null; }, AbstractAuditor::onIndexFailure)); @@ -231,4 +213,32 @@ protected void writeBacklog() { int backLogSize() { return backlog.size(); } + + private void installTemplateAndCreateIndex(ActionListener listener) { + SubscribableListener.newForked(l -> { + MlIndexAndAlias.installIndexTemplateIfRequired(clusterService.state(), client, templateVersion(), putTemplateRequest(), l); + }).andThen((l, success) -> { + var indexDetails = indexDetails(); + MlIndexAndAlias.createIndexAndAliasIfNecessary( + client, + clusterService.state(), + indexNameExpressionResolver, + indexDetails.indexPrefix(), + indexDetails.indexVersion(), + auditIndexWriteAlias, + MASTER_TIMEOUT, + ActiveShardCount.DEFAULT, + l + ); + + }).addListener(listener); + } + + protected abstract TransportPutComposableIndexTemplateAction.Request putTemplateRequest(); + + protected abstract int templateVersion(); + + protected abstract IndexDetails 
indexDetails(); + + public record IndexDetails(String indexPrefix, String indexVersion) {}; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java index 48fb8ebdc577d..cba1df9b79c76 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java @@ -50,7 +50,6 @@ public static Predicate getReindexRequiredPredicate(Metadata metadata, bo public static boolean reindexRequired(IndexMetadata indexMetadata, boolean filterToBlockedStatus) { return creationVersionBeforeMinimumWritableVersion(indexMetadata) && isNotSearchableSnapshot(indexMetadata) - && isNotClosed(indexMetadata) && matchBlockedStatus(indexMetadata, filterToBlockedStatus); } @@ -62,10 +61,6 @@ private static boolean creationVersionBeforeMinimumWritableVersion(IndexMetadata return metadata.getCreationVersion().before(MINIMUM_WRITEABLE_VERSION_AFTER_UPGRADE); } - private static boolean isNotClosed(IndexMetadata indexMetadata) { - return indexMetadata.getState().equals(IndexMetadata.State.CLOSE) == false; - } - private static boolean matchBlockedStatus(IndexMetadata indexMetadata, boolean filterToBlockedStatus) { return MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.get(indexMetadata.getSettings()) == filterToBlockedStatus; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java index e433093cdfb9a..6bfa7e5bdb2bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java @@ -36,6 +36,8 @@ public final class EnrichMetadata extends AbstractNamedDiffable static final ParseField POLICIES = new ParseField("policies"); + public static final EnrichMetadata EMPTY = new EnrichMetadata(Collections.emptyMap()); + @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "enrich_metadata", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java index 36322ed6c6cbd..79644852eca32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java @@ -10,12 +10,16 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.action.support.local.LocalClusterStateRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; 
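The imports added above (`CancellableTask`, `Task`, `TaskId`, `UpdateForV10`, `LocalClusterStateRequest`) support the request conversion that follows. Restated compactly, with generics written out (all names below come from this hunk):

```java
// The request now reads the cluster state locally instead of forwarding to
// the elected master: it stays deserializable only for BwC with pre-9.0
// remote callers (which never serialized a `local` flag), and it runs as a
// cancellable task so a slow stats call can be aborted.
public static class Request extends LocalClusterStateRequest {

    public Request(TimeValue masterNodeTimeout) {
        super(masterNodeTimeout);
    }

    @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
    public Request(StreamInput in) throws IOException {
        super(in, false); // the old MasterNodeRequest did not serialize `local`
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
        return new CancellableTask(id, type, action, "", parentTaskId, headers);
    }
}
```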
import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; @@ -23,6 +27,7 @@ import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.Objects; public class EnrichStatsAction extends ActionType { @@ -34,20 +39,31 @@ private EnrichStatsAction() { super(NAME); } - public static class Request extends MasterNodeRequest { + public static class Request extends LocalClusterStateRequest { public Request(TimeValue masterNodeTimeout) { super(masterNodeTimeout); } + /** + * NB prior to 9.0 this was a TransportMasterNodeAction so for BwC we must remain able to read these requests until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) public Request(StreamInput in) throws IOException { - super(in); + // This request extended MasterNodeRequest instead of MasterNodeReadRequest, meaning that it didn't serialize the `local` field. + super(in, false); } @Override public ActionRequestValidationException validate() { return null; } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } } public static class Response extends ActionResponse implements ToXContentObject { @@ -62,13 +78,6 @@ public Response(List executingPolicies, List this.cacheStats = cacheStats; } - public Response(StreamInput in) throws IOException { - super(in); - executingPolicies = in.readCollectionAsList(ExecutingPolicy::new); - coordinatorStats = in.readCollectionAsList(CoordinatorStats::new); - cacheStats = in.readCollectionAsList(CacheStats::new); - } - public List getExecutingPolicies() { return executingPolicies; } @@ -81,6 +90,11 @@ public List getCacheStats() { return cacheStats; } + /** + * NB prior to 9.0 this was a TransportMasterNodeAction so for BwC we must remain able to write these responses until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(executingPolicies); @@ -167,10 +181,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public record ExecutingPolicy(String name, TaskInfo taskInfo) implements Writeable, ToXContentFragment { - ExecutingPolicy(StreamInput in) throws IOException { - this(in.readString(), TaskInfo.from(in)); - } - + /** + * NB prior to 9.0 this was a TransportMasterNodeAction so for BwC we must remain able to write these responses until + * we no longer need to support calling this action remotely. 
+ */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java index 7f138dec7ee23..ae02dc781e0dd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java @@ -9,10 +9,14 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.action.support.local.LocalClusterStateRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; @@ -33,7 +37,7 @@ private GetEnrichPolicyAction() { super(NAME); } - public static class Request extends MasterNodeReadRequest { + public static class Request extends LocalClusterStateRequest { private final List names; @@ -42,6 +46,11 @@ public Request(TimeValue masterNodeTimeout, String... names) { this.names = List.of(names); } + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC we must remain able to read these requests until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) public Request(StreamInput in) throws IOException { super(in); this.names = in.readStringCollectionAsImmutableList(); @@ -52,14 +61,13 @@ public ActionRequestValidationException validate() { return null; } - public List getNames() { - return names; + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); } - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringCollection(names); + public List getNames() { + return names; } @Override @@ -89,10 +97,11 @@ public Response(Map policies) { .collect(Collectors.toList()); } - public Response(StreamInput in) throws IOException { - policies = in.readCollectionAsList(EnrichPolicy.NamedPolicy::new); - } - + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC we must remain able to write these responses until + * we no longer need to support calling this action remotely. 
+ */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(policies); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java deleted file mode 100644 index d4d76200c25be..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.core.frozen.action; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.protocol.xpack.frozen.FreezeResponse; - -public class FreezeIndexAction extends ActionType { - - public static final FreezeIndexAction INSTANCE = new FreezeIndexAction(); - public static final String NAME = "indices:admin/freeze"; - - private FreezeIndexAction() { - super(NAME); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/BaseInferenceActionRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/BaseInferenceActionRequest.java index e426574c52ce6..855b0bdebb417 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/BaseInferenceActionRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/BaseInferenceActionRequest.java @@ -7,20 +7,35 @@ package org.elasticsearch.xpack.core.inference.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.TaskType; import java.io.IOException; +/** + * Base class for inference action requests. Tracks request routing state to prevent potential routing loops + * and supports both streaming and non-streaming inference operations. + */ public abstract class BaseInferenceActionRequest extends ActionRequest { + private boolean hasBeenRerouted; + public BaseInferenceActionRequest() { super(); } public BaseInferenceActionRequest(StreamInput in) throws IOException { super(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING)) { + this.hasBeenRerouted = in.readBoolean(); + } else { + // For backwards compatibility, we treat all inference requests coming from ES nodes having + // a version pre-node-local-rate-limiting as already rerouted to maintain pre-node-local-rate-limiting behavior. 
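This backwards-compatibility default is the read half of a version-gated wire field; the matching write half appears in `writeTo` further down. A self-contained sketch of the pattern, assuming a hypothetical `ExampleRequest` (the gating constant is the real one referenced in this change):

```java
import java.io.IOException;

import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

// Hypothetical request showing the version-gated field pattern: the boolean
// is only on the wire when both nodes are on or after the gating version,
// and readers substitute a conservative default (true) for older senders.
public class ExampleRequest extends ActionRequest {
    private final boolean hasBeenRerouted;

    public ExampleRequest(StreamInput in) throws IOException {
        super(in);
        this.hasBeenRerouted = in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING)
            ? in.readBoolean()
            : true; // pre-gate senders are treated as already rerouted
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING)) {
            out.writeBoolean(hasBeenRerouted);
        }
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }
}
```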
+ this.hasBeenRerouted = true; + } } public abstract boolean isStreaming(); @@ -28,4 +43,20 @@ public BaseInferenceActionRequest(StreamInput in) throws IOException { public abstract TaskType getTaskType(); public abstract String getInferenceEntityId(); + + public void setHasBeenRerouted(boolean hasBeenRerouted) { + this.hasBeenRerouted = hasBeenRerouted; + } + + public boolean hasBeenRerouted() { + return hasBeenRerouted; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING)) { + out.writeBoolean(hasBeenRerouted); + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index f88909ba4208e..f2b2c563d7519 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -47,7 +47,7 @@ public class InferenceAction extends ActionType { public static final InferenceAction INSTANCE = new InferenceAction(); - public static final String NAME = "cluster:monitor/xpack/inference"; + public static final String NAME = "cluster:internal/xpack/inference"; public InferenceAction() { super(NAME); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceActionProxy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceActionProxy.java new file mode 100644 index 0000000000000..68cd39f26b456 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceActionProxy.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Objects; + +/** + * This action is used when making a REST request to the inference API. The transport handler + * will then look at the task type in the params (or retrieve it from the persisted model if it wasn't + * included in the params) to determine where this request should be routed. If the task type is chat completion + * then it will be routed to the unified chat completion handler by creating the {@link UnifiedCompletionAction}. + * If not, it will be passed along to {@link InferenceAction}. 
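In code terms, the routing described in this javadoc reduces to a fork on the task type; a minimal sketch (the wrapper class and method are assumptions, while the two action names and `TaskType.CHAT_COMPLETION` come from this change):

```java
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.xpack.core.inference.action.InferenceAction;
import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy;
import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction;

final class ProxyRoutingSketch {
    // Only the fork is shown; parsing request.getContent() into the concrete
    // request type, and actually executing the chosen action, are elided.
    static String route(InferenceActionProxy.Request request) {
        // When the REST params carry no task type, it is resolved from the
        // persisted model configuration before this point.
        TaskType taskType = request.getTaskType();
        return taskType == TaskType.CHAT_COMPLETION ? UnifiedCompletionAction.NAME : InferenceAction.NAME;
    }
}
```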
+ */ +public class InferenceActionProxy extends ActionType { + public static final InferenceActionProxy INSTANCE = new InferenceActionProxy(); + public static final String NAME = "cluster:monitor/xpack/inference/post"; + + public InferenceActionProxy() { + super(NAME); + } + + public static class Request extends ActionRequest { + + private final TaskType taskType; + private final String inferenceEntityId; + private final BytesReference content; + private final XContentType contentType; + private final TimeValue timeout; + private final boolean stream; + + public Request( + TaskType taskType, + String inferenceEntityId, + BytesReference content, + XContentType contentType, + TimeValue timeout, + boolean stream + ) { + this.taskType = taskType; + this.inferenceEntityId = inferenceEntityId; + this.content = content; + this.contentType = contentType; + this.timeout = timeout; + this.stream = stream; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.taskType = TaskType.fromStream(in); + this.inferenceEntityId = in.readString(); + this.content = in.readBytesReference(); + this.contentType = in.readEnum(XContentType.class); + this.timeout = in.readTimeValue(); + + // streaming is not supported yet for transport traffic + this.stream = false; + } + + public TaskType getTaskType() { + return taskType; + } + + public String getInferenceEntityId() { + return inferenceEntityId; + } + + public BytesReference getContent() { + return content; + } + + public XContentType getContentType() { + return contentType; + } + + public TimeValue getTimeout() { + return timeout; + } + + public boolean isStreaming() { + return stream; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(inferenceEntityId); + taskType.writeTo(out); + out.writeBytesReference(content); + XContentHelper.writeTo(out, contentType); + out.writeTimeValue(timeout); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return taskType == request.taskType + && Objects.equals(inferenceEntityId, request.inferenceEntityId) + && Objects.equals(content, request.content) + && contentType == request.contentType + && timeout == request.timeout + && stream == request.stream; + } + + @Override + public int hashCode() { + return Objects.hash(taskType, inferenceEntityId, content, contentType, timeout, stream); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java index f5c852a0450ae..43c84ad914c2a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java @@ -21,7 +21,7 @@ public class UnifiedCompletionAction extends ActionType { public static final UnifiedCompletionAction INSTANCE = new UnifiedCompletionAction(); - public static final String NAME = "cluster:monitor/xpack/inference/unified"; + public static final String NAME = "cluster:internal/xpack/inference/unified"; public UnifiedCompletionAction() { super(NAME); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java index cc59ae890467b..505d058394db8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UpdateInferenceModelAction.java @@ -70,8 +70,8 @@ public Request(String inferenceEntityId, BytesReference content, XContentType co public Request(StreamInput in) throws IOException { super(in); this.inferenceEntityId = in.readString(); - this.content = in.readBytesReference(); this.taskType = TaskType.fromStream(in); + this.content = in.readBytesReference(); this.contentType = in.readEnum(XContentType.class); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceByteEmbedding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceByteEmbedding.java new file mode 100644 index 0000000000000..7d7176a9a5a51 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceByteEmbedding.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +public record InferenceByteEmbedding(byte[] values) implements Writeable, ToXContentObject, EmbeddingInt { + public static final String EMBEDDING = "embedding"; + + public InferenceByteEmbedding(StreamInput in) throws IOException { + this(in.readByteArray()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByteArray(values); + } + + public static InferenceByteEmbedding of(List embeddingValuesList) { + byte[] embeddingValues = new byte[embeddingValuesList.size()]; + for (int i = 0; i < embeddingValuesList.size(); i++) { + embeddingValues[i] = embeddingValuesList.get(i); + } + return new InferenceByteEmbedding(embeddingValues); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.startArray(EMBEDDING); + for (byte value : values) { + builder.value(value); + } + builder.endArray(); + + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + float[] toFloatArray() { + float[] floatArray = new float[values.length]; + for (int i = 0; i < values.length; i++) { + floatArray[i] = ((Byte) values[i]).floatValue(); + } + return floatArray; + } + + double[] toDoubleArray() { + double[] doubleArray = new double[values.length]; + for (int i = 0; i < values.length; i++) { + doubleArray[i] = ((Byte) values[i]).doubleValue(); + } + return 
doubleArray; + } + + @Override + public int getSize() { + return values().length; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InferenceByteEmbedding embedding = (InferenceByteEmbedding) o; + return Arrays.equals(values, embedding.values); + } + + @Override + public int hashCode() { + return Arrays.hashCode(values); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingBitResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingBitResults.java new file mode 100644 index 0000000000000..887c07558ab71 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingBitResults.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Writes a text embedding result in the following JSON format + * { + * "text_embedding_bits": [ + * { + * "embedding": [ + * 23 + * ] + * }, + * { + * "embedding": [ + * -23 + * ] + * } + * ] + * } + */ +public record InferenceTextEmbeddingBitResults(List<InferenceByteEmbedding> embeddings) implements InferenceServiceResults, TextEmbedding { + public static final String NAME = "text_embedding_service_bit_results"; + public static final String TEXT_EMBEDDING_BITS = "text_embedding_bits"; + + public InferenceTextEmbeddingBitResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceByteEmbedding::new)); + } + + @Override + public int getFirstEmbeddingSize() { + return TextEmbeddingUtils.getFirstEmbeddingSize(new ArrayList<>(embeddings)); + } + + @Override + public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) { + return ChunkedToXContentHelper.array(TEXT_EMBEDDING_BITS, embeddings.iterator()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(embeddings); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public List<? extends InferenceResults> transformToCoordinationFormat() { + return embeddings.stream() + .map(embedding -> new MlTextEmbeddingResults(TEXT_EMBEDDING_BITS, embedding.toDoubleArray(), false)) + .toList(); + } + + @Override + @SuppressWarnings("deprecation") + public List<? extends InferenceResults> transformToLegacyFormat() { + var legacyEmbedding = new LegacyTextEmbeddingResults( + embeddings.stream().map(embedding -> new LegacyTextEmbeddingResults.Embedding(embedding.toFloatArray())).toList() + ); + + return List.of(legacyEmbedding); + } + + public Map<String, Object> asMap() { + Map<String, Object> map = 
new LinkedHashMap<>(); + map.put(TEXT_EMBEDDING_BITS, embeddings); + + return map; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InferenceTextEmbeddingBitResults that = (InferenceTextEmbeddingBitResults) o; + return Objects.equals(embeddings, that.embeddings); + } + + @Override + public int hashCode() { + return Objects.hash(embeddings); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java index 16dca7b04d526..1ae54220508c5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java @@ -9,21 +9,16 @@ package org.elasticsearch.xpack.core.inference.results; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; @@ -33,7 +28,7 @@ /** * Writes a text embedding result in the follow json format * { - * "text_embedding": [ + * "text_embedding_bytes": [ * { * "embedding": [ * 23 @@ -111,78 +106,4 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(embeddings); } - - public record InferenceByteEmbedding(byte[] values) implements Writeable, ToXContentObject, EmbeddingInt { - public static final String EMBEDDING = "embedding"; - - public InferenceByteEmbedding(StreamInput in) throws IOException { - this(in.readByteArray()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeByteArray(values); - } - - public static InferenceByteEmbedding of(List embeddingValuesList) { - byte[] embeddingValues = new byte[embeddingValuesList.size()]; - for (int i = 0; i < embeddingValuesList.size(); i++) { - embeddingValues[i] = embeddingValuesList.get(i); - } - return new InferenceByteEmbedding(embeddingValues); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.startArray(EMBEDDING); - for (byte value : values) { - builder.value(value); - } - builder.endArray(); - - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - private float[] toFloatArray() { - float[] floatArray = new float[values.length]; - for (int i = 0; i < values.length; i++) { - floatArray[i] = ((Byte) values[i]).floatValue(); - } - return floatArray; - } - - private double[] toDoubleArray() { - double[] doubleArray = new double[values.length]; - for (int i = 0; i < values.length; i++) { - doubleArray[i] = 
((Byte) values[i]).floatValue(); - } - return doubleArray; - } - - @Override - public int getSize() { - return values().length; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - InferenceByteEmbedding embedding = (InferenceByteEmbedding) o; - return Arrays.equals(values, embedding.values); - } - - @Override - public int hashCode() { - return Arrays.hashCode(values); - } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java new file mode 100644 index 0000000000000..f2844e6534a94 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/UnifiedChatCompletionException.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContent; + +import java.util.Iterator; +import java.util.Locale; +import java.util.Objects; + +import static java.util.Collections.emptyIterator; +import static org.elasticsearch.ExceptionsHelper.maybeError; +import static org.elasticsearch.common.collect.Iterators.concat; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endObject; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject; + +public class UnifiedChatCompletionException extends XContentFormattedException { + + private static final Logger log = LogManager.getLogger(UnifiedChatCompletionException.class); + private final String message; + private final String type; + @Nullable + private final String code; + @Nullable + private final String param; + + public UnifiedChatCompletionException(RestStatus status, String message, String type, @Nullable String code) { + this(status, message, type, code, null); + } + + public UnifiedChatCompletionException(RestStatus status, String message, String type, @Nullable String code, @Nullable String param) { + super(message, status); + this.message = Objects.requireNonNull(message); + this.type = Objects.requireNonNull(type); + this.code = code; + this.param = param; + } + + public UnifiedChatCompletionException( + Throwable cause, + RestStatus status, + String message, + String type, + @Nullable String code, + @Nullable String param + ) { + super(message, cause, status); + this.message = Objects.requireNonNull(message); + this.type = Objects.requireNonNull(type); + this.code = code; + this.param = param; + } + + @Override + public Iterator toXContentChunked(Params params) { + return concat( + startObject(), + startObject("error"), + optionalField("code", code), + field("message", message), + optionalField("param", param), + field("type", type), + endObject(), + endObject() + ); + } + + private static Iterator field(String key, String value) { + return ChunkedToXContentHelper.chunk((b, p) -> 
b.field(key, value)); + } + + private static Iterator optionalField(String key, String value) { + return value != null ? ChunkedToXContentHelper.chunk((b, p) -> b.field(key, value)) : emptyIterator(); + } + + public static UnifiedChatCompletionException fromThrowable(Throwable t) { + if (ExceptionsHelper.unwrapCause(t) instanceof UnifiedChatCompletionException e) { + return e; + } else { + return maybeError(t).map(error -> { + // we should never be throwing Error, but just in case we are, rethrow it on another thread so the JVM can handle it and + // return a vague error to the user so that they at least see something went wrong but don't leak JVM details to users + ExceptionsHelper.maybeDieOnAnotherThread(error); + var e = new RuntimeException("Fatal error while streaming response. Please retry the request."); + log.error(e.getMessage(), t); + return new UnifiedChatCompletionException( + RestStatus.INTERNAL_SERVER_ERROR, + e.getMessage(), + getExceptionName(e), + RestStatus.INTERNAL_SERVER_ERROR.name().toLowerCase(Locale.ROOT) + ); + }).orElseGet(() -> { + log.atDebug().withThrowable(t).log("UnifiedChatCompletionException stack trace for debugging purposes."); + var status = ExceptionsHelper.status(t); + return new UnifiedChatCompletionException( + t, + status, + t.getMessage(), + getExceptionName(t), + status.name().toLowerCase(Locale.ROOT), + null + ); + }); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/XContentFormattedException.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/XContentFormattedException.java new file mode 100644 index 0000000000000..799953d452f0d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/XContentFormattedException.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.util.Iterator; +import java.util.Objects; + +/** + * Similar to {@link org.elasticsearch.ElasticsearchWrapperException}, this will wrap an Exception to generate an xContent using + * {@link ElasticsearchException#generateFailureXContent(XContentBuilder, Params, Exception, boolean)}. + * Extends {@link ElasticsearchException} to provide REST handlers the {@link #status()} method in order to set the response header. 
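+ * <p>
+ * A minimal illustrative sketch (hypothetical usage, not part of this class) of how a REST layer can surface both the header status and the chunked body from one exception:
+ * <pre>{@code
+ * XContentFormattedException e = new XContentFormattedException("request failed", RestStatus.BAD_REQUEST);
+ * RestStatus headerStatus = e.status(); // 400 goes into the response header
+ * Iterator<? extends ToXContent> body = e.toXContentChunked(ToXContent.EMPTY_PARAMS); // chunked JSON error body
+ * }</pre>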
+ */ +public class XContentFormattedException extends ElasticsearchException implements ChunkedToXContent { + + public static final String X_CONTENT_PARAM = "detailedErrorsEnabled"; + private final RestStatus status; + private final Throwable cause; + + public XContentFormattedException(String message, RestStatus status) { + super(message); + this.status = Objects.requireNonNull(status); + this.cause = null; + } + + public XContentFormattedException(Throwable cause, RestStatus status) { + super(cause); + this.status = Objects.requireNonNull(status); + this.cause = cause; + } + + public XContentFormattedException(String message, Throwable cause, RestStatus status) { + super(message, cause); + this.status = Objects.requireNonNull(status); + this.cause = cause; + } + + @Override + public RestStatus status() { + return status; + } + + @Override + public Iterator toXContentChunked(Params params) { + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + Iterators.single( + (b, p) -> ElasticsearchException.generateFailureXContent( + b, + p, + cause instanceof Exception e ? e : this, + params.paramAsBoolean(X_CONTENT_PARAM, false) + ) + ), + Iterators.single((b, p) -> b.field("status", status.getStatus())), + ChunkedToXContentHelper.endObject() + ); + } + + @Override + public Iterator toXContentChunked(RestApiVersion restApiVersion, Params params) { + return ChunkedToXContent.super.toXContentChunked(restApiVersion, params); + } + + @Override + public Iterator toXContentChunkedV8(Params params) { + return ChunkedToXContent.super.toXContentChunkedV8(params); + } + + @Override + public boolean isFragment() { + return super.isFragment(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java index 7a098d432f35b..1ab4906ed0d06 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java @@ -38,6 +38,10 @@ public static String jobResultsIndexPrefix() { return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX; } + public static String jobResultsIndexPattern() { + return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*"; + } + /** * The name of the alias pointing to the indices where the job's results are stored * @param jobId Job Id @@ -47,15 +51,26 @@ public static String jobResultsAliasedName(String jobId) { return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + jobId; } + /** + * Extract the job Id from the alias name. 
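+ * For example, {@code jobIdFromAlias(".ml-anomalies-foo")} returns {@code "foo"}.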
+ * If the alias is not a results index alias, {@code null} is returned + * @param jobResultsAliasedName The alias + * @return The job Id + */ + public static String jobIdFromAlias(String jobResultsAliasedName) { + if (jobResultsAliasedName.length() < AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX.length()) { + return null; + } + return jobResultsAliasedName.substring(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX.length()); + } + /** * The name of the alias pointing to the write index for a job * @param jobId Job Id * @return The write alias */ public static String resultsWriteAlias(String jobId) { - // ".write" rather than simply "write" to avoid the danger of clashing - // with the read alias of a job whose name begins with "write-" - return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + ".write-" + jobId; + return AnomalyDetectorsIndexFields.RESULTS_INDEX_WRITE_PREFIX + jobId; } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java index 504a4b756c979..2a0fff86ba494 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java @@ -11,6 +11,9 @@ public final class AnomalyDetectorsIndexFields { public static final String STATE_INDEX_PREFIX = ".ml-state"; public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-"; + // ".write" rather than simply "write" to avoid the danger of clashing + // with the read alias of a job whose name begins with "write-" + public static final String RESULTS_INDEX_WRITE_PREFIX = RESULTS_INDEX_PREFIX + ".write-"; public static final String RESULTS_INDEX_DEFAULT = "shared"; private AnomalyDetectorsIndexFields() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java index 1f5c15a46fc4e..059c217cbf1fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java @@ -13,11 +13,15 @@ public final class NotificationsIndex { - public static final String NOTIFICATIONS_INDEX = ".ml-notifications-000002"; + public static final String NOTIFICATIONS_INDEX_PREFIX = ".ml-notifications-"; + public static final String NOTIFICATIONS_INDEX_VERSION = "000002"; + public static final String NOTIFICATIONS_INDEX = NOTIFICATIONS_INDEX_PREFIX + NOTIFICATIONS_INDEX_VERSION; + public static final String NOTIFICATIONS_INDEX_WRITE_ALIAS = ".ml-notifications-write"; private static final String RESOURCE_PATH = "/ml/"; private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; public static final int NOTIFICATIONS_INDEX_MAPPINGS_VERSION = 1; + public static final int NOTIFICATIONS_INDEX_TEMPLATE_VERSION = 1; private NotificationsIndex() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index e85acc159059e..22f17428ac141 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -31,6 +31,8 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; @@ -64,27 +66,24 @@ public final class MlIndexAndAlias { */ public static final String BWC_MAPPINGS_VERSION = "8.11.0"; - private static final Logger logger = LogManager.getLogger(MlIndexAndAlias.class); + public static final String FIRST_INDEX_SIX_DIGIT_SUFFIX = "-000001"; - static final Comparator<String> INDEX_NAME_COMPARATOR = new Comparator<>() { - - private final Predicate<String> HAS_SIX_DIGIT_SUFFIX = Pattern.compile("\\d{6}").asMatchPredicate(); - - @Override - public int compare(String index1, String index2) { - String[] index1Parts = index1.split("-"); - String index1Suffix = index1Parts[index1Parts.length - 1]; - boolean index1HasSixDigitsSuffix = HAS_SIX_DIGIT_SUFFIX.test(index1Suffix); - String[] index2Parts = index2.split("-"); - String index2Suffix = index2Parts[index2Parts.length - 1]; - boolean index2HasSixDigitsSuffix = HAS_SIX_DIGIT_SUFFIX.test(index2Suffix); - if (index1HasSixDigitsSuffix && index2HasSixDigitsSuffix) { - return index1Suffix.compareTo(index2Suffix); - } else if (index1HasSixDigitsSuffix != index2HasSixDigitsSuffix) { - return Boolean.compare(index1HasSixDigitsSuffix, index2HasSixDigitsSuffix); - } else { - return index1.compareTo(index2); - } + private static final Logger logger = LogManager.getLogger(MlIndexAndAlias.class); + private static final Predicate<String> HAS_SIX_DIGIT_SUFFIX = Pattern.compile("\\d{6}").asMatchPredicate(); + + static final Comparator<String> INDEX_NAME_COMPARATOR = (index1, index2) -> { + String[] index1Parts = index1.split("-"); + String index1Suffix = index1Parts[index1Parts.length - 1]; + boolean index1HasSixDigitsSuffix = HAS_SIX_DIGIT_SUFFIX.test(index1Suffix); + String[] index2Parts = index2.split("-"); + String index2Suffix = index2Parts[index2Parts.length - 1]; + boolean index2HasSixDigitsSuffix = HAS_SIX_DIGIT_SUFFIX.test(index2Suffix); + if (index1HasSixDigitsSuffix && index2HasSixDigitsSuffix) { + return index1Suffix.compareTo(index2Suffix); + } else if (index1HasSixDigitsSuffix != index2HasSixDigitsSuffix) { + return Boolean.compare(index1HasSixDigitsSuffix, index2HasSixDigitsSuffix); + } else { + return index1.compareTo(index2); } }; @@ -108,6 +107,34 @@ public static void createIndexAndAliasIfNecessary( ActiveShardCount waitForShardCount, ActionListener<Boolean> finalListener ) { + createIndexAndAliasIfNecessary( + client, + clusterState, + resolver, + indexPatternPrefix, + FIRST_INDEX_SIX_DIGIT_SUFFIX, + alias, + masterNodeTimeout, + waitForShardCount, + finalListener + ); + } + + /** + * Same as createIndexAndAliasIfNecessary but with the first concrete + * index number specified. 
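+ * For example, passing {@code FIRST_INDEX_SIX_DIGIT_SUFFIX} ({@code "-000001"}) as the index number yields a first concrete index of {@code indexPatternPrefix + "-000001"}, which rollover can then increment.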
+ */ + public static void createIndexAndAliasIfNecessary( + Client client, + ClusterState clusterState, + IndexNameExpressionResolver resolver, + String indexPatternPrefix, + String indexNumber, + String alias, + TimeValue masterNodeTimeout, + ActiveShardCount waitForShardCount, + ActionListener<Boolean> finalListener + ) { final ActionListener<Boolean> loggingListener = ActionListener.wrap(finalListener::onResponse, e -> { logger.error(() -> format("Failed to create alias and index with pattern [%s] and alias [%s]", indexPatternPrefix, alias), e); @@ -126,7 +153,7 @@ public static void createIndexAndAliasIfNecessary( String legacyIndexWithoutSuffix = indexPatternPrefix; String indexPattern = indexPatternPrefix + "*"; // The initial index name must be suitable for rollover functionality. - String firstConcreteIndex = indexPatternPrefix + "-000001"; + String firstConcreteIndex = indexPatternPrefix + indexNumber; String[] concreteIndexNames = resolver.concreteIndexNames(clusterState, IndicesOptions.lenientExpandHidden(), indexPattern); Optional<String> indexPointedByCurrentWriteAlias = clusterState.getMetadata().hasAlias(alias) ? clusterState.getMetadata().getIndicesLookup().get(alias).getIndices().stream().map(Index::getName).findFirst() @@ -331,7 +358,7 @@ public static void installIndexTemplateIfRequired( String templateName = templateConfig.getTemplateName(); // The check for existence of the template is against the cluster state, so very cheap - if (hasIndexTemplate(clusterState, templateName)) { + if (hasIndexTemplate(clusterState, templateName, templateConfig.getVersion())) { listener.onResponse(true); return; } @@ -345,7 +372,7 @@ public static void installIndexTemplateIfRequired( throw new ElasticsearchParseException("unable to parse composable template " + templateConfig.getTemplateName(), e); } - installIndexTemplateIfRequired(clusterState, client, request, listener); + installIndexTemplateIfRequired(clusterState, client, templateConfig.getVersion(), request, listener); } /** @@ -361,11 +388,12 @@ public static void installIndexTemplateIfRequired( public static void installIndexTemplateIfRequired( ClusterState clusterState, Client client, + int templateVersion, TransportPutComposableIndexTemplateAction.Request templateRequest, ActionListener<Boolean> listener ) { // The check for existence of the template is against the cluster state, so very cheap - if (hasIndexTemplate(clusterState, templateRequest.name())) { + if (hasIndexTemplate(clusterState, templateRequest.name(), templateVersion)) { listener.onResponse(true); return; } @@ -380,8 +408,13 @@ public static void installIndexTemplateIfRequired( executeAsyncWithOrigin(client, ML_ORIGIN, TransportPutComposableIndexTemplateAction.TYPE, templateRequest, innerListener); } - public static boolean hasIndexTemplate(ClusterState state, String templateName) { - return state.getMetadata().templatesV2().containsKey(templateName); + public static boolean hasIndexTemplate(ClusterState state, String templateName, long version) { + var template = state.getMetadata().templatesV2().get(templateName); + return template != null && Long.valueOf(version).equals(template.version()); + } + + public static boolean has6DigitSuffix(String indexName) { + return HAS_SIX_DIGIT_SUFFIX.test(indexName); } /** @@ -395,4 +428,11 @@ public static String latestIndex(String[] concreteIndices) { ? 
concreteIndices[0] : Arrays.stream(concreteIndices).max(MlIndexAndAlias.INDEX_NAME_COMPARATOR).get(); } + + /** + * True if the version is read *and* write compatible, not just read-only compatible + */ + public static boolean indexIsReadWriteCompatibleInV9(IndexVersion version) { + return version.onOrAfter(IndexVersions.V_8_0_0); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java index 7ae915d2db791..81f6b7489d8c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java @@ -89,11 +89,11 @@ public PutUserRequestBuilder email(String email) { public PutUserRequestBuilder passwordHash(char[] passwordHash, Hasher configuredHasher) { final Hasher resolvedHasher = Hasher.resolveFromHash(passwordHash); if (resolvedHasher.equals(configuredHasher) == false - && Hasher.getAvailableAlgoStoredHash().contains(resolvedHasher.name().toLowerCase(Locale.ROOT)) == false) { + && Hasher.getAvailableAlgoStoredPasswordHash().contains(resolvedHasher.name().toLowerCase(Locale.ROOT)) == false) { throw new IllegalArgumentException( "The provided password hash is not a hash or it could not be resolved to a supported hash algorithm. " + "The supported password hash algorithms are " - + Hasher.getAvailableAlgoStoredHash().toString() + + Hasher.getAvailableAlgoStoredPasswordHash().toString() ); } if (request.passwordHash() != null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java index bf24919a39495..7e4780bf4f5b3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java @@ -735,7 +735,7 @@ private static boolean verifyBcryptHash(SecureString text, char[] hash) { * an instance of the appropriate {@link Hasher} by using {@link #resolve(String) resolve()} */ @SuppressForbidden(reason = "This is the only allowed way to get available values") - public static List<String> getAvailableAlgoStoredHash() { + public static List<String> getAvailableAlgoStoredPasswordHash() { return Arrays.stream(Hasher.values()) .map(Hasher::name) .map(name -> name.toLowerCase(Locale.ROOT)) @@ -743,6 +743,20 @@ public static List<String> getAvailableAlgoStoredHash() { .collect(Collectors.toList()); } + + /** + * Returns a list of lower case String identifiers for the Hashing algorithm and parameter + * combinations that can be used for secure token hashing. 
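+ * For example, the identifiers {@code bcrypt}, {@code pbkdf2} and {@code ssha256} all satisfy the filter below.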
The identifiers can be used to get + * an instance of the appropriate {@link Hasher} by using {@link #resolve(String) resolve()} + */ + @SuppressForbidden(reason = "This is the only allowed way to get available values") + public static List getAvailableAlgoStoredSecureTokenHash() { + return Arrays.stream(Hasher.values()) + .map(Hasher::name) + .map(name -> name.toLowerCase(Locale.ROOT)) + .filter(name -> (name.startsWith("pbkdf2") || name.startsWith("bcrypt") || name.equals("ssha256"))) + .collect(Collectors.toList()); + } + /** * Returns a list of lower case String identifiers for the Hashing algorithm and parameter * combinations that can be used for password hashing in the cache. The identifiers can be used to get diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java index 68baab4469e48..98c12930e188e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.search.TransportSearchShardsAction; import org.elasticsearch.index.seqno.RetentionLeaseActions; import org.elasticsearch.index.seqno.RetentionLeaseBackgroundSyncAction; @@ -38,12 +39,13 @@ public final class SystemPrivilege extends Privilege { RetentionLeaseActions.ADD.name() + "*", // needed for CCR to add retention leases RetentionLeaseActions.REMOVE.name() + "*", // needed for CCR to remove retention leases RetentionLeaseActions.RENEW.name() + "*", // needed for CCR to renew retention leases - "indices:admin/settings/update", // needed for DiskThresholdMonitor.markIndicesReadOnly + "indices:admin/settings/update", // needed for: DiskThresholdMonitor.markIndicesReadOnly, SystemIndexMigrator CompletionPersistentTaskAction.INSTANCE.name(), // needed for ShardFollowTaskCleaner "indices:data/write/*", // needed for SystemIndexMigrator "indices:data/read/*", // needed for SystemIndexMigrator "indices:admin/refresh", // needed for SystemIndexMigrator "indices:admin/aliases", // needed for SystemIndexMigrator + TransportAddIndexBlockAction.TYPE.name() + "*", // needed for SystemIndexMigrator TransportSearchShardsAction.TYPE.name(), // added so this API can be called with the system user by other APIs ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION.name() // needed for Security plugin reload of remote cluster credentials ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index fcd1ba54a8de2..ed41c77c40f6e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -120,6 +120,11 @@ static RoleDescriptor kibanaSystem(String name) { .indices(".ml-annotations*", ".ml-notifications*") .privileges("read", "write") .build(), + // And the reindexed indices from v7 + 
RoleDescriptor.IndicesPrivileges.builder() + .indices(".reindexed-v8-ml-annotations*", ".reindexed-v8-ml-notifications*") + .privileges("read", "write") + .build(), // APM agent configuration - system index defined in KibanaPlugin RoleDescriptor.IndicesPrivileges.builder() @@ -224,6 +229,11 @@ static RoleDescriptor kibanaSystem(String name) { RoleDescriptor.IndicesPrivileges.builder().indices("metrics-fleet_server*").privileges("all").build(), // Fleet reads output health from this index pattern RoleDescriptor.IndicesPrivileges.builder().indices("logs-fleet_server*").privileges("read", "delete_index").build(), + // Fleet creates and writes this index for sync integrations feature + RoleDescriptor.IndicesPrivileges.builder() + .indices("fleet-synced-integrations") + .privileges("create_index", "manage", "read", "write") + .build(), // Legacy "Alerts as data" used in Security Solution. // Kibana user creates these indices; reads / writes to them. RoleDescriptor.IndicesPrivileges.builder() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java index 52f077b658d02..a704b350dba4b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java @@ -9,9 +9,11 @@ import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction; +import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; +import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; @@ -206,6 +208,8 @@ public class InternalUsers { "indices:admin/data_stream/index/reindex", "indices:admin/index/create_from_source", TransportAddIndexBlockAction.TYPE.name(), + OpenIndexAction.NAME, + TransportCloseIndexAction.NAME, TransportCreateIndexAction.TYPE.name(), TransportClusterSearchShardsAction.TYPE.name(), TransportUpdateSettingsAction.TYPE.name(), @@ -221,6 +225,7 @@ public class InternalUsers { .build() }, null, null, + new String[] {}, MetadataUtils.DEFAULT_RESERVED_METADATA, Map.of() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java index 6424136eb1a7c..9a35b8f13d4c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java @@ -146,7 +146,7 @@ public static SslKeyConfig createKeyConfig( boolean acceptNonSecurePasswords ) { final SslSettingsLoader settingsLoader = new SslSettingsLoader(settings, prefix, acceptNonSecurePasswords); - return settingsLoader.buildKeyConfig(environment.configFile()); + return settingsLoader.buildKeyConfig(environment.configDir()); } /** diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java index cb55de79342b8..f9b27daa8f8c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java @@ -128,7 +128,7 @@ protected SslTrustConfig buildTrustConfig( } public SslConfiguration load(Environment env) { - return load(env.configFile()); + return load(env.configDir()); } public static SslConfiguration load(Settings settings, String prefix, Environment env) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java index 08e89a0fcab00..c264f084c017d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -205,8 +204,7 @@ protected Map> getNodeBundles(ClusterState clusterState, St String[] singleIndex = { indexName }; - GroupShardsIterator shards = clusterService.operationRouting() - .searchShards(clusterState, singleIndex, null, null); + List shards = clusterService.operationRouting().searchShards(clusterState, singleIndex, null, null); for (ShardIterator copiesOfShard : shards) { ShardRouting selectedCopyOfShard = null; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java index 8439c9cd76fad..652ae7f2d0593 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java @@ -48,6 +48,7 @@ public final class TransformInternalIndexConstants { public static final String AUDIT_INDEX_PATTERN_DEPRECATED = TRANSFORM_PREFIX_DEPRECATED + "notifications-*"; public static final String AUDIT_INDEX_READ_ALIAS = TRANSFORM_PREFIX + "notifications-read"; + public static final String AUDIT_INDEX_WRITE_ALIAS = TRANSFORM_PREFIX + "notifications-write"; public static final String AUDIT_INDEX = AUDIT_INDEX_PREFIX + AUDIT_TEMPLATE_VERSION; private TransformInternalIndexConstants() {} diff --git a/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml index 4e0266b06bbb0..a069e1b4ce4ce 100644 --- a/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,8 +1,13 @@ org.apache.httpcomponents.httpclient: - 
outbound_network # For SamlRealm + - manage_threads org.apache.httpcomponents.httpcore.nio: - outbound_network + - manage_threads +org.apache.httpcomponents.httpasyncclient: + - manage_threads unboundid.ldapsdk: + - manage_threads - write_system_properties: properties: - java.security.auth.login.config diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java index ab6e7356a6e02..f7432a59040da 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java @@ -143,7 +143,7 @@ public List loadExtensions(Class extensionPointType) { Environment mockEnvironment = mock(Environment.class); when(mockEnvironment.settings()).thenReturn(Settings.builder().build()); - when(mockEnvironment.configFile()).thenReturn(PathUtils.get("")); + when(mockEnvironment.configDir()).thenReturn(PathUtils.get("")); // ensure createComponents does not influence the results Plugin.PluginServices services = mock(Plugin.PluginServices.class); when(services.clusterService()).thenReturn(mock(ClusterService.class)); @@ -187,7 +187,7 @@ public List loadExtensions(Class extensionPointType) { }); Environment mockEnvironment = mock(Environment.class); when(mockEnvironment.settings()).thenReturn(Settings.builder().build()); - when(mockEnvironment.configFile()).thenReturn(PathUtils.get("")); + when(mockEnvironment.configDir()).thenReturn(PathUtils.get("")); Plugin.PluginServices services = mock(Plugin.PluginServices.class); when(services.clusterService()).thenReturn(mock(ClusterService.class)); when(services.threadPool()).thenReturn(mock(ThreadPool.class)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java index 1f28afbbc75b7..13d15a4ab0b07 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java @@ -6,7 +6,14 @@ */ package org.elasticsearch.xpack.core.common.notifications; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.TransportBulkAction; @@ -17,15 +24,22 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.IndicesAdminClient; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetadata; import 
org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -33,7 +47,9 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.template.IndexTemplateConfig; import org.junit.After; @@ -51,6 +67,8 @@ import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -65,12 +83,16 @@ public class AbstractAuditorTests extends ESTestCase { private static final String TEST_NODE_NAME = "node_1"; private static final String TEST_ORIGIN = "test_origin"; - private static final String TEST_INDEX = "test_index"; + private static final String TEST_INDEX_PREFIX = "test_index"; + private static final String TEST_INDEX_VERSION = "-000001"; + private static final String TEST_INDEX = TEST_INDEX_PREFIX + TEST_INDEX_VERSION; + private static final String TEST_INDEX_ALIAS = "test_index_write"; private static final int TEST_TEMPLATE_VERSION = 23456789; private Client client; private ArgumentCaptor indexRequestCaptor; + private ArgumentCaptor bulkRequestCaptor; private long startMillis; private ThreadPool threadPool; @@ -83,6 +105,7 @@ public void setUpMocks() { when(mockPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); indexRequestCaptor = ArgumentCaptor.forClass(IndexRequest.class); + bulkRequestCaptor = ArgumentCaptor.forClass(BulkRequest.class); startMillis = System.currentTimeMillis(); threadPool = new TestThreadPool(getClass().getName()); @@ -97,53 +120,58 @@ public void shutdownThreadPool() { public void testInfo() throws IOException { AbstractAuditor auditor = createTestAuditorWithTemplateInstalled(); auditor.info("foo", "Here is my info"); - - verify(client).execute(eq(TransportIndexAction.TYPE), indexRequestCaptor.capture(), any()); - IndexRequest indexRequest = indexRequestCaptor.getValue(); - assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX)); - assertThat(indexRequest.timeout(), equalTo(TimeValue.timeValueSeconds(5))); - AbstractAuditMessageTests.TestAuditMessage auditMessage = parseAuditMessage(indexRequest.source()); - assertThat(auditMessage.getResourceId(), 
equalTo("foo")); - assertThat(auditMessage.getMessage(), equalTo("Here is my info")); - assertThat(auditMessage.getLevel(), equalTo(Level.INFO)); - assertThat( - auditMessage.getTimestamp().getTime(), - allOf(greaterThanOrEqualTo(startMillis), lessThanOrEqualTo(System.currentTimeMillis())) - ); - assertThat(auditMessage.getNodeName(), equalTo(TEST_NODE_NAME)); + // The first audit is written as a bulk request from the backlog + // once the template & alias checks have passed + verifyBulkIndexed("foo", "Here is my info", Level.INFO); + // Subsequent messages are indexed directly + auditor.info("foo", "This message is indexed directly because the write alias exists"); + verifyMessageIndexed("foo", "This message is indexed directly because the write alias exists", Level.INFO); } public void testWarning() throws IOException { AbstractAuditor auditor = createTestAuditorWithTemplateInstalled(); auditor.warning("bar", "Here is my warning"); - - verify(client).execute(eq(TransportIndexAction.TYPE), indexRequestCaptor.capture(), any()); - IndexRequest indexRequest = indexRequestCaptor.getValue(); - assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX)); - assertThat(indexRequest.timeout(), equalTo(TimeValue.timeValueSeconds(5))); - AbstractAuditMessageTests.TestAuditMessage auditMessage = parseAuditMessage(indexRequest.source()); - assertThat(auditMessage.getResourceId(), equalTo("bar")); - assertThat(auditMessage.getMessage(), equalTo("Here is my warning")); - assertThat(auditMessage.getLevel(), equalTo(Level.WARNING)); - assertThat( - auditMessage.getTimestamp().getTime(), - allOf(greaterThanOrEqualTo(startMillis), lessThanOrEqualTo(System.currentTimeMillis())) - ); - assertThat(auditMessage.getNodeName(), equalTo(TEST_NODE_NAME)); + // The first audit is written as a bulk request from the backlog + // once the template & alias checks have passed + verifyBulkIndexed("bar", "Here is my warning", Level.WARNING); + // Subsequent messages are indexed directly + auditor.warning("bar", "This message is indexed directly because the write alias exists"); + verifyMessageIndexed("bar", "This message is indexed directly because the write alias exists", Level.WARNING); } public void testError() throws IOException { AbstractAuditor auditor = createTestAuditorWithTemplateInstalled(); auditor.error("foobar", "Here is my error"); + // The first audit is written as a bulk request from the backlog + // once the template & alias checks have passed + verifyBulkIndexed("foobar", "Here is my error", Level.ERROR); + // Subsequent messages are indexed directly + auditor.error("foobar", "This message is indexed directly because the write alias exists"); + verifyMessageIndexed("foobar", "This message is indexed directly because the write alias exists", Level.ERROR); + } + public void testAudit() throws IOException { + Level level = randomFrom(Level.ERROR, Level.INFO, Level.WARNING); + + AbstractAuditor auditor = createTestAuditorWithTemplateInstalled(); + auditor.audit(level, "r_id", "Here is my audit"); + // The first audit is written as a bulk request from the backlog + // once the template & alias checks have passed + verifyBulkIndexed("r_id", "Here is my audit", level); + // Subsequent messages are indexed directly + auditor.audit(level, "r_id", "This message is indexed directly because the write alias exists"); + verifyMessageIndexed("r_id", "This message is indexed directly because the write alias exists", level); + } + + private void verifyMessageIndexed(String resourceId, String message, Level level) 
throws IOException { verify(client).execute(eq(TransportIndexAction.TYPE), indexRequestCaptor.capture(), any()); IndexRequest indexRequest = indexRequestCaptor.getValue(); - assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX)); + assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX_ALIAS)); assertThat(indexRequest.timeout(), equalTo(TimeValue.timeValueSeconds(5))); AbstractAuditMessageTests.TestAuditMessage auditMessage = parseAuditMessage(indexRequest.source()); - assertThat(auditMessage.getResourceId(), equalTo("foobar")); - assertThat(auditMessage.getMessage(), equalTo("Here is my error")); - assertThat(auditMessage.getLevel(), equalTo(Level.ERROR)); + assertThat(auditMessage.getResourceId(), equalTo(resourceId)); + assertThat(auditMessage.getMessage(), equalTo(message)); + assertThat(auditMessage.getLevel(), equalTo(level)); assertThat( auditMessage.getTimestamp().getTime(), allOf(greaterThanOrEqualTo(startMillis), lessThanOrEqualTo(System.currentTimeMillis())) @@ -151,19 +179,18 @@ public void testError() throws IOException { assertThat(auditMessage.getNodeName(), equalTo(TEST_NODE_NAME)); } - public void testAudit() throws IOException { - Level level = randomFrom(Level.ERROR, Level.INFO, Level.WARNING); - - AbstractAuditor auditor = createTestAuditorWithTemplateInstalled(); - auditor.audit(level, "r_id", "Here is my audit"); - - verify(client).execute(eq(TransportIndexAction.TYPE), indexRequestCaptor.capture(), any()); - IndexRequest indexRequest = indexRequestCaptor.getValue(); - assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX)); - assertThat(indexRequest.timeout(), equalTo(TimeValue.timeValueSeconds(5))); + private void verifyBulkIndexed(String resourceId, String message, Level level) throws IOException { + verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); + BulkRequest bulkRequest = bulkRequestCaptor.getValue(); + assertThat(bulkRequest.numberOfActions(), is(1)); + assertThat(bulkRequest.timeout(), equalTo(TimeValue.timeValueSeconds(60))); + var firstBulk = bulkRequest.requests().get(0); + assertThat(firstBulk.index(), is(TEST_INDEX_ALIAS)); + assertThat(firstBulk, instanceOf(IndexRequest.class)); + var indexRequest = (IndexRequest) firstBulk; AbstractAuditMessageTests.TestAuditMessage auditMessage = parseAuditMessage(indexRequest.source()); - assertThat(auditMessage.getResourceId(), equalTo("r_id")); - assertThat(auditMessage.getMessage(), equalTo("Here is my audit")); + assertThat(auditMessage.getResourceId(), equalTo(resourceId)); + assertThat(auditMessage.getMessage(), equalTo(message)); assertThat(auditMessage.getLevel(), equalTo(level)); assertThat( auditMessage.getTimestamp().getTime(), @@ -172,6 +199,14 @@ public void testAudit() throws IOException { assertThat(auditMessage.getNodeName(), equalTo(TEST_NODE_NAME)); } + public void testAuditWithMissingAlias() throws IOException { + AbstractAuditor auditor = createTestAuditorWithTemplateAndIndexButNoAlias(); + auditor.info("foobar", "Add the alias first"); + verify(client).execute(eq(TransportIndicesAliasesAction.TYPE), any(), any()); + + verifyBulkIndexed("foobar", "Add the alias first", Level.INFO); + } + public void testAuditingBeforeTemplateInstalled() throws Exception { CountDownLatch writeSomeDocsBeforeTemplateLatch = new CountDownLatch(1); AbstractAuditor auditor = createTestAuditorWithoutTemplate( @@ -186,6 +221,9 @@ public void testAuditingBeforeTemplateInstalled() throws Exception { // fire the put template response 
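+ // counting down the latch lets the mocked put-template call respond, which unblocks index creation and the backlog flush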
writeSomeDocsBeforeTemplateLatch.countDown(); + assertBusy(() -> verify(client, times(1)).execute(eq(TransportPutComposableIndexTemplateAction.TYPE), any(), any())); + assertBusy(() -> verify(client, times(1)).execute(eq(TransportCreateIndexAction.TYPE), any(), any())); + // the back log will be written some point later ArgumentCaptor bulkCaptor = ArgumentCaptor.forClass(BulkRequest.class); assertBusy(() -> verify(client, times(1)).execute(eq(TransportBulkAction.TYPE), bulkCaptor.capture(), any())); @@ -197,6 +235,40 @@ public void testAuditingBeforeTemplateInstalled() throws Exception { verify(client, times(1)).execute(eq(TransportIndexAction.TYPE), any(), any()); } + public void testRecreateTemplateWhenDeleted() throws Exception { + CountDownLatch writeSomeDocsBeforeTemplateLatch = new CountDownLatch(1); + AbstractAuditor auditor = createTestAuditorWithoutTemplate( + writeSomeDocsBeforeTemplateLatch + ); + + auditor.info("foobar", "Here is my info to queue"); + + verify(client, never()).execute(eq(TransportIndexAction.TYPE), any(), any()); + // fire the put template response + writeSomeDocsBeforeTemplateLatch.countDown(); + + assertBusy(() -> verify(client, times(1)).execute(eq(TransportPutComposableIndexTemplateAction.TYPE), any(), any())); + assertBusy(() -> verify(client, times(1)).execute(eq(TransportCreateIndexAction.TYPE), any(), any())); + + // the back log will be written some point later + assertBusy(() -> verify(client, times(1)).execute(eq(TransportBulkAction.TYPE), any(), any())); + + // "delete" the index + doAnswer(ans -> { + ActionListener listener = ans.getArgument(2); + listener.onFailure(new IndexNotFoundException("some index")); + return null; + }).when(client).execute(eq(TransportIndexAction.TYPE), any(), any()); + + // audit more data + auditor.info("foobar", "Here is another message"); + + // verify the template is recreated and the audit message is processed + assertBusy(() -> verify(client, times(2)).execute(eq(TransportPutComposableIndexTemplateAction.TYPE), any(), any())); + assertBusy(() -> verify(client, times(2)).execute(eq(TransportCreateIndexAction.TYPE), any(), any())); + assertBusy(() -> verify(client, times(2)).execute(eq(TransportBulkAction.TYPE), any(), any())); + } + public void testMaxBufferSize() throws Exception { CountDownLatch writeSomeDocsBeforeTemplateLatch = new CountDownLatch(1); AbstractAuditor auditor = createTestAuditorWithoutTemplate( @@ -226,17 +298,42 @@ private static AbstractAuditMessageTests.TestAuditMessage parseAuditMessage(Byte } private TestAuditor createTestAuditorWithTemplateInstalled() { - Map templates = Map.of(TEST_INDEX, mock(IndexTemplateMetadata.class)); - Map templatesV2 = Collections.singletonMap(TEST_INDEX, mock(ComposableIndexTemplate.class)); - Metadata metadata = mock(Metadata.class); - when(metadata.getTemplates()).thenReturn(templates); - when(metadata.templatesV2()).thenReturn(templatesV2); - ClusterState state = mock(ClusterState.class); - when(state.getMetadata()).thenReturn(metadata); + return new TestAuditor(client, TEST_NODE_NAME, mockClusterServiceAndIndexState(true)); + } + + @SuppressWarnings("unchecked") + private TestAuditor createTestAuditorWithTemplateAndIndexButNoAlias() { + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(mock(IndicesAliasesResponse.class)); + return null; + }).when(client).execute(eq(TransportIndicesAliasesAction.TYPE), any(), any()); + + return new TestAuditor(client, TEST_NODE_NAME, 
mockClusterServiceAndIndexState(false)); + } + + private ClusterService mockClusterServiceAndIndexState(boolean includeAlias) { + Map templates = Map.of(TEST_INDEX_PREFIX, mock(IndexTemplateMetadata.class)); + var template = mock(ComposableIndexTemplate.class); + when(template.version()).thenReturn((long) TEST_TEMPLATE_VERSION); + Map templatesV2 = Collections.singletonMap(TEST_INDEX_PREFIX, template); + + var indexMeta = Map.of(TEST_INDEX, createIndexMetadata(TEST_INDEX, includeAlias)); + Metadata metadata = Metadata.builder().indices(indexMeta).templates(templates).indexTemplates(templatesV2).build(); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + ClusterService clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(state); + return clusterService; + } - return new TestAuditor(client, TEST_NODE_NAME, clusterService); + private static IndexMetadata createIndexMetadata(String indexName, boolean withAlias) { + IndexMetadata.Builder builder = IndexMetadata.builder(indexName).settings(indexSettings(IndexVersion.current(), 1, 0)); + if (withAlias) { + builder.putAlias(AliasMetadata.builder(TEST_INDEX_ALIAS).build()); + } + return builder.build(); } @SuppressWarnings("unchecked") @@ -264,15 +361,24 @@ private TestAuditor createTestAuditorWithoutTemplate(CountDownLatch latch) { return null; }).when(client).execute(eq(TransportPutComposableIndexTemplateAction.TYPE), any(), any()); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(new CreateIndexResponse(true, true, "foo")); + return null; + }).when(client).execute(eq(TransportCreateIndexAction.TYPE), any(), any()); + + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(new ClusterHealthResponse()); + return null; + }).when(client).execute(eq(TransportClusterHealthAction.TYPE), any(), any()); + IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); AdminClient adminClient = mock(AdminClient.class); when(adminClient.indices()).thenReturn(indicesAdminClient); when(client.admin()).thenReturn(adminClient); - Metadata metadata = mock(Metadata.class); - when(metadata.getTemplates()).thenReturn(Map.of()); - ClusterState state = mock(ClusterState.class); - when(state.getMetadata()).thenReturn(metadata); + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).build(); ClusterService clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(state); @@ -284,23 +390,46 @@ public static class TestAuditor extends AbstractAuditor /** * The bwc versions to test serialization against */ - protected List bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java index 0254406a2c8ec..e6b6ef3e3a06a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import java.io.IOException; -import java.util.List; +import java.util.Collection; import static 
org.elasticsearch.test.BWCVersions.DEFAULT_BWC_VERSIONS; @@ -28,7 +28,7 @@ public abstract class AbstractChunkedBWCSerializationTestCase bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java index 3ab5851815474..91070d5768f63 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java @@ -120,7 +120,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); } - return new CoordinatedInferenceAction.Request( + var newInstance = new CoordinatedInferenceAction.Request( instance.getModelId(), instance.getInputs(), instance.getTaskSettings(), @@ -131,5 +131,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( instance.getHighPriority(), instance.getRequestModelType() ); + newInstance.setPrefixType(instance.getPrefixType()); + return newInstance; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index 8fc1e55ec0ac5..0bc5ac8cc780e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -186,7 +187,8 @@ public void testInstallIndexTemplateIfRequired_GivenComposableTemplateExists() { NotificationsIndex.NOTIFICATIONS_INDEX, createComposableIndexTemplateMetaData( NotificationsIndex.NOTIFICATIONS_INDEX, - Collections.singletonList(NotificationsIndex.NOTIFICATIONS_INDEX) + Collections.singletonList(NotificationsIndex.NOTIFICATIONS_INDEX), + TEST_TEMPLATE_VERSION ) ) ); @@ -365,8 +367,20 @@ public void testIndexNameComparator() { } public void testLatestIndex() { - var names = new String[] { "index-000001", "index-000002", "index-000003" }; - assertThat(MlIndexAndAlias.latestIndex(names), equalTo("index-000003")); + { + var names = new String[] { "index-000001", "index-000002", "index-000003" }; + assertThat(MlIndexAndAlias.latestIndex(names), equalTo("index-000003")); + } + { + var names = new String[] { "index", "index-000001", "index-000002" }; + assertThat(MlIndexAndAlias.latestIndex(names), equalTo("index-000002")); + } + } + + public void testIndexIsReadWriteCompatibleInV9() { + assertTrue(MlIndexAndAlias.indexIsReadWriteCompatibleInV9(IndexVersion.current())); + assertTrue(MlIndexAndAlias.indexIsReadWriteCompatibleInV9(IndexVersions.V_8_0_0)); + assertFalse(MlIndexAndAlias.indexIsReadWriteCompatibleInV9(IndexVersions.V_7_17_0)); } private void createIndexAndAliasIfNecessary(ClusterState clusterState) { @@ -417,8 +431,8 @@ private static 
IndexTemplateMetadata createLegacyIndexTemplateMetaData(String te return IndexTemplateMetadata.builder(templateName).patterns(patterns).build(); } - private static ComposableIndexTemplate createComposableIndexTemplateMetaData(String templateName, List patterns) { - return ComposableIndexTemplate.builder().indexPatterns(patterns).build(); + private static ComposableIndexTemplate createComposableIndexTemplateMetaData(String templateName, List patterns, long version) { + return ComposableIndexTemplate.builder().indexPatterns(patterns).version(version).build(); } private static IndexMetadata createIndexMetadata(String indexName, boolean withAlias) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 107953557f3ea..ab5a1bbd8c897 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -4140,7 +4140,7 @@ public void testInferenceAdminRole() { assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role role = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); - assertTrue(role.cluster().check("cluster:monitor/xpack/inference", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference/post", request, authentication)); assertTrue(role.cluster().check("cluster:monitor/xpack/inference/get", request, authentication)); assertTrue(role.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); assertTrue(role.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); @@ -4160,10 +4160,9 @@ public void testInferenceUserRole() { assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role role = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); - assertTrue(role.cluster().check("cluster:monitor/xpack/inference", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference/post", request, authentication)); assertTrue(role.cluster().check("cluster:monitor/xpack/inference/get", request, authentication)); assertFalse(role.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); - assertTrue(role.cluster().check("cluster:monitor/xpack/inference/unified", request, authentication)); assertFalse(role.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); assertTrue(role.cluster().check("cluster:monitor/xpack/ml/trained_models/deployment/infer", request, authentication)); assertFalse(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/start", request, authentication)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoaderTests.java index d03595c39877b..79ff817061a01 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoaderTests.java @@ -229,7 +229,7 @@ public void testKeystorePassword() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) 
sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, PASSWORD, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, PASSWORD, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -244,7 +244,7 @@ public void testKeystorePasswordBackcompat() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, PASSWORD, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, PASSWORD, KEY_MGR_ALGORITHM, environment.configDir())) ); assertSettingDeprecationsAndWarnings(new Setting[] { configurationSettings.x509KeyPair.legacyKeystorePassword }); } @@ -263,7 +263,7 @@ public void testKeystoreKeyPassword() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -279,7 +279,7 @@ public void testKeystoreKeyPasswordBackcompat() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("path", PASSWORD, "type", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); assertSettingDeprecationsAndWarnings( new Setting[] { @@ -298,7 +298,7 @@ public void testInferKeystoreTypeFromJksFile() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("xpack/tls/path.jks", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("xpack/tls/path.jks", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -314,7 +314,7 @@ public void testInferKeystoreTypeFromPkcs12File() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig(path, PASSWORD, "PKCS12", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig(path, PASSWORD, "PKCS12", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -328,7 +328,7 @@ public void testInferKeystoreTypeFromUnrecognised() { StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); assertThat( ksKeyInfo, - equalTo(new StoreKeyConfig("xpack/tls/path.foo", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) + equalTo(new StoreKeyConfig("xpack/tls/path.foo", PASSWORD, "jks", null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir())) ); } @@ -347,10 +347,7 @@ public void testExplicitKeystoreType() { SslConfiguration sslConfiguration = getSslConfiguration(settings); assertThat(sslConfiguration.keyConfig(), instanceOf(StoreKeyConfig.class)); StoreKeyConfig ksKeyInfo = (StoreKeyConfig) sslConfiguration.keyConfig(); - assertThat( - ksKeyInfo, - equalTo(new StoreKeyConfig(path, PASSWORD, type, null, KEYPASS, KEY_MGR_ALGORITHM, environment.configFile())) - ); + assertThat(ksKeyInfo, equalTo(new StoreKeyConfig(path, PASSWORD, type, null, KEYPASS, KEY_MGR_ALGORITHM, environment.configDir()))); } public void testThatEmptySettingsAreEqual() { diff --git 
a/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json index bf3cf302f0170..38ab2621b9316 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json @@ -2,7 +2,7 @@ "priority" : 2147483647, "version" : ${xpack.ml.version.id}, "index_patterns" : [ - ".ml-notifications-000002" + ".ml-notifications-*" ], "template" : { "settings" : { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json index 7457dce805eca..fcb299115ffd2 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json @@ -786,6 +786,45 @@ }, "sampling": { "properties": { + "tail": { + "properties": { + "dynamic_service_groups": { + "type": "long" + }, + "events": { + "properties": { + "dropped": { + "type": "long" + }, + "failed_writes": { + "type": "long" + }, + "head_unsampled": { + "type": "long" + }, + "processed": { + "type": "long" + }, + "sampled": { + "type": "long" + }, + "stored": { + "type": "long" + } + } + }, + "storage": { + "properties": { + "lsm_size": { + "type": "long" + }, + "value_log_size": { + "type": "long" + } + } + } + } + }, "transactions_dropped": { "type": "long" } @@ -2219,6 +2258,54 @@ }, "sampling": { "properties": { + "tail": { + "properties": { + "dynamic_service_groups": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.dynamic_service_groups" + }, + "events": { + "properties": { + "dropped": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.dropped" + }, + "failed_writes": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.failed_writes" + }, + "head_unsampled": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.head_unsampled" + }, + "processed": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.processed" + }, + "sampled": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.sampled" + }, + "stored": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.events.stored" + } + } + }, + "storage": { + "properties": { + "lsm_size": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.storage.lsm_size" + }, + "value_log_size": { + "type": "alias", + "path": "beat.stats.apm_server.sampling.tail.storage.value_log_size" + } + } + } + } + }, "transactions_dropped": { "type": "alias", "path": "beat.stats.apm_server.sampling.transactions_dropped" diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json index d699317c29da3..e1a8b5cc37060 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json @@ -966,6 +966,45 @@ }, "sampling": { "properties": { + "tail": { + "properties": { + "dynamic_service_groups": { + "type": "long" + }, + "events": { + "properties": { + "dropped": { + "type": "long" + }, + "failed_writes": { + "type": "long" + }, + "head_unsampled": { + "type": "long" + }, + 
"processed": { + "type": "long" + }, + "sampled": { + "type": "long" + }, + "stored": { + "type": "long" + } + } + }, + "storage": { + "properties": { + "lsm_size": { + "type": "long" + }, + "value_log_size": { + "type": "long" + } + } + } + } + }, "transactions_dropped": { "type": "long" } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json index 793a8c3035d8e..6d82f6e5295e6 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json @@ -464,6 +464,13 @@ } } }, + "threads": { + "properties": { + "count": { + "type": "long" + } + } + }, "gc": { "properties": { "collectors": { @@ -562,6 +569,20 @@ "type": "long" } } + }, + "fetch_total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "fetch_time": { + "properties": { + "ms": { + "type": "long" + } + } } } }, @@ -586,6 +607,42 @@ } } }, + "get": { + "properties": { + "total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "time": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "merges": { + "properties": { + "total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "total_time": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, "fielddata": { "properties": { "memory": { @@ -594,6 +651,13 @@ "type": "long" } } + }, + "evictions": { + "properties": { + "count": { + "type": "long" + } + } } } }, @@ -651,6 +715,60 @@ } } }, + "translog": { + "properties": { + "operations": { + "properties": { + "count": { + "type": "long" + } + } + }, + "size": { + "properties": { + "bytes": { + "type": "long" + } + } + } + } + }, + "refresh": { + "properties": { + "total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "total_time": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "flush": { + "properties": { + "total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "total_time": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, "segments": { "properties": { "version_map": { @@ -768,6 +886,20 @@ }, "process": { "properties": { + "mem": { + "properties": { + "total_virtual": { + "properties": { + "bytes": { + "type": "long" + } + } + } + } + }, + "open_file_descriptors": { + "type": "long" + }, "cpu": { "properties": { "pct": { @@ -882,6 +1014,88 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "force_merge": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "flush": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "search_worker": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -900,6 +1114,38 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + 
"queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "system_read": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -918,6 +1164,38 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "esql_worker": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -936,6 +1214,38 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "system_write": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -954,6 +1264,13 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -962,6 +1279,181 @@ } } } + }, + "snapshot": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + } + } + }, + "transport": { + "properties": { + "tx": { + "properties": { + "size": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "count": { + "type": "long" + } + } + }, + "rx": { + "properties": { + "size": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "count": { + "type": "long" + } + } + } + } + }, + "ingest": { + "properties": { + "total": { + "properties": { + "current": { + "type": "long" + }, + "time_in_millis": { + "type": "long" + }, + "count": { + "type": "long" + }, + "failed": { + "type": "long" + } + } + } + } + }, + "indexing_pressure": { + "properties": { + "memory": { + "properties": { + "current": { + "properties": { + "all": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "coordinating": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "replica": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "combined_coordinating_and_primary": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "primary": { + "properties": { + "bytes": { + "type": "long" + } + } + } + } + }, + "total": { + "properties": { + "all": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "coordinating": { + "properties": { + "rejections": { + "type": "long" + }, + "bytes": { + "type": "long" + } + } + }, + "replica": { + "properties": { + "rejections": { + "type": "long" + }, + "bytes": { + "type": "long" + } + } + }, + "combined_coordinating_and_primary": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "primary": { + "properties": { + "rejections": { + "type": "long" + }, + "bytes": { + "type": "long" + } + } + } + } + }, + "limit_in_bytes": { + "type": "long" + } + } } } }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json new file mode 100644 index 0000000000000..e8c3352131700 --- 
/dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/reindex-data-stream-pipeline.json @@ -0,0 +1,16 @@ +{ + "description": "This pipeline sanitizes documents that are being reindexed into a data stream using the reindex data stream API. It is an internal pipeline and should not be modified.", + "processors": [ + { + "set": { + "field": "@timestamp", + "value": 0, + "override": false + } + } + ], + "_meta": { + "managed": true + }, + "version": ${xpack.migrate.reindex.pipeline.version} +} diff --git a/x-pack/plugin/deprecation/src/main/java/module-info.java b/x-pack/plugin/deprecation/src/main/java/module-info.java index f9a86839ad6f2..4c46205df4f0c 100644 --- a/x-pack/plugin/deprecation/src/main/java/module-info.java +++ b/x-pack/plugin/deprecation/src/main/java/module-info.java @@ -13,6 +13,7 @@ requires org.apache.logging.log4j; requires org.apache.logging.log4j.core; requires log4j2.ecs.layout; + requires org.apache.lucene.core; exports org.elasticsearch.xpack.deprecation to org.elasticsearch.server; exports org.elasticsearch.xpack.deprecation.logging to org.elasticsearch.server; diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecker.java new file mode 100644 index 0000000000000..cc21f0b2cd711 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecker.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.deprecation; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.TriConsumer; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Cluster-specific deprecation checks, this is used to populate the {@code cluster_settings} field + */ +public class ClusterDeprecationChecker { + + private static final Logger logger = LogManager.getLogger(ClusterDeprecationChecker.class); + private final List, List>> CHECKS = List.of( + this::checkTransformSettings + ); + private final NamedXContentRegistry xContentRegistry; + + ClusterDeprecationChecker(NamedXContentRegistry xContentRegistry) { + this.xContentRegistry = xContentRegistry; + } + + public List check(ClusterState clusterState, List transformConfigs) { + List allIssues = new ArrayList<>(); + CHECKS.forEach(check -> check.apply(clusterState, transformConfigs, allIssues)); + return allIssues; + } + + private void checkTransformSettings( + ClusterState clusterState, + List transformConfigs, + List allIssues + ) { + for (var config : transformConfigs) { + try { + allIssues.addAll(config.checkForDeprecations(xContentRegistry)); + } catch (IOException e) { + logger.warn("failed to check transformation settings for '" + config.getId() + "'", e); + } + } + } +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java index c867092f7bc19..db8a3dc205e02 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java @@ -18,13 +18,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.function.BiFunction; import java.util.stream.Collectors; import static java.util.Map.entry; import static java.util.Map.ofEntries; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; /** * Checks the data streams for deprecation warnings. @@ -44,10 +44,24 @@ public DataStreamDeprecationChecker(IndexNameExpressionResolver indexNameExpress /** * @param clusterState The cluster state provided for the checker + * @param request not used yet in these checks + * @param precomputedData not used yet in these checks * @return the name of the data streams that have violated the checks with their respective warnings. */ @Override - public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + public Map> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { + return check(clusterState); + } + + /** + * @param clusterState The cluster state provided for the checker + * @return the name of the data streams that have violated the checks with their respective warnings. 
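+ * <p>Each entry in {@code DATA_STREAM_CHECKS} returns {@code null} when it does not fire, so the
+ * removed {@code filterChecks} helper reduces to a plain stream pipeline (a sketch of the pattern
+ * the checkers in this change now inline):
+ * <pre>{@code
+ * List<DeprecationIssue> issues = DATA_STREAM_CHECKS.stream()
+ *     .map(c -> c.apply(dataStream, clusterState))
+ *     .filter(Objects::nonNull)
+ *     .toList();
+ * }</pre>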
+ */ + public Map> check(ClusterState clusterState) { List dataStreamNames = indexNameExpressionResolver.dataStreamNames( clusterState, IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN @@ -58,7 +72,10 @@ public Map> check(ClusterState clusterState, Depr Map> dataStreamIssues = new HashMap<>(); for (String dataStreamName : dataStreamNames) { DataStream dataStream = clusterState.metadata().dataStreams().get(dataStreamName); - List issuesForSingleDataStream = filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); + List issuesForSingleDataStream = DATA_STREAM_CHECKS.stream() + .map(c -> c.apply(dataStream, clusterState)) + .filter(Objects::nonNull) + .toList(); if (issuesForSingleDataStream.isEmpty() == false) { dataStreamIssues.put(dataStreamName, issuesForSingleDataStream); } @@ -102,7 +119,7 @@ static DeprecationIssue ignoredOldIndicesCheck(DataStream dataStream, ClusterSta + "OK to remain read-only after upgrade", false, ofEntries( - entry("reindex_required", true), + entry("reindex_required", false), entry("total_backing_indices", backingIndices.size()), entry("ignored_indices_requiring_upgrade_count", ignoredIndices.size()), entry("ignored_indices_requiring_upgrade", ignoredIndices) diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java index 85b7c89e7cb85..2c8b95e378375 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java @@ -33,7 +33,7 @@ import java.util.function.Predicate; import java.util.function.Supplier; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.SKIP_DEPRECATIONS_SETTING; +import static org.elasticsearch.xpack.deprecation.TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING; import static org.elasticsearch.xpack.deprecation.logging.DeprecationIndexingComponent.DEPRECATION_INDEXING_FLUSH_INTERVAL; /** diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java deleted file mode 100644 index 039a75f51f030..0000000000000 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.deprecation; - -import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; - -import java.util.List; -import java.util.Objects; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * Class containing all the cluster, node, and index deprecation checks that will be served - * by the {@link DeprecationInfoAction}. 
- */ -public class DeprecationChecks { - - public static final Setting> SKIP_DEPRECATIONS_SETTING = Setting.stringListSetting( - "deprecation.skip_deprecated_settings", - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - private DeprecationChecks() {} - - static List> CLUSTER_SETTINGS_CHECKS = List.of(); - - static final List< - NodeDeprecationCheck> NODE_SETTINGS_CHECKS = List - .of( - NodeDeprecationChecks::checkMultipleDataPaths, - NodeDeprecationChecks::checkDataPathsList, - NodeDeprecationChecks::checkSharedDataPathSetting, - NodeDeprecationChecks::checkReservedPrefixedRealmNames, - NodeDeprecationChecks::checkExporterUseIngestPipelineSettings, - NodeDeprecationChecks::checkExporterPipelineMasterTimeoutSetting, - NodeDeprecationChecks::checkExporterCreateLegacyTemplateSetting, - NodeDeprecationChecks::checkMonitoringSettingHistoryDuration, - NodeDeprecationChecks::checkMonitoringSettingHistoryDuration, - NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecovery, - NodeDeprecationChecks::checkMonitoringSettingCollectIndices, - NodeDeprecationChecks::checkMonitoringSettingCollectCcrTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectEnrichStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecoveryStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectIndexStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectMlJobStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectNodeStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingCollectClusterStatsTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersHost, - NodeDeprecationChecks::checkMonitoringSettingExportersBulkTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersConnectionTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersConnectionReadTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersAuthUsername, - NodeDeprecationChecks::checkMonitoringSettingExportersAuthPass, - NodeDeprecationChecks::checkMonitoringSettingExportersSSL, - NodeDeprecationChecks::checkMonitoringSettingExportersProxyBase, - NodeDeprecationChecks::checkMonitoringSettingExportersSniffEnabled, - NodeDeprecationChecks::checkMonitoringSettingExportersHeaders, - NodeDeprecationChecks::checkMonitoringSettingExportersTemplateTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersMasterTimeout, - NodeDeprecationChecks::checkMonitoringSettingExportersEnabled, - NodeDeprecationChecks::checkMonitoringSettingExportersType, - NodeDeprecationChecks::checkMonitoringSettingExportersAlertsEnabled, - NodeDeprecationChecks::checkMonitoringSettingExportersAlertsBlacklist, - NodeDeprecationChecks::checkMonitoringSettingExportersIndexNameTimeFormat, - NodeDeprecationChecks::checkMonitoringSettingDecommissionAlerts, - NodeDeprecationChecks::checkMonitoringSettingEsCollectionEnabled, - NodeDeprecationChecks::checkMonitoringSettingCollectionEnabled, - NodeDeprecationChecks::checkMonitoringSettingCollectionInterval, - NodeDeprecationChecks::checkScriptContextCache, - NodeDeprecationChecks::checkScriptContextCompilationsRateLimitSetting, - NodeDeprecationChecks::checkScriptContextCacheSizeSetting, - NodeDeprecationChecks::checkScriptContextCacheExpirationSetting, - NodeDeprecationChecks::checkEnforceDefaultTierPreferenceSetting, - NodeDeprecationChecks::checkLifecyleStepMasterTimeoutSetting, - NodeDeprecationChecks::checkEqlEnabledSetting, - NodeDeprecationChecks::checkNodeAttrData, - 
NodeDeprecationChecks::checkWatcherBulkConcurrentRequestsSetting, - NodeDeprecationChecks::checkTracingApmSettings - ); - - /** - * helper utility function to reduce repeat of running a specific {@link List} of checks. - * - * @param checks The functional checks to execute using the mapper function - * @param mapper The function that executes the lambda check with the appropriate arguments - * @param The signature of the check (BiFunction, Function, including the appropriate arguments) - * @return The list of {@link DeprecationIssue} that were found in the cluster - */ - static List filterChecks(List checks, Function mapper) { - return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList()); - } - - @FunctionalInterface - public interface NodeDeprecationCheck { - R apply(A first, B second, C third, D fourth); - } -} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index c89e61fbcf24d..1fceb917ece53 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -12,41 +12,25 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.ComponentTemplate; -import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -59,93 +43,6 @@ private DeprecationInfoAction() { super(NAME); } - /** - * helper utility function to reduce repeat of running a specific {@link Set} of checks. 
- * - * @param checks The functional checks to execute using the mapper function - * @param mapper The function that executes the lambda check with the appropriate arguments - * @param The signature of the check (BiFunction, Function, including the appropriate arguments) - * @return The list of {@link DeprecationIssue} that were found in the cluster - */ - public static List filterChecks(List checks, Function mapper) { - return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList()); - } - - /** - * This method rolls up DeprecationIssues that are identical but on different nodes. It also roles up DeprecationIssues that are - * identical (and on different nodes) except that they differ in the removable settings listed in their meta object. We roll these up - * by taking the intersection of all removable settings in otherwise identical DeprecationIssues. That way we don't claim that a - * setting can be automatically removed if any node has it in its elasticsearch.yml. - * @param response - * @return - */ - private static List mergeNodeIssues(NodesDeprecationCheckResponse response) { - // A collection whose values are lists of DeprecationIssues that differ only by meta values (if that): - Collection>> issuesToMerge = getDeprecationIssuesThatDifferOnlyByMeta(response.getNodes()); - // A map of DeprecationIssues (containing only the intersection of removable settings) to the nodes they are seen on - Map> issueToListOfNodesMap = getMergedIssuesToNodesMap(issuesToMerge); - - return issueToListOfNodesMap.entrySet().stream().map(entry -> { - DeprecationIssue issue = entry.getKey(); - String details = issue.getDetails() != null ? issue.getDetails() + " " : ""; - return new DeprecationIssue( - issue.getLevel(), - issue.getMessage(), - issue.getUrl(), - details + "(nodes impacted: " + entry.getValue() + ")", - issue.isResolveDuringRollingUpgrade(), - issue.getMeta() - ); - }).collect(Collectors.toList()); - } - - /* - * This method pulls all the DeprecationIssues from the given nodeResponses, and buckets them into lists of DeprecationIssues that - * differ at most by meta values (if that). The returned tuples also contain the node name the deprecation issue was found on. If all - * nodes in the cluster were configured identically then all tuples in a list will differ only by the node name. - */ - private static Collection>> getDeprecationIssuesThatDifferOnlyByMeta( - List nodeResponses - ) { - Map>> issuesToMerge = new HashMap<>(); - for (NodesDeprecationCheckAction.NodeResponse resp : nodeResponses) { - for (DeprecationIssue issue : resp.getDeprecationIssues()) { - issuesToMerge.computeIfAbsent( - new DeprecationIssue( - issue.getLevel(), - issue.getMessage(), - issue.getUrl(), - issue.getDetails(), - issue.isResolveDuringRollingUpgrade(), - null // Intentionally removing meta from the key so that it's not taken into account for equality - ), - (key) -> new ArrayList<>() - ).add(new Tuple<>(issue, resp.getNode().getName())); - } - } - return issuesToMerge.values(); - } - - /* - * At this point we have one DeprecationIssue per node for a given deprecation. This method rolls them up into a single DeprecationIssue - * with a list of nodes that they appear on. If two DeprecationIssues on two different nodes differ only by the set of removable - * settings (i.e. they have different elasticsearch.yml configurations) then this method takes the intersection of those settings when - * it rolls them up. 
- */ - private static Map> getMergedIssuesToNodesMap( - Collection>> issuesToMerge - ) { - Map> issueToListOfNodesMap = new HashMap<>(); - for (List> similarIssues : issuesToMerge) { - DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings( - similarIssues.stream().map(Tuple::v1).toList() - ); - issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>()) - .addAll(similarIssues.stream().map(Tuple::v2).toList()); - } - return issueToListOfNodesMap; - } - public static class Response extends ActionResponse implements ToXContentObject { static final Set RESERVED_NAMES = Set.of( "cluster_settings", @@ -289,143 +186,6 @@ public int hashCode() { return Objects.hash(clusterSettingsIssues, nodeSettingsIssues, resourceDeprecationIssues, pluginSettingsIssues); } - /** - * This is the function that does the bulk of the logic of taking the appropriate ES dependencies - * like {@link NodeInfo}, {@link ClusterState}. Alongside these objects and the list of deprecation checks, - * this function will run through all the checks and build out the final list of issues that exist in the - * cluster. - * - * @param state The cluster state - * @param indexNameExpressionResolver Used to resolve indices into their concrete names - * @param request The originating request containing the index expressions to evaluate - * @param nodeDeprecationResponse The response containing the deprecation issues found on each node - * @param clusterSettingsChecks The list of cluster-level checks - * @param pluginSettingIssues this map gets modified to move transform deprecation issues into cluster_settings - * @param skipTheseDeprecatedSettings the settings that will be removed from cluster metadata and the index metadata of all the - * indexes specified by indexNames - * @param resourceDeprecationCheckers these are checkers that take as input the cluster state and return a map from resource type - * to issues grouped by the resource name. 
- * @return The list of deprecation issues found in the cluster - */ - public static DeprecationInfoAction.Response from( - ClusterState state, - IndexNameExpressionResolver indexNameExpressionResolver, - Request request, - NodesDeprecationCheckResponse nodeDeprecationResponse, - List> clusterSettingsChecks, - Map> pluginSettingIssues, - List skipTheseDeprecatedSettings, - List resourceDeprecationCheckers - ) { - assert Transports.assertNotTransportThread("walking mappings in indexSettingsChecks is expensive"); - // Allow system index access here to prevent deprecation warnings when we call this API - String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, request); - ClusterState stateWithSkippedSettingsRemoved = removeSkippedSettings(state, concreteIndexNames, skipTheseDeprecatedSettings); - List clusterSettingsIssues = filterChecks( - clusterSettingsChecks, - (c) -> c.apply(stateWithSkippedSettingsRemoved) - ); - List nodeSettingsIssues = mergeNodeIssues(nodeDeprecationResponse); - - Map>> resourceDeprecationIssues = new HashMap<>(); - for (ResourceDeprecationChecker resourceDeprecationChecker : resourceDeprecationCheckers) { - Map> issues = resourceDeprecationChecker.check(stateWithSkippedSettingsRemoved, request); - if (issues.isEmpty() == false) { - resourceDeprecationIssues.put(resourceDeprecationChecker.getName(), issues); - } - } - - // WORKAROUND: move transform deprecation issues into cluster_settings - List transformDeprecations = pluginSettingIssues.remove( - TransformDeprecationChecker.TRANSFORM_DEPRECATION_KEY - ); - if (transformDeprecations != null) { - clusterSettingsIssues.addAll(transformDeprecations); - } - - return new DeprecationInfoAction.Response( - clusterSettingsIssues, - nodeSettingsIssues, - resourceDeprecationIssues, - pluginSettingIssues - ); - } - } - - /** - * - * @param state The cluster state to modify - * @param indexNames The names of the indexes whose settings need to be filtered - * @param skipTheseDeprecatedSettings The settings that will be removed from cluster metadata and the index metadata of all the - * indexes specified by indexNames - * @return A modified cluster state with the given settings removed - */ - private static ClusterState removeSkippedSettings(ClusterState state, String[] indexNames, List skipTheseDeprecatedSettings) { - // Short-circuit, no need to reconstruct the cluster state if there are no settings to remove - if (skipTheseDeprecatedSettings == null || skipTheseDeprecatedSettings.isEmpty()) { - return state; - } - ClusterState.Builder clusterStateBuilder = new ClusterState.Builder(state); - Metadata.Builder metadataBuilder = Metadata.builder(state.metadata()); - metadataBuilder.transientSettings( - metadataBuilder.transientSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) - ); - metadataBuilder.persistentSettings( - metadataBuilder.persistentSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) - ); - Map indicesBuilder = new HashMap<>(state.getMetadata().indices()); - for (String indexName : indexNames) { - IndexMetadata indexMetadata = state.getMetadata().index(indexName); - IndexMetadata.Builder filteredIndexMetadataBuilder = new IndexMetadata.Builder(indexMetadata); - Settings filteredSettings = indexMetadata.getSettings() - .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false); - filteredIndexMetadataBuilder.settings(filteredSettings); - indicesBuilder.put(indexName, 
filteredIndexMetadataBuilder.build()); - } - metadataBuilder.componentTemplates(state.metadata().componentTemplates().entrySet().stream().map(entry -> { - String templateName = entry.getKey(); - ComponentTemplate componentTemplate = entry.getValue(); - Template template = componentTemplate.template(); - if (template.settings() == null || template.settings().isEmpty()) { - return Tuple.tuple(templateName, componentTemplate); - } - return Tuple.tuple( - templateName, - new ComponentTemplate( - Template.builder(template) - .settings(template.settings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)) - .build(), - componentTemplate.version(), - componentTemplate.metadata(), - componentTemplate.deprecated() - ) - ); - }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); - metadataBuilder.indexTemplates(state.metadata().templatesV2().entrySet().stream().map(entry -> { - String templateName = entry.getKey(); - ComposableIndexTemplate indexTemplate = entry.getValue(); - Template template = indexTemplate.template(); - if (templateName == null || template.settings() == null || template.settings().isEmpty()) { - return Tuple.tuple(templateName, indexTemplate); - } - return Tuple.tuple( - templateName, - indexTemplate.toBuilder() - .template( - Template.builder(indexTemplate.template()) - .settings( - indexTemplate.template() - .settings() - .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) - ) - ) - .build() - ); - }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); - - metadataBuilder.indices(indicesBuilder); - clusterStateBuilder.metadata(metadataBuilder); - return clusterStateBuilder.build(); } public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java index 6d7f860f645f1..f96fae6343b9f 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java @@ -19,9 +19,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Function; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; @@ -33,17 +33,28 @@ public class IlmPolicyDeprecationChecker implements ResourceDeprecationChecker { public static final String NAME = "ilm_policies"; - private static final List> CHECKS = List.of( - IlmPolicyDeprecationChecker::checkLegacyTiers, - IlmPolicyDeprecationChecker::checkFrozenAction - ); + private final List> checks = List.of(this::checkLegacyTiers, this::checkFrozenAction); /** * @param clusterState The cluster state provided for the checker + * @param request not used yet in these checks + * @param precomputedData not used yet in these checks * @return the name of the data streams that have violated the checks with their respective warnings. 
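+ * <p>In sketch form, a caller drives every resource checker through this three-argument entry
+ * point and keys the results by checker name (this loop previously lived in
+ * {@code DeprecationInfoAction.Response#from} and presumably moves to
+ * {@code TransportDeprecationInfoAction} in this change):
+ * <pre>{@code
+ * Map<String, Map<String, List<DeprecationIssue>>> resourceIssues = new HashMap<>();
+ * for (ResourceDeprecationChecker checker : resourceDeprecationCheckers) {
+ *     Map<String, List<DeprecationIssue>> issues = checker.check(state, request, precomputedData);
+ *     if (issues.isEmpty() == false) {
+ *         resourceIssues.put(checker.getName(), issues);
+ *     }
+ * }
+ * }</pre>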
*/ @Override - public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + public Map> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { + return check(clusterState); + } + + /** + * @param clusterState The cluster state provided for the checker + * @return the name of the data streams that have violated the checks with their respective warnings. + */ + Map> check(ClusterState clusterState) { IndexLifecycleMetadata lifecycleMetadata = clusterState.metadata().custom(IndexLifecycleMetadata.TYPE); if (lifecycleMetadata == null || lifecycleMetadata.getPolicyMetadatas().isEmpty()) { return Map.of(); @@ -53,7 +64,10 @@ public Map> check(ClusterState clusterState, Depr String name = entry.getKey(); LifecyclePolicyMetadata policyMetadata = entry.getValue(); - List issuesForSinglePolicy = filterChecks(CHECKS, c -> c.apply(policyMetadata.getPolicy())); + List issuesForSinglePolicy = checks.stream() + .map(c -> c.apply(policyMetadata.getPolicy())) + .filter(Objects::nonNull) + .toList(); if (issuesForSinglePolicy.isEmpty() == false) { issues.put(name, issuesForSinglePolicy); } @@ -61,7 +75,7 @@ public Map> check(ClusterState clusterState, Depr return issues.isEmpty() ? Map.of() : issues; } - static DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) { + private DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) { for (Phase phase : policy.getPhases().values()) { AllocateAction allocateAction = (AllocateAction) phase.getActions().get(AllocateAction.NAME); if (allocateAction != null) { @@ -82,7 +96,7 @@ static DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) { return null; } - static DeprecationIssue checkFrozenAction(LifecyclePolicy policy) { + private DeprecationIssue checkFrozenAction(LifecyclePolicy policy) { for (Phase phase : policy.getPhases().values()) { if (phase.getActions().containsKey(FreezeAction.NAME)) { return new DeprecationIssue( diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java index e941ebfc05e49..6bed9143175ca 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java @@ -10,26 +10,28 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.LegacyFormatNames; +import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.function.BiConsumer; import 
java.util.function.BiFunction; import java.util.function.Function; +import java.util.stream.Collectors; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; @@ -39,30 +41,37 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker { public static final String NAME = "index_settings"; - private static final List> INDEX_SETTINGS_CHECKS = List.of( - IndexDeprecationChecker::oldIndicesCheck, - IndexDeprecationChecker::ignoredOldIndicesCheck, - IndexDeprecationChecker::translogRetentionSettingCheck, - IndexDeprecationChecker::checkIndexDataPath, - IndexDeprecationChecker::storeTypeSettingCheck, - IndexDeprecationChecker::frozenIndexSettingCheck, - IndexDeprecationChecker::deprecatedCamelCasePattern, - IndexDeprecationChecker::legacyRoutingSettingCheck - ); private final IndexNameExpressionResolver indexNameExpressionResolver; + private final List>, DeprecationIssue>> checks = List.of( + this::oldIndicesCheck, + this::ignoredOldIndicesCheck, + this::translogRetentionSettingCheck, + this::checkIndexDataPath, + this::storeTypeSettingCheck, + this::deprecatedCamelCasePattern, + this::legacyRoutingSettingCheck + ); public IndexDeprecationChecker(IndexNameExpressionResolver indexNameExpressionResolver) { this.indexNameExpressionResolver = indexNameExpressionResolver; } @Override - public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + public Map> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { Map> indexSettingsIssues = new HashMap<>(); String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(clusterState, request); + Map> indexToTransformIds = indexToTransformIds(precomputedData.transformConfigs()); for (String concreteIndex : concreteIndexNames) { IndexMetadata indexMetadata = clusterState.getMetadata().index(concreteIndex); - List singleIndexIssues = filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); + List singleIndexIssues = checks.stream() + .map(c -> c.apply(indexMetadata, clusterState, indexToTransformIds)) + .filter(Objects::nonNull) + .toList(); if (singleIndexIssues.isEmpty() == false) { indexSettingsIssues.put(concreteIndex, singleIndexIssues); } @@ -78,46 +87,100 @@ public String getName() { return NAME; } - static DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue oldIndicesCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map> indexToTransformIds + ) { // TODO: this check needs to be revised. It's trivially true right now. 
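// (Sketch of the private indexToTransformIds(...) helper consumed by check(...) above; the
// concrete implementation presumably sits further down in this file, outside this excerpt.
// Assumes TransformConfig exposes its destination index via getDestination().getIndex(),
// and that the helper groups transform IDs per destination index name.)
//
// private Map<String, List<String>> indexToTransformIds(List<TransformConfig> transformConfigs) {
//     return transformConfigs.stream()
//         .collect(Collectors.groupingBy(
//             config -> config.getDestination().getIndex(),            // destination index name
//             Collectors.mapping(TransformConfig::getId, Collectors.toList())
//         ));
// }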
IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); // We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, false) && isNotDataStreamIndex(indexMetadata, clusterState)) { - return new DeprecationIssue( - DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 9.0", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This index has version: " + currentCompatibilityVersion.toReleaseVersion(), - false, - Collections.singletonMap("reindex_required", true) - ); + var transforms = transformIdsForIndex(indexMetadata, indexToTransformIds); + if (transforms.isEmpty() == false) { + return new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + Strings.format( + "This index was created in version [%s] and requires action before upgrading to 9.0. The following transforms are " + + "configured to write to this index: [%s]. Refer to the migration guide to learn more about how to prepare " + + "transforms destination indices for your upgrade.", + currentCompatibilityVersion.toReleaseVersion(), + String.join(", ", transforms) + ), + false, + Map.of("reindex_required", true, "transform_ids", transforms) + ); + } else { + return new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "Old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This index has version: " + currentCompatibilityVersion.toReleaseVersion(), + false, + Map.of("reindex_required", true) + ); + } } return null; } - static DeprecationIssue ignoredOldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private List transformIdsForIndex(IndexMetadata indexMetadata, Map> indexToTransformIds) { + return indexToTransformIds.getOrDefault(indexMetadata.getIndex().getName(), List.of()); + } + + private DeprecationIssue ignoredOldIndicesCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map> indexToTransformIds + ) { IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); // We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, true) && isNotDataStreamIndex(indexMetadata, clusterState)) { - return new DeprecationIssue( - DeprecationIssue.Level.WARNING, - "Old index with a compatibility version < 9.0 Has Been Ignored", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This read-only index has version: " - + currentCompatibilityVersion.toReleaseVersion() - + " and will be supported as read-only in 9.0", - false, - Collections.singletonMap("reindex_required", true) - ); + var transforms = transformIdsForIndex(indexMetadata, indexToTransformIds); + if (transforms.isEmpty() == false) { + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + Strings.format( + "This 
index was created in version [%s] and will be supported as a read-only index in 9.0. The following " + + "transforms are no longer able to write to this index: [%s]. Refer to the migration guide to learn more " + + "about how to handle your transforms destination indices.", + currentCompatibilityVersion.toReleaseVersion(), + String.join(", ", transforms) + ), + false, + Map.of("reindex_required", true, "transform_ids", transforms) + ); + } else { + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Old index with a compatibility version < 9.0 has been ignored", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This read-only index has version: " + + currentCompatibilityVersion.toReleaseVersion() + + " and will be supported as read-only in 9.0", + false, + Map.of("reindex_required", true) + ); + } } return null; } - private static boolean isNotDataStreamIndex(IndexMetadata indexMetadata, ClusterState clusterState) { + private boolean isNotDataStreamIndex(IndexMetadata indexMetadata, ClusterState clusterState) { return clusterState.metadata().findDataStreams(indexMetadata.getIndex().getName()).isEmpty(); } - static DeprecationIssue translogRetentionSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue translogRetentionSettingCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map> ignored + ) { final boolean softDeletesEnabled = IndexSettings.INDEX_SOFT_DELETES_SETTING.get(indexMetadata.getSettings()); if (softDeletesEnabled) { if (IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.exists(indexMetadata.getSettings()) @@ -144,7 +207,7 @@ static DeprecationIssue translogRetentionSettingCheck(IndexMetadata indexMetadat return null; } - static DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterState clusterState, Map> ignored) { if (IndexMetadata.INDEX_DATA_PATH_SETTING.exists(indexMetadata.getSettings())) { final String message = String.format( Locale.ROOT, @@ -159,7 +222,11 @@ static DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterS return null; } - static DeprecationIssue storeTypeSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue storeTypeSettingCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map> ignored + ) { final String storeType = IndexModule.INDEX_STORE_TYPE_SETTING.get(indexMetadata.getSettings()); if (IndexModule.Type.SIMPLEFS.match(storeType)) { return new DeprecationIssue( @@ -176,25 +243,11 @@ static DeprecationIssue storeTypeSettingCheck(IndexMetadata indexMetadata, Clust return null; } - static DeprecationIssue frozenIndexSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { - Boolean isIndexFrozen = FrozenEngine.INDEX_FROZEN.get(indexMetadata.getSettings()); - if (Boolean.TRUE.equals(isIndexFrozen)) { - String indexName = indexMetadata.getIndex().getName(); - return new DeprecationIssue( - DeprecationIssue.Level.WARNING, - "index [" - + indexName - + "] is a frozen index. The frozen indices feature is deprecated and will be removed in a future version", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/frozen-indices.html", - "Frozen indices no longer offer any advantages. 
Consider cold or frozen tiers in place of frozen indices.", - false, - null - ); - } - return null; - } - - static DeprecationIssue legacyRoutingSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue legacyRoutingSettingCheck( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map<String, List<String>> ignored + ) { List<String> deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexMetadata.getSettings()); if (deprecatedSettings.isEmpty()) { return null; @@ -210,7 +263,7 @@ static DeprecationIssue legacyRoutingSettingCheck(IndexMetadata indexMetadata, C ); } - private static void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsumer<MappingMetadata, Map<String, Object>> checker) { + private void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsumer<MappingMetadata, Map<String, Object>> checker) { if (indexMetadata.mapping() != null) { Map<String, Object> sourceAsMap = indexMetadata.mapping().sourceAsMap(); checker.accept(indexMetadata.mapping(), sourceAsMap); @@ -228,7 +281,7 @@ private static void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsum * @return a list of issues found in fields */ @SuppressWarnings("unchecked") - static List<String> findInPropertiesRecursively( + private List<String> findInPropertiesRecursively( String type, Map<String, Object> parentMap, Function<Map<String, Object>, Boolean> predicate, @@ -282,7 +335,11 @@ static List<String> findInPropertiesRecursively( return issues; } - static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue deprecatedCamelCasePattern( + IndexMetadata indexMetadata, + ClusterState clusterState, + Map<String, List<String>> ignored + ) { List<String> fields = new ArrayList<>(); fieldLevelMappingIssue( indexMetadata, @@ -290,8 +347,8 @@ static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, findInPropertiesRecursively( mappingMetadata.type(), sourceAsMap, - IndexDeprecationChecker::isDateFieldWithCamelCasePattern, - IndexDeprecationChecker::changeFormatToSnakeCase, + this::isDateFieldWithCamelCasePattern, + this::changeFormatToSnakeCase, "", "" ) @@ -312,7 +369,7 @@ static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, return null; } - private static boolean isDateFieldWithCamelCasePattern(Map<String, Object> property) { + private boolean isDateFieldWithCamelCasePattern(Map<String, Object> property) { if ("date".equals(property.get("type")) && property.containsKey("format")) { String[] patterns = DateFormatter.splitCombinedPatterns((String) property.get("format")); for (String pattern : patterns) { @@ -323,7 +380,7 @@ private static boolean isDateFieldWithCamelCasePattern(Map<String, Object> property) { return false; } - private static String changeFormatToSnakeCase(String type, Map.Entry<?, ?> entry) { + private String changeFormatToSnakeCase(String type, Map.Entry<?, ?> entry) { Map<?, ?> value = (Map<?, ?>) entry.getValue(); final String formatFieldValue = (String) value.get("format"); String[] patterns = DateFormatter.splitCombinedPatterns(formatFieldValue); @@ -339,4 +396,14 @@ private static String changeFormatToSnakeCase(String type, Map.Entry<?, ?> entry sb.deleteCharAt(sb.length() - 1); return sb.toString(); } + + private Map<String, List<String>> indexToTransformIds(List<TransformConfig> transformConfigs) { + return transformConfigs.stream() + .collect( + Collectors.groupingBy( + config -> config.getDestination().getIndex(), + Collectors.mapping(TransformConfig::getId, Collectors.toList()) + ) + ); + } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java
b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java deleted file mode 100644 index f7aba6491dfd2..0000000000000 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.deprecation; - -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; -import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.function.Function; - -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; -import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; -import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; -import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; - -/** - * Checks the legacy index templates for deprecation warnings. - */ -public class LegacyIndexTemplateDeprecationChecker implements ResourceDeprecationChecker { - - public static final String NAME = "legacy_templates"; - private static final List> CHECKS = List.of( - LegacyIndexTemplateDeprecationChecker::checkIndexTemplates - ); - - /** - * @param clusterState The cluster state provided for the checker - * @return the name of the data streams that have violated the checks with their respective warnings. - */ - @Override - public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { - var templates = clusterState.metadata().templates().entrySet(); - if (templates.isEmpty()) { - return Map.of(); - } - Map> issues = new HashMap<>(); - for (Map.Entry entry : templates) { - String name = entry.getKey(); - IndexTemplateMetadata template = entry.getValue(); - - List issuesForSingleIndexTemplate = filterChecks(CHECKS, c -> c.apply(template)); - if (issuesForSingleIndexTemplate.isEmpty() == false) { - issues.put(name, issuesForSingleIndexTemplate); - } - } - return issues.isEmpty() ? Map.of() : issues; - } - - static DeprecationIssue checkIndexTemplates(IndexTemplateMetadata indexTemplateMetadata) { - List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexTemplateMetadata.settings()); - if (deprecatedSettings.isEmpty()) { - return null; - } - return new DeprecationIssue( - DeprecationIssue.Level.WARNING, - DEPRECATION_MESSAGE, - DEPRECATION_HELP_URL, - "One or more of your legacy index templates is configured with 'index.routing.allocation.*.data' settings. 
" - + DEPRECATION_COMMON_DETAIL, - false, - DeprecationIssue.createMetaMapForRemovableSettings(deprecatedSettings) - ); - } - - @Override - public String getName() { - return NAME; - } -} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecker.java new file mode 100644 index 0000000000000..a2e9ed12a2298 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecker.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Retrieves the individual node checks and reduces them to a list of deprecation warnings + */ +public class NodeDeprecationChecker { + + private static final Logger logger = LogManager.getLogger(NodeDeprecationChecker.class); + private final ThreadPool threadPool; + + public NodeDeprecationChecker(ThreadPool threadPool) { + this.threadPool = threadPool; + } + + public void check(Client client, ActionListener> listener) { + NodesDeprecationCheckRequest nodeDepReq = new NodesDeprecationCheckRequest("_all"); + ClientHelper.executeAsyncWithOrigin( + client, + ClientHelper.DEPRECATION_ORIGIN, + NodesDeprecationCheckAction.INSTANCE, + nodeDepReq, + new ThreadedActionListener<>(threadPool.generic(), listener.delegateFailureAndWrap((l, response) -> { + if (response.hasFailures()) { + List failedNodeIds = response.failures() + .stream() + .map(failure -> failure.nodeId() + ": " + failure.getMessage()) + .collect(Collectors.toList()); + logger.warn("nodes failed to run deprecation checks: {}", failedNodeIds); + for (FailedNodeException failure : response.failures()) { + logger.debug("node {} failed to run deprecation checks: {}", failure.nodeId(), failure); + } + } + l.onResponse(reduceToDeprecationIssues(response)); + })) + ); + } + + /** + * This method rolls up DeprecationIssues that are identical but on different nodes. It also rolls up DeprecationIssues that are + * identical (and on different nodes) except that they differ in the removable settings listed in their meta object. We roll these up + * by taking the intersection of all removable settings in otherwise identical DeprecationIssues. That way we don't claim that a + * setting can be automatically removed if any node has it in its elasticsearch.yml. + * @param response the response that contains the deprecation issues of single nodes + * @return a list of deprecation issues grouped accordingly. 
+ */ + static List<DeprecationIssue> reduceToDeprecationIssues(NodesDeprecationCheckResponse response) { + // A collection whose values are lists of DeprecationIssues that differ only by meta values (if that): + Collection<List<Tuple<DeprecationIssue, String>>> issuesToMerge = getDeprecationIssuesThatDifferOnlyByMeta(response.getNodes()); + // A map of DeprecationIssues (containing only the intersection of removable settings) to the nodes they are seen on + Map<DeprecationIssue, List<String>> issueToListOfNodesMap = getMergedIssuesToNodesMap(issuesToMerge); + + return issueToListOfNodesMap.entrySet().stream().map(entry -> { + DeprecationIssue issue = entry.getKey(); + String details = issue.getDetails() != null ? issue.getDetails() + " " : ""; + return new DeprecationIssue( + issue.getLevel(), + issue.getMessage(), + issue.getUrl(), + details + "(nodes impacted: " + entry.getValue() + ")", + issue.isResolveDuringRollingUpgrade(), + issue.getMeta() + ); + }).collect(Collectors.toList()); + } + + /* + * This method pulls all the DeprecationIssues from the given nodeResponses, and buckets them into lists of DeprecationIssues that + * differ at most by meta values (if that). The returned tuples also contain the node name the deprecation issue was found on. If all + * nodes in the cluster were configured identically then all tuples in a list will differ only by the node name. + */ + private static Collection<List<Tuple<DeprecationIssue, String>>> getDeprecationIssuesThatDifferOnlyByMeta( + List<NodesDeprecationCheckAction.NodeResponse> nodeResponses + ) { + Map<DeprecationIssue, List<Tuple<DeprecationIssue, String>>> issuesToMerge = new HashMap<>(); + for (NodesDeprecationCheckAction.NodeResponse resp : nodeResponses) { + for (DeprecationIssue issue : resp.getDeprecationIssues()) { + issuesToMerge.computeIfAbsent( + new DeprecationIssue( + issue.getLevel(), + issue.getMessage(), + issue.getUrl(), + issue.getDetails(), + issue.isResolveDuringRollingUpgrade(), + null // Intentionally removing meta from the key so that it's not taken into account for equality + ), + (key) -> new ArrayList<>() + ).add(new Tuple<>(issue, resp.getNode().getName())); + } + } + return issuesToMerge.values(); + } + + /* + * At this point we have one DeprecationIssue per node for a given deprecation. This method rolls them up into a single DeprecationIssue + * with a list of nodes that they appear on. If two DeprecationIssues on two different nodes differ only by the set of removable + * settings (i.e. they have different elasticsearch.yml configurations) then this method takes the intersection of those settings when + * it rolls them up.
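+ * For example (hypothetical settings): if node-1 reports an issue with removable settings [a, b] and node-2 reports an + * otherwise identical issue with [b, c], the merged issue keeps only [b], and its details gain a suffix of the form + * "(nodes impacted: [node-1, node-2])".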
+ */ + private static Map<DeprecationIssue, List<String>> getMergedIssuesToNodesMap( + Collection<List<Tuple<DeprecationIssue, String>>> issuesToMerge + ) { + Map<DeprecationIssue, List<String>> issueToListOfNodesMap = new HashMap<>(); + for (List<Tuple<DeprecationIssue, String>> similarIssues : issuesToMerge) { + DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings( + similarIssues.stream().map(Tuple::v1).toList() + ); + issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>()) + .addAll(similarIssues.stream().map(Tuple::v2).toList()); + } + return issueToListOfNodesMap; + } + +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java index b6fff5a82f0cd..2f476d111f4ba 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java @@ -41,6 +41,60 @@ public class NodeDeprecationChecks { + // Visible for testing + static final List< + NodeDeprecationCheck<Settings, PluginsAndModules, ClusterState, XPackLicenseState, DeprecationIssue>> SINGLE_NODE_CHECKS = List.of( + NodeDeprecationChecks::checkMultipleDataPaths, + NodeDeprecationChecks::checkDataPathsList, + NodeDeprecationChecks::checkSharedDataPathSetting, + NodeDeprecationChecks::checkReservedPrefixedRealmNames, + NodeDeprecationChecks::checkExporterUseIngestPipelineSettings, + NodeDeprecationChecks::checkExporterPipelineMasterTimeoutSetting, + NodeDeprecationChecks::checkExporterCreateLegacyTemplateSetting, + NodeDeprecationChecks::checkMonitoringSettingHistoryDuration, + NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecovery, + NodeDeprecationChecks::checkMonitoringSettingCollectIndices, + NodeDeprecationChecks::checkMonitoringSettingCollectCcrTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectEnrichStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectIndexRecoveryStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectIndexStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectMlJobStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectNodeStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingCollectClusterStatsTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersHost, + NodeDeprecationChecks::checkMonitoringSettingExportersBulkTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersConnectionTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersConnectionReadTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersAuthUsername, + NodeDeprecationChecks::checkMonitoringSettingExportersAuthPass, + NodeDeprecationChecks::checkMonitoringSettingExportersSSL, + NodeDeprecationChecks::checkMonitoringSettingExportersProxyBase, + NodeDeprecationChecks::checkMonitoringSettingExportersSniffEnabled, + NodeDeprecationChecks::checkMonitoringSettingExportersHeaders, + NodeDeprecationChecks::checkMonitoringSettingExportersTemplateTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersMasterTimeout, + NodeDeprecationChecks::checkMonitoringSettingExportersEnabled, + NodeDeprecationChecks::checkMonitoringSettingExportersType, + NodeDeprecationChecks::checkMonitoringSettingExportersAlertsEnabled, + NodeDeprecationChecks::checkMonitoringSettingExportersAlertsBlacklist, + NodeDeprecationChecks::checkMonitoringSettingExportersIndexNameTimeFormat, +
NodeDeprecationChecks::checkMonitoringSettingDecommissionAlerts, + NodeDeprecationChecks::checkMonitoringSettingEsCollectionEnabled, + NodeDeprecationChecks::checkMonitoringSettingCollectionEnabled, + NodeDeprecationChecks::checkMonitoringSettingCollectionInterval, + NodeDeprecationChecks::checkScriptContextCache, + NodeDeprecationChecks::checkScriptContextCompilationsRateLimitSetting, + NodeDeprecationChecks::checkScriptContextCacheSizeSetting, + NodeDeprecationChecks::checkScriptContextCacheExpirationSetting, + NodeDeprecationChecks::checkEnforceDefaultTierPreferenceSetting, + NodeDeprecationChecks::checkLifecyleStepMasterTimeoutSetting, + NodeDeprecationChecks::checkEqlEnabledSetting, + NodeDeprecationChecks::checkNodeAttrData, + NodeDeprecationChecks::checkWatcherBulkConcurrentRequestsSetting, + NodeDeprecationChecks::checkTracingApmSettings + ); + static DeprecationIssue checkDeprecatedSetting( final Settings clusterSettings, final Settings nodeSettings, @@ -77,15 +131,6 @@ private static Map createMetaMapForRemovableSettings(boolean can return canAutoRemoveSetting ? DeprecationIssue.createMetaMapForRemovableSettings(removableSettings) : null; } - static DeprecationIssue checkRemovedSetting( - final Settings clusterSettings, - final Settings nodeSettings, - final Setting removedSetting, - final String url - ) { - return checkRemovedSetting(clusterSettings, nodeSettings, removedSetting, url, null, DeprecationIssue.Level.CRITICAL); - } - static DeprecationIssue checkRemovedSetting( final Settings clusterSettings, final Settings nodeSettings, @@ -1012,4 +1057,9 @@ static DeprecationIssue checkTracingApmSettings( DeprecationIssue.Level.CRITICAL ); } + + @FunctionalInterface + public interface NodeDeprecationCheck { + R apply(A first, B second, C third, D fourth); + } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java index 71b9903f69f86..daa3514e3b989 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java @@ -24,8 +24,14 @@ public interface ResourceDeprecationChecker { * This runs the checks for the current deprecation checker. 
* * @param clusterState The cluster state provided for the checker + * @param request The deprecation request that triggered this check + * @param precomputedData Data that have been remotely retrieved and might be useful in the checks */ - Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request); + Map<String, List<DeprecationIssue>> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ); /** * @return The name of the checker diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java index 5a451a9613797..ff0ff982d11bf 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java @@ -19,9 +19,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Function; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; @@ -32,20 +32,34 @@ public class TemplateDeprecationChecker implements ResourceDeprecationChecker { public static final String NAME = "templates"; - private static final List<Function<ComposableIndexTemplate, DeprecationIssue>> INDEX_TEMPLATE_CHECKS = List.of( - TemplateDeprecationChecker::checkLegacyTiersInIndexTemplate + private final List<Function<ComposableIndexTemplate, DeprecationIssue>> indexTemplateChecks = List.of( + this::checkLegacyTiersInIndexTemplate ); - private static final List<Function<ComponentTemplate, DeprecationIssue>> COMPONENT_TEMPLATE_CHECKS = List.of( - TemplateDeprecationChecker::checkSourceModeInComponentTemplates, - TemplateDeprecationChecker::checkLegacyTiersInComponentTemplates + private final List<Function<ComponentTemplate, DeprecationIssue>> componentTemplateChecks = List.of( + this::checkSourceModeInComponentTemplates, + this::checkLegacyTiersInComponentTemplates ); /** * @param clusterState The cluster state provided for the checker + * @param request not used yet in these checks + * @param precomputedData not used yet in these checks * @return the names of the templates that have violated the checks with their respective warnings. */ @Override - public Map<String, List<DeprecationIssue>> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + public Map<String, List<DeprecationIssue>> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { + return check(clusterState); + } + + /** + * @param clusterState The cluster state provided for the checker + * @return the names of the templates that have violated the checks with their respective warnings.
+ */ + Map> check(ClusterState clusterState) { var indexTemplates = clusterState.metadata().templatesV2().entrySet(); var componentTemplates = clusterState.metadata().componentTemplates().entrySet(); if (indexTemplates.isEmpty() && componentTemplates.isEmpty()) { @@ -56,7 +70,10 @@ public Map> check(ClusterState clusterState, Depr String name = entry.getKey(); ComposableIndexTemplate template = entry.getValue(); - List issuesForSingleIndexTemplate = filterChecks(INDEX_TEMPLATE_CHECKS, c -> c.apply(template)); + List issuesForSingleIndexTemplate = indexTemplateChecks.stream() + .map(c -> c.apply(template)) + .filter(Objects::nonNull) + .toList(); if (issuesForSingleIndexTemplate.isEmpty() == false) { issues.computeIfAbsent(name, ignored -> new ArrayList<>()).addAll(issuesForSingleIndexTemplate); } @@ -65,7 +82,10 @@ public Map> check(ClusterState clusterState, Depr String name = entry.getKey(); ComponentTemplate template = entry.getValue(); - List issuesForSingleIndexTemplate = filterChecks(COMPONENT_TEMPLATE_CHECKS, c -> c.apply(template)); + List issuesForSingleIndexTemplate = componentTemplateChecks.stream() + .map(c -> c.apply(template)) + .filter(Objects::nonNull) + .toList(); if (issuesForSingleIndexTemplate.isEmpty() == false) { issues.computeIfAbsent(name, ignored -> new ArrayList<>()).addAll(issuesForSingleIndexTemplate); } @@ -73,7 +93,7 @@ public Map> check(ClusterState clusterState, Depr return issues.isEmpty() ? Map.of() : issues; } - static DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate composableIndexTemplate) { + private DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate composableIndexTemplate) { Template template = composableIndexTemplate.template(); if (template != null) { List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(template.settings()); @@ -93,7 +113,7 @@ static DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate return null; } - static DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate template) { + private DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate template) { if (template.template().mappings() != null) { var sourceAsMap = (Map) XContentHelper.convertToMap(template.template().mappings().uncompressed(), true).v2().get("_doc"); if (sourceAsMap != null) { @@ -102,9 +122,9 @@ static DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate te if (sourceMap.containsKey("mode")) { return new DeprecationIssue( DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING_TITLE, + "https://ela.st/migrate-source-mode", SourceFieldMapper.DEPRECATION_WARNING, - "https://github.com/elastic/elasticsearch/pull/117172", - null, false, null ); @@ -115,7 +135,7 @@ static DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate te return null; } - static DeprecationIssue checkLegacyTiersInComponentTemplates(ComponentTemplate componentTemplate) { + private DeprecationIssue checkLegacyTiersInComponentTemplates(ComponentTemplate componentTemplate) { Template template = componentTemplate.template(); List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(template.settings()); if (deprecatedSettings.isEmpty()) { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java deleted file mode 100644 
index 0b5eb7ada7655..0000000000000 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.deprecation; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.core.action.util.PageParams; -import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; -import org.elasticsearch.xpack.core.transform.action.GetTransformAction; -import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; - -import java.util.ArrayList; -import java.util.List; - -public class TransformDeprecationChecker implements DeprecationChecker { - - public static final String TRANSFORM_DEPRECATION_KEY = "transform_settings"; - - @Override - public boolean enabled(Settings settings) { - // always enabled - return true; - } - - @Override - public void check(Components components, ActionListener deprecationIssueListener) { - - PageParams startPage = new PageParams(0, PageParams.DEFAULT_SIZE); - List issues = new ArrayList<>(); - recursiveGetTransformsAndCollectDeprecations( - components, - issues, - startPage, - deprecationIssueListener.delegateFailureAndWrap((l, allIssues) -> l.onResponse(new CheckResult(getName(), allIssues))) - ); - } - - @Override - public String getName() { - return TRANSFORM_DEPRECATION_KEY; - } - - private static void recursiveGetTransformsAndCollectDeprecations( - Components components, - List issues, - PageParams page, - ActionListener> listener - ) { - final GetTransformAction.Request request = new GetTransformAction.Request(Metadata.ALL); - request.setPageParams(page); - request.setAllowNoResources(true); - - components.client() - .execute(GetTransformAction.INSTANCE, request, listener.delegateFailureAndWrap((delegate, getTransformResponse) -> { - for (TransformConfig config : getTransformResponse.getTransformConfigurations()) { - issues.addAll(config.checkForDeprecations(components.xContentRegistry())); - } - if (getTransformResponse.getTransformConfigurationCount() >= (page.getFrom() + page.getSize())) { - PageParams nextPage = new PageParams(page.getFrom() + page.getSize(), PageParams.DEFAULT_SIZE); - recursiveGetTransformsAndCollectDeprecations(components, issues, nextPage, delegate); - } else { - delegate.onResponse(issues); - } - })); - } -} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java index 5ff1acf2c0e24..c30d8829c23f3 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -6,12 +6,11 @@ */ package org.elasticsearch.xpack.deprecation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.FailedNodeException; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.client.internal.OriginSettingClient; @@ -19,35 +18,53 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; - -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.CLUSTER_SETTINGS_CHECKS; +import java.util.stream.Stream; public class TransportDeprecationInfoAction extends TransportMasterNodeReadAction< DeprecationInfoAction.Request, DeprecationInfoAction.Response> { - private static final List PLUGIN_CHECKERS = List.of(new MlDeprecationChecker(), new TransformDeprecationChecker()); - private static final Logger logger = LogManager.getLogger(TransportDeprecationInfoAction.class); + public static final Setting> SKIP_DEPRECATIONS_SETTING = Setting.stringListSetting( + "deprecation.skip_deprecated_settings", + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + private static final List PLUGIN_CHECKERS = List.of(new MlDeprecationChecker()); private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; private final Settings settings; private final NamedXContentRegistry xContentRegistry; private volatile List skipTheseDeprecations; + private final NodeDeprecationChecker nodeDeprecationChecker; + private final ClusterDeprecationChecker clusterDeprecationChecker; private final List resourceDeprecationCheckers; @Inject @@ -75,16 +92,17 @@ public TransportDeprecationInfoAction( this.indexNameExpressionResolver = indexNameExpressionResolver; this.settings = settings; this.xContentRegistry = xContentRegistry; - this.resourceDeprecationCheckers = List.of( + skipTheseDeprecations = SKIP_DEPRECATIONS_SETTING.get(settings); + nodeDeprecationChecker = new NodeDeprecationChecker(threadPool); + clusterDeprecationChecker = new 
ClusterDeprecationChecker(xContentRegistry); + resourceDeprecationCheckers = List.of( new IndexDeprecationChecker(indexNameExpressionResolver), new DataStreamDeprecationChecker(indexNameExpressionResolver), new TemplateDeprecationChecker(), new IlmPolicyDeprecationChecker() ); - skipTheseDeprecations = DeprecationChecks.SKIP_DEPRECATIONS_SETTING.get(settings); // Safe to register this here because it happens synchronously before the cluster service is started: - clusterService.getClusterSettings() - .addSettingsUpdateConsumer(DeprecationChecks.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations); + clusterService.getClusterSettings().addSettingsUpdateConsumer(SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations); } private void setSkipDeprecations(List<String> skipDeprecations) { @@ -104,50 +122,222 @@ protected final void masterOperation( ClusterState state, final ActionListener<DeprecationInfoAction.Response> listener ) { - NodesDeprecationCheckRequest nodeDepReq = new NodesDeprecationCheckRequest("_all"); - ClientHelper.executeAsyncWithOrigin( - client, - ClientHelper.DEPRECATION_ORIGIN, - NodesDeprecationCheckAction.INSTANCE, - nodeDepReq, - listener.delegateFailureAndWrap((delegate, response) -> { - if (response.hasFailures()) { - List<String> failedNodeIds = response.failures() - .stream() - .map(failure -> failure.nodeId() + ": " + failure.getMessage()) - .collect(Collectors.toList()); - logger.warn("nodes failed to run deprecation checks: {}", failedNodeIds); - for (FailedNodeException failure : response.failures()) { - logger.debug("node {} failed to run deprecation checks: {}", failure.nodeId(), failure); - } - } + PrecomputedData precomputedData = new PrecomputedData(); + try (var refs = new RefCountingListener(checkAndCreateResponse(state, request, precomputedData, listener))) { + nodeDeprecationChecker.check(client, refs.acquire(precomputedData::setOnceNodeSettingsIssues)); + transformConfigs(refs.acquire(precomputedData::setOnceTransformConfigs)); + DeprecationChecker.Components components = new DeprecationChecker.Components( + xContentRegistry, + settings, + new OriginSettingClient(client, ClientHelper.DEPRECATION_ORIGIN) + ); + pluginSettingIssues(PLUGIN_CHECKERS, components, refs.acquire(precomputedData::setOncePluginIssues)); + } + } - DeprecationChecker.Components components = new DeprecationChecker.Components( - xContentRegistry, - settings, - new OriginSettingClient(client, ClientHelper.DEPRECATION_ORIGIN) - ); - pluginSettingIssues( - PLUGIN_CHECKERS, - components, - new ThreadedActionListener<>( - client.threadPool().generic(), - delegate.map( - deprecationIssues -> DeprecationInfoAction.Response.from( - state, - indexNameExpressionResolver, - request, - response, - CLUSTER_SETTINGS_CHECKS, - deprecationIssues, - skipTheseDeprecations, - resourceDeprecationCheckers - ) - ) + /** + * This is the function that does the bulk of the logic of combining the necessary dependencies together, including the cluster state + * and the precalculated information in {@code precomputedData}, with the remaining checkers such as the cluster setting checker and + * the resource checkers. This function will run a significant part of the checks and build out the final list of issues that exist in + * the cluster. Because of that, it's important that it does not run on the transport thread; that's why it's combined with + * {@link #executeInGenericThreadpool(ActionListener)}.
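+ * <p> + * Note that the caller above acquires one listener per remote fetch (node deprecation checks, transform configs, plugin checks) from + * a {@link RefCountingListener} that wraps the listener returned by this method, so the response is built only after every + * {@code SetOnce} field of {@link PrecomputedData} has been populated.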
+ * + * @param state The cluster state + * @param request The originating request containing the index expressions to evaluate + * @param precomputedData Data from remote requests necessary to construct the response + * @param responseListener The listener expecting the {@link DeprecationInfoAction.Response} + * @return The listener that should be executed after all the remote requests have completed and the {@link PrecomputedData} + * is initialised. + */ + public ActionListener<Void> checkAndCreateResponse( + ClusterState state, + DeprecationInfoAction.Request request, + PrecomputedData precomputedData, + ActionListener<DeprecationInfoAction.Response> responseListener + ) { + return executeInGenericThreadpool( + ActionListener.running( + () -> responseListener.onResponse( + checkAndCreateResponse( + state, + indexNameExpressionResolver, + request, + skipTheseDeprecations, + clusterDeprecationChecker, + resourceDeprecationCheckers, + precomputedData ) - ); - }) + ) + ) + ); + } + + /** + * This is the function that does the bulk of the logic of combining the necessary dependencies together, including the cluster state + * and the precalculated information in {@code precomputedData}, with the remaining checkers such as the cluster setting checker and + * the resource checkers. This function will run a significant part of the checks and build out the final list of issues that exist in + * the cluster. It's important that it does not run on the transport thread; that's why it's combined with + * {@link #checkAndCreateResponse(ClusterState, DeprecationInfoAction.Request, PrecomputedData, ActionListener)}. We keep this separated + * for testing purposes. + * + * @param state The cluster state + * @param indexNameExpressionResolver Used to resolve indices into their concrete names + * @param request The originating request containing the index expressions to evaluate + * @param skipTheseDeprecatedSettings the settings that will be removed from cluster metadata and the index metadata of all the + * indexes specified by indexNames + * @param clusterDeprecationChecker The checker that provides the cluster settings deprecations warnings + * @param resourceDeprecationCheckers these are checkers that take as input the cluster state and return a map from resource type + * to issues grouped by the resource name.
+ * @param precomputedData data from remote requests necessary to construct the response + * @return The list of deprecation issues found in the cluster + */ + static DeprecationInfoAction.Response checkAndCreateResponse( + ClusterState state, + IndexNameExpressionResolver indexNameExpressionResolver, + DeprecationInfoAction.Request request, + List<String> skipTheseDeprecatedSettings, + ClusterDeprecationChecker clusterDeprecationChecker, + List<ResourceDeprecationChecker> resourceDeprecationCheckers, + PrecomputedData precomputedData + ) { + assert Transports.assertNotTransportThread("walking mappings in indexSettingsChecks is expensive"); + // Allow system index access here to prevent deprecation warnings when we call this API + String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + ClusterState stateWithSkippedSettingsRemoved = removeSkippedSettings(state, concreteIndexNames, skipTheseDeprecatedSettings); + List<DeprecationIssue> clusterSettingsIssues = clusterDeprecationChecker.check( + stateWithSkippedSettingsRemoved, + precomputedData.transformConfigs() + ); + + Map<String, Map<String, List<DeprecationIssue>>> resourceDeprecationIssues = new HashMap<>(); + for (ResourceDeprecationChecker resourceDeprecationChecker : resourceDeprecationCheckers) { + Map<String, List<DeprecationIssue>> issues = resourceDeprecationChecker.check( + stateWithSkippedSettingsRemoved, + request, + precomputedData + ); + if (issues.isEmpty() == false) { + resourceDeprecationIssues.put(resourceDeprecationChecker.getName(), issues); + } + } + + return new DeprecationInfoAction.Response( + clusterSettingsIssues, + precomputedData.nodeSettingsIssues(), + resourceDeprecationIssues, + precomputedData.pluginIssues() + ); + } + + /** + * This class holds the results of remote requests. These can be either checks that require remote requests such as + * {@code nodeSettingsIssues} and {@code pluginIssues} or metadata needed for more than one type of check such as + * {@code transformConfigs}. + */ + public static class PrecomputedData { + private final SetOnce<List<DeprecationIssue>> nodeSettingsIssues = new SetOnce<>(); + private final SetOnce<Map<String, List<DeprecationIssue>>> pluginIssues = new SetOnce<>(); + private final SetOnce<List<TransformConfig>> transformConfigs = new SetOnce<>(); + + public void setOnceNodeSettingsIssues(List<DeprecationIssue> nodeSettingsIssues) { + this.nodeSettingsIssues.set(nodeSettingsIssues); + } + + public void setOncePluginIssues(Map<String, List<DeprecationIssue>> pluginIssues) { + this.pluginIssues.set(pluginIssues); + } + + public void setOnceTransformConfigs(List<TransformConfig> transformConfigs) { + this.transformConfigs.set(transformConfigs); + } + + public List<DeprecationIssue> nodeSettingsIssues() { + return nodeSettingsIssues.get(); + } + + public Map<String, List<DeprecationIssue>> pluginIssues() { + return pluginIssues.get(); + } + + public List<TransformConfig> transformConfigs() { + return transformConfigs.get(); + } + } + + /** + * Removes the skipped settings from the selected indices and the component and index templates.
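+ * <p> + * Matching uses simple wildcard patterns via {@code Regex.simpleMatch}, so a {@code deprecation.skip_deprecated_settings} entry such + * as {@code xpack.monitoring.*} (a hypothetical example) filters every matching setting out of the cluster, index, component template, + * and index template settings before the checkers run.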
+ * @param state The cluster state to modify + * @param indexNames The names of the indexes whose settings need to be filtered + * @param skipTheseDeprecatedSettings The settings that will be removed from cluster metadata and the index metadata of all the + * indexes specified by indexNames + * @return A modified cluster state with the given settings removed + */ + private static ClusterState removeSkippedSettings(ClusterState state, String[] indexNames, List skipTheseDeprecatedSettings) { + // Short-circuit, no need to reconstruct the cluster state if there are no settings to remove + if (skipTheseDeprecatedSettings == null || skipTheseDeprecatedSettings.isEmpty()) { + return state; + } + ClusterState.Builder clusterStateBuilder = new ClusterState.Builder(state); + Metadata.Builder metadataBuilder = Metadata.builder(state.metadata()); + metadataBuilder.transientSettings( + metadataBuilder.transientSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) + ); + metadataBuilder.persistentSettings( + metadataBuilder.persistentSettings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) ); + Map indicesBuilder = new HashMap<>(state.getMetadata().indices()); + for (String indexName : indexNames) { + IndexMetadata indexMetadata = state.getMetadata().index(indexName); + IndexMetadata.Builder filteredIndexMetadataBuilder = new IndexMetadata.Builder(indexMetadata); + Settings filteredSettings = indexMetadata.getSettings() + .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false); + filteredIndexMetadataBuilder.settings(filteredSettings); + indicesBuilder.put(indexName, filteredIndexMetadataBuilder.build()); + } + metadataBuilder.componentTemplates(state.metadata().componentTemplates().entrySet().stream().map(entry -> { + String templateName = entry.getKey(); + ComponentTemplate componentTemplate = entry.getValue(); + Template template = componentTemplate.template(); + if (template.settings() == null || template.settings().isEmpty()) { + return Tuple.tuple(templateName, componentTemplate); + } + return Tuple.tuple( + templateName, + new ComponentTemplate( + Template.builder(template) + .settings(template.settings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)) + .build(), + componentTemplate.version(), + componentTemplate.metadata(), + componentTemplate.deprecated() + ) + ); + }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); + metadataBuilder.indexTemplates(state.metadata().templatesV2().entrySet().stream().map(entry -> { + String templateName = entry.getKey(); + ComposableIndexTemplate indexTemplate = entry.getValue(); + Template template = indexTemplate.template(); + if (template == null || template.settings() == null || template.settings().isEmpty()) { + return Tuple.tuple(templateName, indexTemplate); + } + return Tuple.tuple( + templateName, + indexTemplate.toBuilder() + .template( + Template.builder(indexTemplate.template()) + .settings( + indexTemplate.template() + .settings() + .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) + ) + ) + .build() + ); + }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); + + metadataBuilder.indices(indicesBuilder); + clusterStateBuilder.metadata(metadataBuilder); + return clusterStateBuilder.build(); } static void pluginSettingIssues( @@ -176,4 +366,33 @@ static void pluginSettingIssues( } } + private void transformConfigs(ActionListener> transformConfigsListener) { + 
transformConfigs(new PageParams(0, PageParams.DEFAULT_SIZE), transformConfigsListener.map(Stream::toList)); + } + + private void transformConfigs(PageParams currentPage, ActionListener> currentPageListener) { + var request = new GetTransformAction.Request(Metadata.ALL); + request.setPageParams(currentPage); + request.setAllowNoResources(true); + + client.execute( + GetTransformAction.INSTANCE, + request, + executeInGenericThreadpool(currentPageListener.delegateFailureAndWrap((delegate, getTransformConfigResponse) -> { + var currentPageOfConfigs = getTransformConfigResponse.getTransformConfigurations().stream(); + var currentPageSize = currentPage.getFrom() + currentPage.getSize(); + var totalTransformConfigCount = getTransformConfigResponse.getTransformConfigurationCount(); + if (totalTransformConfigCount >= currentPageSize) { + var nextPage = new PageParams(currentPageSize, PageParams.DEFAULT_SIZE); + transformConfigs(nextPage, delegate.map(nextPageOfConfigs -> Stream.concat(currentPageOfConfigs, nextPageOfConfigs))); + } else { + delegate.onResponse(currentPageOfConfigs); + } + })) + ); + } + + private ActionListener executeInGenericThreadpool(ActionListener listener) { + return new ThreadedActionListener<>(threadPool.generic(), listener); + } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java index 745f5e7ae8959..befe0bd6b41a4 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckAction.java @@ -36,6 +36,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Objects; import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING; @@ -75,10 +76,10 @@ public TransportNodeDeprecationCheckAction( this.pluginsService = pluginsService; this.licenseState = licenseState; this.clusterInfoService = clusterInfoService; - skipTheseDeprecations = DeprecationChecks.SKIP_DEPRECATIONS_SETTING.get(settings); + skipTheseDeprecations = TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.get(settings); // Safe to register this here because it happens synchronously before the cluster service is started: clusterService.getClusterSettings() - .addSettingsUpdateConsumer(DeprecationChecks.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations); + .addSettingsUpdateConsumer(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING, this::setSkipDeprecations); } private void setSkipDeprecations(List skipDeprecations) { @@ -106,13 +107,13 @@ protected NodesDeprecationCheckAction.NodeResponse newNodeResponse(StreamInput i @Override protected NodesDeprecationCheckAction.NodeResponse nodeOperation(NodesDeprecationCheckAction.NodeRequest request, Task task) { - return nodeOperation(request, DeprecationChecks.NODE_SETTINGS_CHECKS); + return nodeOperation(request, NodeDeprecationChecks.SINGLE_NODE_CHECKS); } NodesDeprecationCheckAction.NodeResponse nodeOperation( NodesDeprecationCheckAction.NodeRequest request, List< - DeprecationChecks.NodeDeprecationCheck< + NodeDeprecationChecks.NodeDeprecationCheck< Settings, PluginsAndModules, ClusterState, @@ -130,10 +131,10 @@ NodesDeprecationCheckAction.NodeResponse nodeOperation( 
.metadata(Metadata.builder(metadata).transientSettings(transientSettings).persistentSettings(persistentSettings).build()) .build(); - List<DeprecationIssue> issues = DeprecationInfoAction.filterChecks( - nodeSettingsChecks, - (c) -> c.apply(filteredNodeSettings, pluginsService.info(), filteredClusterState, licenseState) - ); + List<DeprecationIssue> issues = nodeSettingsChecks.stream() + .map(c -> c.apply(filteredNodeSettings, pluginsService.info(), filteredClusterState, licenseState)) + .filter(Objects::nonNull) + .toList(); DeprecationIssue watermarkIssue = checkDiskLowWatermark( filteredNodeSettings, filteredClusterState.metadata().settings(), diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java index e3c205ff8c740..a8dd1d464e30c 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java @@ -69,29 +69,21 @@ public void testOldIndicesCheck() { ); // We know that the data stream checks ignore the request. - Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState, null); + Map<String, List<DeprecationIssue>> issuesByDataStream = checker.check(clusterState); assertThat(issuesByDataStream.size(), equalTo(1)); assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } - public void testOldIndicesCheckWithOnlyClosedOrNewIndices() { + public void testOldIndicesCheckWithOnlyNewIndices() { // This tests what happens when there are no old indices, open or closed. We expect no deprecation warning.
- int oldClosedIndexCount = randomIntBetween(1, 100); int newOpenIndexCount = randomIntBetween(0, 100); int newClosedIndexCount = randomIntBetween(0, 100); Map nameToIndexMetadata = new HashMap<>(); Set expectedIndices = new HashSet<>(); - DataStream dataStream = createTestDataStream( - 0, - oldClosedIndexCount, - newOpenIndexCount, - newClosedIndexCount, - nameToIndexMetadata, - expectedIndices - ); + DataStream dataStream = createTestDataStream(0, 0, newOpenIndexCount, newClosedIndexCount, nameToIndexMetadata, expectedIndices); Metadata metadata = Metadata.builder() .indices(nameToIndexMetadata) @@ -99,7 +91,7 @@ public void testOldIndicesCheckWithOnlyClosedOrNewIndices() { .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - Map> issuesByDataStream = checker.check(clusterState, null); + Map> issuesByDataStream = checker.check(clusterState); assertThat(issuesByDataStream.size(), equalTo(0)); } @@ -145,7 +137,7 @@ public void testOldIndicesCheckWithClosedAndOpenIndices() { ) ); - Map> issuesByDataStream = checker.check(clusterState, null); + Map> issuesByDataStream = checker.check(clusterState); assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } @@ -168,7 +160,7 @@ private DataStream createTestDataStream( allIndices.add(createOldIndex(i, false, nameToIndexMetadata, expectedIndices)); } for (int i = 0; i < oldClosedIndexCount; i++) { - allIndices.add(createOldIndex(i, true, nameToIndexMetadata, null)); + allIndices.add(createOldIndex(i, true, nameToIndexMetadata, expectedIndices)); } for (int i = 0; i < newOpenIndexCount; i++) { allIndices.add(createNewIndex(i, false, nameToIndexMetadata)); @@ -218,7 +210,7 @@ private Index createIndex( ) { Settings.Builder settingsBuilder = isOld ? settings(IndexVersion.fromId(7170099)) : settings(IndexVersion.current()); String indexName = (isOld ? "old-" : "new-") + (isClosed ? "closed-" : "") + "data-stream-index-" + suffix; - if (isOld && isClosed == false) { // we only expect warnings on open old indices + if (isOld) { if (expectedIndices.isEmpty() == false && randomIntBetween(0, 2) == 0) { settingsBuilder.put(INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE); } else { @@ -298,14 +290,14 @@ public void testOldIndicesIgnoredWarningCheck() { + "OK to remain read-only after upgrade", false, ofEntries( - entry("reindex_required", true), + entry("reindex_required", false), entry("total_backing_indices", oldIndexCount + newIndexCount), entry("ignored_indices_requiring_upgrade_count", expectedIndices.size()), entry("ignored_indices_requiring_upgrade", expectedIndices) ) ); - Map> issuesByDataStream = checker.check(clusterState, null); + Map> issuesByDataStream = checker.check(clusterState); assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java deleted file mode 100644 index 9a57450b7fad7..0000000000000 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java
deleted file mode 100644
index 9a57450b7fad7..0000000000000
--- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.deprecation;
-
-import org.elasticsearch.core.Tuple;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.function.Supplier;
-
-import static org.hamcrest.Matchers.equalTo;
-
-public class DeprecationChecksTests extends ESTestCase {
-
-    public void testFilterChecks() {
-        DeprecationIssue issue = createRandomDeprecationIssue();
-        int numChecksPassed = randomIntBetween(0, 5);
-        int numChecksFailed = 10 - numChecksPassed;
-        List<Supplier<DeprecationIssue>> checks = new ArrayList<>();
-        for (int i = 0; i < numChecksFailed; i++) {
-            checks.add(() -> issue);
-        }
-        for (int i = 0; i < numChecksPassed; i++) {
-            checks.add(() -> null);
-        }
-        List<DeprecationIssue> filteredIssues = DeprecationInfoAction.filterChecks(checks, Supplier::get);
-        assertThat(filteredIssues.size(), equalTo(numChecksFailed));
-    }
-
-    private static DeprecationIssue createRandomDeprecationIssue() {
-        String details = randomBoolean() ? randomAlphaOfLength(10) : null;
-        return new DeprecationIssue(
-            randomFrom(DeprecationIssue.Level.values()),
-            randomAlphaOfLength(10),
-            randomAlphaOfLength(10),
-            details,
-            randomBoolean(),
-            randomMap(1, 5, () -> Tuple.tuple(randomAlphaOfLength(4), randomAlphaOfLength(4)))
-        );
-    }
-}
diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java
index 28fd14abecbc1..537c3eb84a902 100644
--- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java
+++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java
@@ -6,50 +6,17 @@
  */
 package org.elasticsearch.xpack.deprecation;

-import org.elasticsearch.ElasticsearchStatusException;
-import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.ComponentTemplate;
-import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
-import org.elasticsearch.cluster.metadata.DataStream;
-import org.elasticsearch.cluster.metadata.IndexMetadata;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.metadata.Metadata;
-import org.elasticsearch.cluster.metadata.Template;
-import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.index.IndexVersion;
-import org.elasticsearch.indices.TestIndexNameExpressionResolver;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentFactory;
 import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
 import org.elasticsearch.xpack.core.deprecation.DeprecationIssue.Level;
-import org.junit.Assert;

-import java.io.IOException;
-import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.Response.RESERVED_NAMES; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.core.IsEqual.equalTo; - public class DeprecationInfoActionResponseTests extends AbstractWireSerializingTestCase { @Override @@ -153,318 +120,11 @@ protected Writeable.Reader instanceReader() { return DeprecationInfoAction.Response::new; } - public void testFrom() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all"); - mapping.field("enabled", false); - mapping.endObject().endObject(); - - Metadata metadata = Metadata.builder() - .put( - IndexMetadata.builder("test") - .putMapping(Strings.toString(mapping)) - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - ) - .build(); - - DiscoveryNode discoveryNode = DiscoveryNodeUtils.create("test", new TransportAddress(TransportAddress.META_ADDRESS, 9300)); - ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); - boolean clusterIssueFound = randomBoolean(); - boolean nodeIssueFound = randomBoolean(); - boolean indexIssueFound = randomBoolean(); - boolean dataStreamIssueFound = randomBoolean(); - boolean indexTemplateIssueFound = randomBoolean(); - boolean componentTemplateIssueFound = randomBoolean(); - boolean ilmPolicyIssueFound = randomBoolean(); - DeprecationIssue foundIssue = createTestDeprecationIssue(); - List> clusterSettingsChecks = List.of((s) -> clusterIssueFound ? foundIssue : null); - List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { - if (indexIssueFound) { - return Map.of("test", List.of(foundIssue)); - } - return Map.of(); - }), createResourceChecker("data_streams", (cs, req) -> { - if (dataStreamIssueFound) { - return Map.of("my-ds", List.of(foundIssue)); - } - return Map.of(); - }), createResourceChecker("templates", (cs, req) -> { - Map> issues = new HashMap<>(); - if (componentTemplateIssueFound) { - issues.put("my-component-template", List.of(foundIssue)); - } - if (indexTemplateIssueFound) { - issues.put("my-index-template", List.of(foundIssue)); - } - return issues; - }), createResourceChecker("ilm_policies", (cs, req) -> { - if (ilmPolicyIssueFound) { - return Map.of("my-policy", List.of(foundIssue)); - } - return Map.of(); - })); - - NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( - new ClusterName(randomAlphaOfLength(5)), - nodeIssueFound ? 
List.of(new NodesDeprecationCheckAction.NodeResponse(discoveryNode, List.of(foundIssue))) : List.of(), - List.of() - ); - - DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); - DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from( - state, - resolver, - request, - nodeDeprecationIssues, - clusterSettingsChecks, - new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings - List.of(), - resourceCheckers - ); - - if (clusterIssueFound) { - assertThat(response.getClusterSettingsIssues(), equalTo(List.of(foundIssue))); - } else { - assertThat(response.getClusterSettingsIssues(), empty()); - } - - if (nodeIssueFound) { - String details = foundIssue.getDetails() != null ? foundIssue.getDetails() + " " : ""; - DeprecationIssue mergedFoundIssue = new DeprecationIssue( - foundIssue.getLevel(), - foundIssue.getMessage(), - foundIssue.getUrl(), - details + "(nodes impacted: [" + discoveryNode.getName() + "])", - foundIssue.isResolveDuringRollingUpgrade(), - foundIssue.getMeta() - ); - assertThat(response.getNodeSettingsIssues(), equalTo(List.of(mergedFoundIssue))); - } else { - assertTrue(response.getNodeSettingsIssues().isEmpty()); - } - - if (indexIssueFound) { - assertThat(response.getIndexSettingsIssues(), equalTo(Map.of("test", List.of(foundIssue)))); - } else { - assertTrue(response.getIndexSettingsIssues().isEmpty()); - } - if (dataStreamIssueFound) { - assertThat(response.getDataStreamDeprecationIssues(), equalTo(Map.of("my-ds", List.of(foundIssue)))); - } else { - assertTrue(response.getDataStreamDeprecationIssues().isEmpty()); - } - if (ilmPolicyIssueFound) { - assertThat(response.getIlmPolicyDeprecationIssues(), equalTo(Map.of("my-policy", List.of(foundIssue)))); - } else { - assertTrue(response.getIlmPolicyDeprecationIssues().isEmpty()); - } - if (componentTemplateIssueFound == false && indexTemplateIssueFound == false) { - assertTrue(response.getTemplateDeprecationIssues().isEmpty()); - } else { - if (componentTemplateIssueFound) { - assertThat(response.getTemplateDeprecationIssues().get("my-component-template"), equalTo(List.of(foundIssue))); - } - if (indexTemplateIssueFound) { - assertThat(response.getTemplateDeprecationIssues().get("my-index-template"), equalTo(List.of(foundIssue))); - } - - } - } - - public void testFromWithMergeableNodeIssues() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all"); - mapping.field("enabled", false); - mapping.endObject().endObject(); - - Metadata metadata = Metadata.builder() - .put( - IndexMetadata.builder("test") - .putMapping(Strings.toString(mapping)) - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - ) - .build(); - - DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1") - .name("node1") - .ephemeralId("ephemeralId1") - .address("hostName1", "hostAddress1", new TransportAddress(TransportAddress.META_ADDRESS, 9300)) - .roles(Set.of()) - .build(); - DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2") - .name("node2") - .ephemeralId("ephemeralId2") - .address("hostName2", "hostAddress2", new TransportAddress(TransportAddress.META_ADDRESS, 9500)) - .roles(Set.of()) - .build(); - ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); - Map metaMap1 = 
DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.1", "setting.2", "setting.3")); - Map metaMap2 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.2", "setting.3")); - DeprecationIssue foundIssue1 = createTestDeprecationIssue(metaMap1); - DeprecationIssue foundIssue2 = createTestDeprecationIssue(foundIssue1, metaMap2); - List> clusterSettingsChecks = List.of(); - List resourceCheckers = List.of(); - - NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( - new ClusterName(randomAlphaOfLength(5)), - Arrays.asList( - new NodesDeprecationCheckAction.NodeResponse(node1, List.of(foundIssue1)), - new NodesDeprecationCheckAction.NodeResponse(node2, List.of(foundIssue2)) - ), - List.of() - ); - - DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); - DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from( - state, - resolver, - request, - nodeDeprecationIssues, - clusterSettingsChecks, - new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings - List.of(), - resourceCheckers - ); - - String details = foundIssue1.getDetails() != null ? foundIssue1.getDetails() + " " : ""; - DeprecationIssue mergedFoundIssue = new DeprecationIssue( - foundIssue1.getLevel(), - foundIssue1.getMessage(), - foundIssue1.getUrl(), - details + "(nodes impacted: [" + node1.getName() + ", " + node2.getName() + "])", - foundIssue1.isResolveDuringRollingUpgrade(), - foundIssue2.getMeta() - ); - assertThat(response.getNodeSettingsIssues(), equalTo(List.of(mergedFoundIssue))); - } - - public void testRemoveSkippedSettings() { - Settings.Builder settingsBuilder = settings(IndexVersion.current()); - settingsBuilder.put("some.deprecated.property", "someValue1"); - settingsBuilder.put("some.other.bad.deprecated.property", "someValue2"); - settingsBuilder.put("some.undeprecated.property", "someValue3"); - settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); - Settings inputSettings = settingsBuilder.build(); - IndexMetadata dataStreamIndexMetadata = IndexMetadata.builder("ds-test-index-1") - .settings(inputSettings) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - ComponentTemplate componentTemplate = new ComponentTemplate(Template.builder().settings(inputSettings).build(), null, null); - ComposableIndexTemplate indexTemplate = ComposableIndexTemplate.builder() - .template(Template.builder().settings(inputSettings)) - .build(); - Metadata metadata = Metadata.builder() - .put(IndexMetadata.builder("test").settings(inputSettings).numberOfShards(1).numberOfReplicas(0)) - .put(dataStreamIndexMetadata, true) - .put(DataStream.builder("ds-test", List.of(dataStreamIndexMetadata.getIndex())).build()) - .indexTemplates(Map.of("my-index-template", indexTemplate)) - .componentTemplates(Map.of("my-component-template", componentTemplate)) - .persistentSettings(inputSettings) - .build(); - - ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); - AtomicReference visibleClusterSettings = new AtomicReference<>(); - List> clusterSettingsChecks = List.of((s) -> { - visibleClusterSettings.set(s.getMetadata().settings()); - return null; - }); - AtomicReference visibleIndexSettings = new AtomicReference<>(); - AtomicReference visibleComponentTemplateSettings = new 
AtomicReference<>(); - AtomicReference visibleIndexTemplateSettings = new AtomicReference<>(); - AtomicInteger backingIndicesCount = new AtomicInteger(0); - List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { - for (String indexName : resolver.concreteIndexNames(cs, req)) { - visibleIndexSettings.set(cs.metadata().index(indexName).getSettings()); - } - return Map.of(); - }), createResourceChecker("data_streams", (cs, req) -> { - cs.metadata().dataStreams().values().forEach(ds -> backingIndicesCount.set(ds.getIndices().size())); - return Map.of(); - }), createResourceChecker("templates", (cs, req) -> { - cs.metadata() - .componentTemplates() - .values() - .forEach(template -> visibleComponentTemplateSettings.set(template.template().settings())); - cs.metadata().templatesV2().values().forEach(template -> visibleIndexTemplateSettings.set(template.template().settings())); - return Map.of(); - })); - - NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( - new ClusterName(randomAlphaOfLength(5)), - List.of(), - List.of() - ); - - DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); - DeprecationInfoAction.Response.from( - state, - resolver, - request, - nodeDeprecationIssues, - clusterSettingsChecks, - new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings - List.of("some.deprecated.property", "some.other.*.deprecated.property"), - resourceCheckers - ); - - settingsBuilder = settings(IndexVersion.current()); - settingsBuilder.put("some.undeprecated.property", "someValue3"); - settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); - - Settings expectedSettings = settingsBuilder.build(); - Settings resultClusterSettings = visibleClusterSettings.get(); - Assert.assertNotNull(resultClusterSettings); - Assert.assertEquals(expectedSettings, visibleClusterSettings.get()); - - Settings resultIndexSettings = visibleIndexSettings.get(); - Assert.assertNotNull(resultIndexSettings); - Assert.assertEquals("someValue3", resultIndexSettings.get("some.undeprecated.property")); - Assert.assertEquals(resultIndexSettings.getAsList("some.undeprecated.list.property"), List.of("someValue4", "someValue5")); - Assert.assertFalse(resultIndexSettings.hasValue("some.deprecated.property")); - Assert.assertFalse(resultIndexSettings.hasValue("some.other.bad.deprecated.property")); - - assertThat(backingIndicesCount.get(), equalTo(1)); - - Assert.assertNotNull(visibleComponentTemplateSettings.get()); - Assert.assertEquals(expectedSettings, visibleComponentTemplateSettings.get()); - Assert.assertNotNull(visibleIndexTemplateSettings.get()); - Assert.assertEquals(expectedSettings, visibleIndexTemplateSettings.get()); - } - - public void testCtorFailure() { - Map> indexNames = Stream.generate(() -> randomAlphaOfLength(10)) - .limit(10) - .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); - Map> dataStreamNames = Stream.generate(() -> randomAlphaOfLength(10)) - .limit(10) - .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); - Set shouldCauseFailure = new HashSet<>(RESERVED_NAMES); - for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) { - Map> pluginSettingsIssues = randomSubsetOf(3, shouldCauseFailure).stream() - .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); - expectThrows( - ElasticsearchStatusException.class, - () -> new DeprecationInfoAction.Response( - 
List.of(), - List.of(), - Map.of("data_streams", dataStreamNames, "index_settings", indexNames), - pluginSettingsIssues - ) - ); - } - } - - private static DeprecationIssue createTestDeprecationIssue() { + static DeprecationIssue createTestDeprecationIssue() { return createTestDeprecationIssue(randomMap(1, 5, () -> Tuple.tuple(randomAlphaOfLength(4), randomAlphaOfLength(4)))); } - private static DeprecationIssue createTestDeprecationIssue(Map metaMap) { + static DeprecationIssue createTestDeprecationIssue(Map metaMap) { String details = randomBoolean() ? randomAlphaOfLength(10) : null; return new DeprecationIssue( randomFrom(Level.values()), @@ -476,7 +136,7 @@ private static DeprecationIssue createTestDeprecationIssue(Map m ); } - private static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seedIssue, Map metaMap) { + static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seedIssue, Map metaMap) { return new DeprecationIssue( seedIssue.getLevel(), seedIssue.getMessage(), @@ -487,27 +147,9 @@ private static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seed ); } - private static List randomDeprecationIssues() { + static List randomDeprecationIssues() { return Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue) .limit(randomIntBetween(0, 10)) .collect(Collectors.toList()); } - - private static ResourceDeprecationChecker createResourceChecker( - String name, - BiFunction>> check - ) { - return new ResourceDeprecationChecker() { - - @Override - public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { - return check.apply(clusterState, request); - } - - @Override - public String getName() { - return name; - } - }; - } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java index 2032a6faedc92..475cd3e6a24bc 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java @@ -90,7 +90,7 @@ public void testLegacyTierSettings() { ) .build(); - Map> issuesByComponentTemplate = checker.check(clusterState, null); + Map> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, "Configuring tiers via filtered allocation is not recommended.", @@ -136,7 +136,7 @@ public void testFrozenAction() { ) .build(); - Map> issuesByComponentTemplate = checker.check(clusterState, null); + Map> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, "ILM policy [deprecated-action] contains the action 'freeze' that is deprecated and will be removed in a future version.", diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java index dae7970d4a2e7..44a7d4bf57bdc 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java @@ -7,11 
+7,16 @@ package org.elasticsearch.xpack.deprecation; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamMetadata; import org.elasticsearch.cluster.metadata.DataStreamOptions; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -20,12 +25,15 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.transform.transforms.DestConfig; +import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -38,40 +46,172 @@ public class IndexDeprecationCheckerTests extends ESTestCase { - private final IndexDeprecationChecker checker = new IndexDeprecationChecker(TestIndexNameExpressionResolver.newInstance()); + private static final IndexVersion OLD_VERSION = IndexVersion.fromId(7170099); + private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); + private final IndexDeprecationChecker checker = new IndexDeprecationChecker(indexNameExpressionResolver); + private final TransportDeprecationInfoAction.PrecomputedData emptyPrecomputedData = + new TransportDeprecationInfoAction.PrecomputedData(); + private final IndexMetadata.State indexMetdataState; + + public IndexDeprecationCheckerTests(@Name("indexMetadataState") IndexMetadata.State indexMetdataState) { + this.indexMetdataState = indexMetdataState; + emptyPrecomputedData.setOnceNodeSettingsIssues(List.of()); + emptyPrecomputedData.setOncePluginIssues(Map.of()); + emptyPrecomputedData.setOnceTransformConfigs(List.of()); + } + + @ParametersFactory + public static List createParameters() { + return List.of(new Object[] { IndexMetadata.State.OPEN }, new Object[] { IndexMetadata.State.CLOSE }); + } public void testOldIndicesCheck() { - IndexVersion createdWith = IndexVersion.fromId(7170099); IndexMetadata indexMetadata = IndexMetadata.builder("test") - .settings(settings(createdWith)) + .settings(settings(OLD_VERSION)) .numberOfShards(1) .numberOfReplicas(0) + .state(indexMetdataState) .build(); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) .build(); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, "Old index with a compatibility version < 9.0", "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This index has version: " + createdWith.toReleaseVersion(), + "This index has version: " + 
OLD_VERSION.toReleaseVersion(), false, singletonMap("reindex_required", true) ); Map> issuesByIndex = checker.check( clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); List issues = issuesByIndex.get("test"); assertEquals(singletonList(expected), issues); } + public void testOldTransformIndicesCheck() { + var checker = new IndexDeprecationChecker(indexNameExpressionResolver); + var indexMetadata = indexMetadata("test", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); + var expected = new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and requires action before upgrading to 9.0. " + + "The following transforms are configured to write to this index: [test-transform]. Refer to the " + + "migration guide to learn more about how to prepare transforms destination indices for your upgrade.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform")) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test", List.of("test-transform"))) + ); + assertEquals(singletonList(expected), issuesByIndex.get("test")); + } + + public void testOldIndicesCheckWithMultipleTransforms() { + var indexMetadata = indexMetadata("test", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); + var expected = new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and requires action before upgrading to 9.0. " + + "The following transforms are configured to write to this index: [test-transform1, test-transform2]. 
Refer to the " + + "migration guide to learn more about how to prepare transforms destination indices for your upgrade.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform1", "test-transform2")) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test", List.of("test-transform1", "test-transform2"))) + ); + assertEquals(singletonList(expected), issuesByIndex.get("test")); + } + + public void testMultipleOldIndicesCheckWithTransforms() { + var indexMetadata1 = indexMetadata("test1", OLD_VERSION); + var indexMetadata2 = indexMetadata("test2", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata1, true).put(indexMetadata2, true)) + .blocks(clusterBlocksForIndices(indexMetadata1, indexMetadata2)) + .build(); + var expected = Map.of( + "test1", + List.of( + new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and requires action before upgrading to 9.0. " + + "The following transforms are configured to write to this index: [test-transform1]. Refer to the " + + "migration guide to learn more about how to prepare transforms destination indices for your upgrade.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform1")) + ) + ), + "test2", + List.of( + new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "One or more Transforms write to this index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and requires action before upgrading to 9.0. " + + "The following transforms are configured to write to this index: [test-transform2]. 
Refer to the " + + "migration guide to learn more about how to prepare transforms destination indices for your upgrade.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform2")) + ) + ) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test1", List.of("test-transform1"), "test2", List.of("test-transform2"))) + ); + assertEquals(expected, issuesByIndex); + } + + private IndexMetadata indexMetadata(String indexName, IndexVersion indexVersion) { + return IndexMetadata.builder(indexName) + .settings(settings(indexVersion)) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + } + public void testOldIndicesCheckDataStreamIndex() { - IndexVersion createdWith = IndexVersion.fromId(7170099); IndexMetadata indexMetadata = IndexMetadata.builder(".ds-test") - .settings(settings(createdWith).put("index.hidden", true)) + .settings(settings(OLD_VERSION).put("index.hidden", true)) .numberOfShards(1) .numberOfReplicas(0) + .state(indexMetdataState) .build(); DataStream dataStream = new DataStream( randomAlphaOfLength(10), @@ -103,81 +243,188 @@ public void testOldIndicesCheckDataStreamIndex() { ) ) ) + .blocks(clusterBlocksForIndices(indexMetadata)) .build(); Map> issuesByIndex = checker.check( clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat(issuesByIndex.size(), equalTo(0)); } public void testOldIndicesCheckSnapshotIgnored() { - IndexVersion createdWith = IndexVersion.fromId(7170099); - Settings.Builder settings = settings(createdWith); + Settings.Builder settings = settings(OLD_VERSION); settings.put(INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) - .metadata(Metadata.builder().put(indexMetadata, true)) - .build(); - - Map> issuesByIndex = checker.check( - clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) - ); - assertThat(issuesByIndex.size(), equalTo(0)); - } - - public void testOldIndicesCheckClosedIgnored() { - IndexVersion createdWith = IndexVersion.fromId(7170099); - Settings.Builder settings = settings(createdWith); IndexMetadata indexMetadata = IndexMetadata.builder("test") .settings(settings) .numberOfShards(1) .numberOfReplicas(0) - .state(IndexMetadata.State.CLOSE) + .state(indexMetdataState) .build(); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) .build(); + Map> issuesByIndex = checker.check( clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat(issuesByIndex.size(), equalTo(0)); } public void testOldIndicesIgnoredWarningCheck() { - IndexVersion createdWith = IndexVersion.fromId(7170099); - Settings.Builder settings = settings(createdWith).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); + IndexMetadata 
indexMetadata = readonlyIndexMetadata("test", OLD_VERSION); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) .build(); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, - "Old index with a compatibility version < 9.0 Has Been Ignored", + "Old index with a compatibility version < 9.0 has been ignored", "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This read-only index has version: " + createdWith.toReleaseVersion() + " and will be supported as read-only in 9.0", + "This read-only index has version: " + OLD_VERSION.toReleaseVersion() + " and will be supported as read-only in 9.0", false, singletonMap("reindex_required", true) ); Map> issuesByIndex = checker.check( clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertTrue(issuesByIndex.containsKey("test")); assertEquals(List.of(expected), issuesByIndex.get("test")); } + private IndexMetadata readonlyIndexMetadata(String indexName, IndexVersion indexVersion) { + Settings.Builder settings = settings(indexVersion).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); + return IndexMetadata.builder(indexName).settings(settings).numberOfShards(1).numberOfReplicas(0).state(indexMetdataState).build(); + } + + public void testOldTransformIndicesIgnoredCheck() { + var checker = new IndexDeprecationChecker(indexNameExpressionResolver); + var indexMetadata = readonlyIndexMetadata("test", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); + var expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and will be supported as a read-only index in 9.0. " + + "The following transforms are no longer able to write to this index: [test-transform]. 
Refer to the " + + "migration guide to learn more about how to handle your transforms destination indices.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform")) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test", List.of("test-transform"))) + ); + assertEquals(singletonList(expected), issuesByIndex.get("test")); + } + + public void testOldIndicesIgnoredCheckWithMultipleTransforms() { + var indexMetadata = readonlyIndexMetadata("test", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); + var expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and will be supported as a read-only index in 9.0. " + + "The following transforms are no longer able to write to this index: [test-transform1, test-transform2]. Refer to the " + + "migration guide to learn more about how to handle your transforms destination indices.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform1", "test-transform2")) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test", List.of("test-transform1", "test-transform2"))) + ); + assertEquals(singletonList(expected), issuesByIndex.get("test")); + } + + public void testMultipleOldIndicesIgnoredCheckWithTransforms() { + var indexMetadata1 = readonlyIndexMetadata("test1", OLD_VERSION); + var indexMetadata2 = readonlyIndexMetadata("test2", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata1, true).put(indexMetadata2, true)) + .blocks(clusterBlocksForIndices(indexMetadata1, indexMetadata2)) + .build(); + var expected = Map.of( + "test1", + List.of( + new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and will be supported as a read-only index in 9.0. " + + "The following transforms are no longer able to write to this index: [test-transform1]. Refer to the " + + "migration guide to learn more about how to handle your transforms destination indices.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform1")) + ) + ), + "test2", + List.of( + new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "One or more Transforms write to this old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/migrating-9.0.html" + + "#breaking_90_transform_destination_index", + "This index was created in version [" + + OLD_VERSION.toReleaseVersion() + + "] and will be supported as a read-only index in 9.0. 
" + + "The following transforms are no longer able to write to this index: [test-transform2]. Refer to the " + + "migration guide to learn more about how to handle your transforms destination indices.", + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform2")) + ) + ) + ); + var issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + createContextWithTransformConfigs(Map.of("test1", List.of("test-transform1"), "test2", List.of("test-transform2"))) + ); + assertEquals(expected, issuesByIndex); + } + public void testTranslogRetentionSettings() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomPositiveTimeValue()); settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), between(1, 1024) + "b"); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); List issues = issuesByIndex.get("test"); assertThat( @@ -208,11 +455,20 @@ public void testDefaultTranslogRetentionSettings() { settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), between(1, 1024) + "b"); settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false); } - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat(issuesByIndex.size(), equalTo(0)); } @@ -220,11 +476,20 @@ public void testDefaultTranslogRetentionSettings() { public void testIndexDataPathSetting() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(IndexMetadata.INDEX_DATA_PATH_SETTING.getKey(), createTempDir()); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + 
.build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); final String expectedUrl = "https://www.elastic.co/guide/en/elasticsearch/reference/7.13/breaking-changes-7.13.html#deprecate-shared-data-path-setting"; @@ -246,11 +511,20 @@ public void testIndexDataPathSetting() { public void testSimpleFSSetting() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(INDEX_STORE_TYPE_SETTING.getKey(), "simplefs"); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat( issuesByIndex.get("test"), @@ -269,30 +543,6 @@ public void testSimpleFSSetting() { ); } - public void testFrozenIndex() { - Settings.Builder settings = settings(IndexVersion.current()); - settings.put(FrozenEngine.INDEX_FROZEN.getKey(), true); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); - Map> issuesByIndex = checker.check( - state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) - ); - assertThat( - issuesByIndex.get("test"), - contains( - new DeprecationIssue( - DeprecationIssue.Level.WARNING, - "index [test] is a frozen index. The frozen indices feature is deprecated and will be removed in a future version", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/frozen-indices.html", - "Frozen indices no longer offer any advantages. 
Consider cold or frozen tiers in place of frozen indices.", - false, - null - ) - ) - ); - } - public void testCamelCaseDeprecation() { String simpleMapping = "{\n\"_doc\": {" + "\"properties\" : {\n" @@ -309,11 +559,16 @@ public void testCamelCaseDeprecation() { .numberOfShards(1) .numberOfReplicas(1) .putMapping(simpleMapping) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(simpleIndex, true)) + .blocks(clusterBlocksForIndices(simpleIndex)) .build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(simpleIndex, true)).build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, @@ -332,11 +587,20 @@ public void testLegacyTierIndex() { String filter = randomFrom("include", "exclude", "require"); String tier = randomFrom("hot", "warm", "cold", "frozen"); settings.put("index.routing.allocation." + filter + ".data", tier); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData ); assertThat( issuesByIndex.get("test"), @@ -354,4 +618,33 @@ public void testLegacyTierIndex() { ) ); } + + private ClusterBlocks clusterBlocksForIndices(IndexMetadata... 
indicesMetadatas) {
+        ClusterBlocks.Builder builder = ClusterBlocks.builder();
+        for (IndexMetadata indexMetadata : indicesMetadatas) {
+            if (indexMetadata.getState() == IndexMetadata.State.CLOSE) {
+                builder.addIndexBlock(indexMetadata.getIndex().getName(), MetadataIndexStateService.INDEX_CLOSED_BLOCK);
+            }
+        }
+        return builder.build();
+    }
+
+    private TransportDeprecationInfoAction.PrecomputedData createContextWithTransformConfigs(Map<String, List<String>> indexToTransform) {
+        List<TransformConfig> transforms = new ArrayList<>();
+        for (Map.Entry<String, List<String>> entry : indexToTransform.entrySet()) {
+            String index = entry.getKey();
+            for (String transform : entry.getValue()) {
+                transforms.add(
+                    TransformConfig.builder()
+                        .setId(transform)
+                        .setSource(new SourceConfig(randomAlphaOfLength(10)))
+                        .setDest(new DestConfig(index, List.of(), null))
+                        .build()
+                );
+            }
+        }
+        TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData();
+        precomputedData.setOnceTransformConfigs(transforms);
+        return precomputedData;
+    }
 }
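The helper above populates only the transform slot of the precomputed data through a setOnce-style setter, matching the setOnceNodeSettingsIssues and setOncePluginIssues calls in the test constructor. A rough JDK-only sketch of that set-once idiom (the production class is TransportDeprecationInfoAction.PrecomputedData; everything else here is illustrative):

import java.util.List;
import java.util.concurrent.atomic.AtomicReference;

// A set-once slot: a second write fails fast instead of silently replacing test state.
final class SetOnceSlot<T> {
    private final AtomicReference<T> value = new AtomicReference<>();

    void set(T v) {
        if (value.compareAndSet(null, v) == false) {
            throw new IllegalStateException("already set");
        }
    }

    T get() {
        return value.get();
    }
}

class PrecomputedDataSketch {
    private final SetOnceSlot<List<String>> transformConfigs = new SetOnceSlot<>();

    void setOnceTransformConfigs(List<String> configs) {
        transformConfigs.set(configs);
    }
}

Failing fast on a double write keeps a shared fixture from being mutated mid-test, which matters once the three slots are filled from independent async responses.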
diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationCheckerTests.java
new file mode 100644
index 0000000000000..78ddba87b9f85
--- /dev/null
+++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationCheckerTests.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.deprecation;
+
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xpack.core.deprecation.DeprecationIssue;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.elasticsearch.xpack.deprecation.DeprecationInfoActionResponseTests.createTestDeprecationIssue;
+import static org.hamcrest.core.IsEqual.equalTo;
+
+public class NodeDeprecationCheckerTests extends ESTestCase {
+
+    public void testMergingNodeIssues() throws IOException {
+        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all");
+        mapping.field("enabled", false);
+        mapping.endObject().endObject();
+
+        DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1")
+            .name("node1")
+            .ephemeralId("ephemeralId1")
+            .address("hostName1", "hostAddress1", new TransportAddress(TransportAddress.META_ADDRESS, 9300))
+            .roles(Set.of())
+            .build();
+        DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2")
+            .name("node2")
+            .ephemeralId("ephemeralId2")
+            .address("hostName2", "hostAddress2", new TransportAddress(TransportAddress.META_ADDRESS, 9500))
+            .roles(Set.of())
+            .build();
+        Map<String, Object> metaMap1 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.1", "setting.2", "setting.3"));
+        Map<String, Object> metaMap2 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.2", "setting.3"));
+        DeprecationIssue foundIssue1 = createTestDeprecationIssue(metaMap1);
+        DeprecationIssue foundIssue2 = createTestDeprecationIssue(foundIssue1, metaMap2);
+
+        NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse(
+            new ClusterName(randomAlphaOfLength(5)),
+            Arrays.asList(
+                new NodesDeprecationCheckAction.NodeResponse(node1, List.of(foundIssue1)),
+                new NodesDeprecationCheckAction.NodeResponse(node2, List.of(foundIssue2))
+            ),
+            List.of()
+        );
+
+        List<DeprecationIssue> result = NodeDeprecationChecker.reduceToDeprecationIssues(nodeDeprecationIssues);
+
+        String details = foundIssue1.getDetails() != null ? foundIssue1.getDetails() + " " : "";
+        DeprecationIssue mergedFoundIssue = new DeprecationIssue(
+            foundIssue1.getLevel(),
+            foundIssue1.getMessage(),
+            foundIssue1.getUrl(),
+            details + "(nodes impacted: [" + node1.getName() + ", " + node2.getName() + "])",
+            foundIssue1.isResolveDuringRollingUpgrade(),
+            foundIssue2.getMeta()
+        );
+        assertThat(result, equalTo(List.of(mergedFoundIssue)));
+    }
+}
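testMergingNodeIssues exercises the reducer that collapses the same issue reported by several nodes into a single entry whose details name the impacted nodes. A standalone sketch of such a merge, assuming grouping by message and the "(nodes impacted: [...])" suffix seen in the assertion above (the production reducer in NodeDeprecationChecker may differ in its grouping key and metadata handling):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

record NodeIssue(String nodeName, String message, String details) {}

final class IssueMergerSketch {
    // Group per-node copies of an issue by message and fold the node names into the details.
    static List<String> merge(List<NodeIssue> perNode) {
        Map<String, List<NodeIssue>> byMessage = new LinkedHashMap<>();
        for (NodeIssue issue : perNode) {
            byMessage.computeIfAbsent(issue.message(), k -> new ArrayList<>()).add(issue);
        }
        List<String> merged = new ArrayList<>();
        for (Map.Entry<String, List<NodeIssue>> entry : byMessage.entrySet()) {
            List<String> nodes = entry.getValue().stream().map(NodeIssue::nodeName).toList();
            String details = entry.getValue().get(0).details();
            String prefix = details != null ? details + " " : "";
            merged.add(entry.getKey() + ": " + prefix + "(nodes impacted: " + nodes + ")");
        }
        return merged;
    }
}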
diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java
index 3aaee0e5cdb52..18b85ff532234 100644
--- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java
+++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java
@@ -30,9 +30,11 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
+import java.util.function.Function;
 import java.util.stream.Collectors;

-import static org.elasticsearch.xpack.deprecation.DeprecationChecks.NODE_SETTINGS_CHECKS;
+import static org.elasticsearch.xpack.deprecation.NodeDeprecationChecks.SINGLE_NODE_CHECKS;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.not;
@@ -154,8 +156,8 @@ public void testSharedDataPathSetting() {
             .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), createTempDir())
             .build();

-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
         );
         final String expectedUrl =
@@ -209,8 +211,8 @@ public void testCheckReservedPrefixedRealmNames() {
         }
         final Settings settings = builder.build();

-        final List<DeprecationIssue> deprecationIssues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        final List<DeprecationIssue> deprecationIssues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
         );

@@ -235,8 +237,8 @@ public void testCheckReservedPrefixedRealmNames() {
     void monitoringSetting(String settingKey, String value) {
         Settings settings = Settings.builder().put(settingKey, value).build();

-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
         );
         final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings";
@@ -259,8 +261,8 @@ void monitoringExporterSetting(String suffix, String value) {
         String settingKey = "xpack.monitoring.exporters.test." + suffix;
         Settings settings = Settings.builder().put(settingKey, value).build();
         final XPackLicenseState licenseState = new XPackLicenseState(() -> 0);
-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState)
         );
         final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings";
@@ -284,8 +286,8 @@ void monitoringExporterGroupedSetting(String suffix, String value) {
         String subSettingKey = settingKey + ".subsetting";
         Settings settings = Settings.builder().put(subSettingKey, value).build();
         final XPackLicenseState licenseState = new XPackLicenseState(() -> 0);
-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState)
         );
         final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings";
@@ -310,8 +312,8 @@ void monitoringExporterSecureSetting(String suffix, String value) {
         secureSettings.setString(settingKey, value);
         Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
         final XPackLicenseState licenseState = new XPackLicenseState(() -> 0);
-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, licenseState)
         );
         final String expectedUrl = "https://ela.st/es-deprecation-7-monitoring-settings";
@@ -457,8 +459,8 @@ public void testCheckMonitoringSettingCollectionInterval() {
     public void testExporterUseIngestPipelineSettings() {
         Settings settings = Settings.builder().put("xpack.monitoring.exporters.test.use_ingest", true).build();

-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
         );

@@ -483,8 +485,8 @@ public void testExporterPipelineMasterTimeoutSetting() {
             .put("xpack.monitoring.exporters.test.index.pipeline.master_timeout", TimeValue.timeValueSeconds(10))
             .build();

-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
         );

@@ -508,8 +510,8 @@ public void testExporterPipelineMasterTimeoutSetting() {
     public void testExporterCreateLegacyTemplateSetting() {
         Settings settings = Settings.builder().put("xpack.monitoring.exporters.test.index.template.create_legacy_templates", true).build();

-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
         );

@@ -535,8 +537,8 @@ public void testScriptContextCacheSetting() {
             .put(ScriptService.SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.getKey(), "use-context")
             .build();

-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
         );

@@ -564,8 +566,8 @@ public void testScriptContextCompilationsRateLimitSetting() {
             .put(ScriptService.SCRIPT_MAX_COMPILATIONS_RATE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "456/7m")
             .build();

-        List<DeprecationIssue> issues = DeprecationChecks.filterChecks(
-            NODE_SETTINGS_CHECKS,
+        List<DeprecationIssue> issues = filterChecks(
+            SINGLE_NODE_CHECKS,
             c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0))
         );
NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -601,8 +603,8 @@ public void testImplicitScriptContextCacheSetting() { .put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "2453") .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -639,8 +641,8 @@ public void testScriptContextCacheSizeSetting() { .put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), 200) .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -676,8 +678,8 @@ public void testScriptContextCacheExpirationSetting() { .put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING.getConcreteSettingForNamespace(contexts.get(1)).getKey(), "2d") .build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -708,8 +710,8 @@ public void testScriptContextCacheExpirationSetting() { public void testEnforceDefaultTierPreferenceSetting() { Settings settings = Settings.builder().put(DataTier.ENFORCE_DEFAULT_TIER_PREFERENCE_SETTING.getKey(), randomBoolean()).build(); - List issues = DeprecationChecks.filterChecks( - NODE_SETTINGS_CHECKS, + List issues = filterChecks( + SINGLE_NODE_CHECKS, c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -731,8 +733,8 @@ public void testEnforceDefaultTierPreferenceSetting() { } private List getDeprecationIssues(Settings settings, PluginsAndModules pluginsAndModules) { - final List issues = DeprecationChecks.filterChecks( - DeprecationChecks.NODE_SETTINGS_CHECKS, + final List issues = filterChecks( + NodeDeprecationChecks.SINGLE_NODE_CHECKS, c -> c.apply(settings, pluginsAndModules, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) ); @@ -799,8 +801,8 @@ public void testDynamicSettings() { } Metadata metadata = metadataBuilder.build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - final List issues = DeprecationChecks.filterChecks( - DeprecationChecks.NODE_SETTINGS_CHECKS, + final List issues = filterChecks( + NodeDeprecationChecks.SINGLE_NODE_CHECKS, c -> c.apply(nodettings, pluginsAndModules, clusterState, licenseState) ); @@ -832,4 +834,8 @@ public void testCheckNodeAttrData() { ); assertThat(issues, hasItem(expected)); } + + static List filterChecks(List checks, Function mapper) { + return checks.stream().map(mapper).filter(Objects::nonNull).toList(); + } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java index 81c0d1c7dc918..4e1b28b341282 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java @@ -48,12 +48,12 @@ public void 
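The new `filterChecks` helper above (whose generic signature is collapsed in this rendering) replaces `DeprecationChecks.filterChecks`: apply every check and keep only the non-null issues. A minimal self-contained sketch of that pattern, with illustrative type parameters rather than the upstream signature:

```java
import java.util.List;
import java.util.Objects;
import java.util.function.Function;

class FilterChecksSketch {
    // Run every check and keep only the non-null issues it reports.
    static <C, I> List<I> filterChecks(List<C> checks, Function<C, I> mapper) {
        return checks.stream().map(mapper).filter(Objects::nonNull).toList();
    }

    public static void main(String[] args) {
        // Each toy "check" maps a settings key to an issue message, or null for no issue.
        List<Function<String, String>> checks = List.of(
            key -> key.startsWith("xpack.monitoring.") ? "monitoring settings are deprecated" : null,
            key -> key.contains("script.cache") ? "script cache settings are deprecated" : null
        );
        System.out.println(filterChecks(checks, check -> check.apply("xpack.monitoring.enabled")));
        // prints: [monitoring settings are deprecated]
    }
}
```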
testCheckSourceModeInComponentTemplates() throws IOException { ) .build(); - Map> issuesByComponentTemplate = checker.check(clusterState, null); + Map> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING_TITLE, + "https://ela.st/migrate-source-mode", SourceFieldMapper.DEPRECATION_WARNING, - "https://github.com/elastic/elasticsearch/pull/117172", - null, false, null ); @@ -81,7 +81,7 @@ public void testCheckLegacyTiersInComponentTemplates() { ) .build(); - Map> issuesByComponentTemplate = checker.check(clusterState, null); + Map> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, "Configuring tiers via filtered allocation is not recommended.", @@ -121,7 +121,7 @@ public void testCheckLegacyTierSettings() { ) .build(); - Map> issuesByComponentTemplate = checker.check(clusterState, null); + Map> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, "Configuring tiers via filtered allocation is not recommended.", @@ -164,7 +164,7 @@ public void testComponentAndComposableTemplateWithSameName() { ) .build(); - Map> issuesByComponentTemplate = checker.check(clusterState, null); + Map> issuesByComponentTemplate = checker.check(clusterState); final DeprecationIssue expectedIndexTemplateIssue = new DeprecationIssue( DeprecationIssue.Level.WARNING, "Configuring tiers via filtered allocation is not recommended.", diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoActionTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoActionTests.java index 85fa375c09c5f..945068ba3a107 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoActionTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoActionTests.java @@ -6,22 +6,295 @@ */ package org.elasticsearch.xpack.deprecation; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.hamcrest.core.IsEqual; 
+import org.junit.Assert; +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.Response.RESERVED_NAMES; +import static org.elasticsearch.xpack.deprecation.DeprecationInfoActionResponseTests.createTestDeprecationIssue; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TransportDeprecationInfoActionTests extends ESTestCase { + public void testCheckAndCreateResponse() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all"); + mapping.field("enabled", false); + mapping.endObject().endObject(); + + Metadata metadata = Metadata.builder() + .put( + IndexMetadata.builder("test") + .putMapping(Strings.toString(mapping)) + .settings(settings(IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + ) + .build(); + + DiscoveryNode discoveryNode = DiscoveryNodeUtils.create("test", new TransportAddress(TransportAddress.META_ADDRESS, 9300)); + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); + boolean clusterIssueFound = randomBoolean(); + boolean nodeIssueFound = randomBoolean(); + boolean indexIssueFound = randomBoolean(); + boolean dataStreamIssueFound = randomBoolean(); + boolean indexTemplateIssueFound = randomBoolean(); + boolean componentTemplateIssueFound = randomBoolean(); + boolean ilmPolicyIssueFound = randomBoolean(); + DeprecationIssue foundIssue = createTestDeprecationIssue(); + ClusterDeprecationChecker clusterDeprecationChecker = mock(ClusterDeprecationChecker.class); + when(clusterDeprecationChecker.check(any(), any())).thenReturn(clusterIssueFound ? List.of(foundIssue) : List.of()); + List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { + if (indexIssueFound) { + return Map.of("test", List.of(foundIssue)); + } + return Map.of(); + }), createResourceChecker("data_streams", (cs, req) -> { + if (dataStreamIssueFound) { + return Map.of("my-ds", List.of(foundIssue)); + } + return Map.of(); + }), createResourceChecker("templates", (cs, req) -> { + Map> issues = new HashMap<>(); + if (componentTemplateIssueFound) { + issues.put("my-component-template", List.of(foundIssue)); + } + if (indexTemplateIssueFound) { + issues.put("my-index-template", List.of(foundIssue)); + } + return issues; + }), createResourceChecker("ilm_policies", (cs, req) -> { + if (ilmPolicyIssueFound) { + return Map.of("my-policy", List.of(foundIssue)); + } + return Map.of(); + })); + + List nodeDeprecationIssues = nodeIssueFound ? 
List.of(foundIssue) : List.of(); + + DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); + TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData(); + precomputedData.setOnceTransformConfigs(List.of()); + precomputedData.setOncePluginIssues(Map.of()); + precomputedData.setOnceNodeSettingsIssues(nodeDeprecationIssues); + DeprecationInfoAction.Response response = TransportDeprecationInfoAction.checkAndCreateResponse( + state, + resolver, + request, + List.of(), + clusterDeprecationChecker, + resourceCheckers, + precomputedData + ); + + if (clusterIssueFound) { + assertThat(response.getClusterSettingsIssues(), IsEqual.equalTo(List.of(foundIssue))); + } else { + assertThat(response.getClusterSettingsIssues(), empty()); + } + + if (nodeIssueFound) { + assertThat(response.getNodeSettingsIssues(), IsEqual.equalTo(List.of(foundIssue))); + } else { + assertTrue(response.getNodeSettingsIssues().isEmpty()); + } + + if (indexIssueFound) { + assertThat(response.getIndexSettingsIssues(), IsEqual.equalTo(Map.of("test", List.of(foundIssue)))); + } else { + assertTrue(response.getIndexSettingsIssues().isEmpty()); + } + if (dataStreamIssueFound) { + assertThat(response.getDataStreamDeprecationIssues(), IsEqual.equalTo(Map.of("my-ds", List.of(foundIssue)))); + } else { + assertTrue(response.getDataStreamDeprecationIssues().isEmpty()); + } + if (ilmPolicyIssueFound) { + assertThat(response.getIlmPolicyDeprecationIssues(), IsEqual.equalTo(Map.of("my-policy", List.of(foundIssue)))); + } else { + assertTrue(response.getIlmPolicyDeprecationIssues().isEmpty()); + } + if (componentTemplateIssueFound == false && indexTemplateIssueFound == false) { + assertTrue(response.getTemplateDeprecationIssues().isEmpty()); + } else { + if (componentTemplateIssueFound) { + assertThat(response.getTemplateDeprecationIssues().get("my-component-template"), IsEqual.equalTo(List.of(foundIssue))); + } + if (indexTemplateIssueFound) { + assertThat(response.getTemplateDeprecationIssues().get("my-index-template"), IsEqual.equalTo(List.of(foundIssue))); + } + + } + } + + public void testRemoveSkippedSettings() { + Settings.Builder settingsBuilder = settings(IndexVersion.current()); + settingsBuilder.put("some.deprecated.property", "someValue1"); + settingsBuilder.put("some.other.bad.deprecated.property", "someValue2"); + settingsBuilder.put("some.undeprecated.property", "someValue3"); + settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); + Settings inputSettings = settingsBuilder.build(); + IndexMetadata dataStreamIndexMetadata = IndexMetadata.builder("ds-test-index-1") + .settings(inputSettings) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + ComponentTemplate componentTemplate = new ComponentTemplate(Template.builder().settings(inputSettings).build(), null, null); + ComposableIndexTemplate indexTemplate = ComposableIndexTemplate.builder() + .template(Template.builder().settings(inputSettings)) + .build(); + Metadata metadata = Metadata.builder() + .put(IndexMetadata.builder("test").settings(inputSettings).numberOfShards(1).numberOfReplicas(0)) + .put(dataStreamIndexMetadata, true) + .put(DataStream.builder("ds-test", List.of(dataStreamIndexMetadata.getIndex())).build()) + .indexTemplates( + Map.of( + "my-index-template", + indexTemplate, + "empty-template", + ComposableIndexTemplate.builder().indexPatterns(List.of("random")).build() + ) + ) + 
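The `PrecomputedData` container above is filled through `setOnce...` methods before `checkAndCreateResponse` runs. A rough model of the set-once semantics those methods presumably enforce (simplified, not the upstream class):

```java
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;

class SetOnceSketch<T> {
    private final AtomicReference<T> ref = new AtomicReference<>();

    // Accept a value exactly once; a second call is a programming error.
    void set(T value) {
        if (ref.compareAndSet(null, value) == false) {
            throw new IllegalStateException("value was already set");
        }
    }

    // Reading before the value has been supplied is likewise an error.
    T get() {
        T value = ref.get();
        if (value == null) {
            throw new IllegalStateException("value has not been set yet");
        }
        return value;
    }

    public static void main(String[] args) {
        SetOnceSketch<List<String>> nodeSettingsIssues = new SetOnceSketch<>();
        nodeSettingsIssues.set(List.of("some node-level issue"));
        System.out.println(nodeSettingsIssues.get());
    }
}
```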
.componentTemplates(Map.of("my-component-template", componentTemplate)) + .persistentSettings(inputSettings) + .build(); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); + AtomicReference visibleClusterSettings = new AtomicReference<>(); + ClusterDeprecationChecker clusterDeprecationChecker = mock(ClusterDeprecationChecker.class); + when(clusterDeprecationChecker.check(any(), any())).thenAnswer(invocationOnMock -> { + ClusterState observedState = invocationOnMock.getArgument(0); + visibleClusterSettings.set(observedState.getMetadata().settings()); + return List.of(); + }); + AtomicReference visibleIndexSettings = new AtomicReference<>(); + AtomicReference visibleComponentTemplateSettings = new AtomicReference<>(); + AtomicReference visibleIndexTemplateSettings = new AtomicReference<>(); + AtomicInteger backingIndicesCount = new AtomicInteger(0); + List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { + for (String indexName : resolver.concreteIndexNames(cs, req)) { + visibleIndexSettings.set(cs.metadata().index(indexName).getSettings()); + } + return Map.of(); + }), createResourceChecker("data_streams", (cs, req) -> { + cs.metadata().dataStreams().values().forEach(ds -> backingIndicesCount.set(ds.getIndices().size())); + return Map.of(); + }), createResourceChecker("templates", (cs, req) -> { + cs.metadata() + .componentTemplates() + .values() + .forEach(template -> visibleComponentTemplateSettings.set(template.template().settings())); + cs.metadata().templatesV2().values().forEach(template -> { + if (template.template() != null && template.template().settings() != null) { + visibleIndexTemplateSettings.set(template.template().settings()); + } + }); + return Map.of(); + })); + TransportDeprecationInfoAction.PrecomputedData precomputedData = new TransportDeprecationInfoAction.PrecomputedData(); + precomputedData.setOnceTransformConfigs(List.of()); + precomputedData.setOncePluginIssues(Map.of()); + precomputedData.setOnceNodeSettingsIssues(List.of()); + DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); + TransportDeprecationInfoAction.checkAndCreateResponse( + state, + resolver, + request, + List.of("some.deprecated.property", "some.other.*.deprecated.property"), + clusterDeprecationChecker, + resourceCheckers, + precomputedData + ); + + settingsBuilder = settings(IndexVersion.current()); + settingsBuilder.put("some.undeprecated.property", "someValue3"); + settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); + + Settings expectedSettings = settingsBuilder.build(); + Settings resultClusterSettings = visibleClusterSettings.get(); + Assert.assertNotNull(resultClusterSettings); + Assert.assertEquals(expectedSettings, visibleClusterSettings.get()); + + Settings resultIndexSettings = visibleIndexSettings.get(); + Assert.assertNotNull(resultIndexSettings); + Assert.assertEquals("someValue3", resultIndexSettings.get("some.undeprecated.property")); + Assert.assertEquals(resultIndexSettings.getAsList("some.undeprecated.list.property"), List.of("someValue4", "someValue5")); + Assert.assertFalse(resultIndexSettings.hasValue("some.deprecated.property")); + Assert.assertFalse(resultIndexSettings.hasValue("some.other.bad.deprecated.property")); + + assertThat(backingIndicesCount.get(), IsEqual.equalTo(1)); + + 
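`testRemoveSkippedSettings` above asserts that settings matching the skip patterns (`some.deprecated.property`, `some.other.*.deprecated.property`) are hidden from every checker. A hedged sketch of that wildcard filtering, with `*` treated as a glob:

```java
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.regex.Pattern;

class SkipSettingsSketch {
    // Drop every key matching one of the skip patterns; "*" is a glob wildcard.
    static Map<String, String> removeSkipped(Map<String, String> settings, List<String> skipPatterns) {
        List<Pattern> patterns = skipPatterns.stream()
            .map(glob -> Pattern.compile(glob.replace(".", "\\.").replace("*", ".*")))
            .toList();
        Map<String, String> kept = new TreeMap<>();
        settings.forEach((key, value) -> {
            if (patterns.stream().noneMatch(pattern -> pattern.matcher(key).matches())) {
                kept.put(key, value);
            }
        });
        return kept;
    }

    public static void main(String[] args) {
        System.out.println(removeSkipped(
            Map.of(
                "some.deprecated.property", "someValue1",
                "some.other.bad.deprecated.property", "someValue2",
                "some.undeprecated.property", "someValue3"
            ),
            List.of("some.deprecated.property", "some.other.*.deprecated.property")
        ));
        // prints: {some.undeprecated.property=someValue3}
    }
}
```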
Assert.assertNotNull(visibleComponentTemplateSettings.get()); + Assert.assertEquals(expectedSettings, visibleComponentTemplateSettings.get()); + Assert.assertNotNull(visibleIndexTemplateSettings.get()); + Assert.assertEquals(expectedSettings, visibleIndexTemplateSettings.get()); + } + + public void testCtorFailure() { + Map> indexNames = Stream.generate(() -> randomAlphaOfLength(10)) + .limit(10) + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); + Map> dataStreamNames = Stream.generate(() -> randomAlphaOfLength(10)) + .limit(10) + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); + Set shouldCauseFailure = new HashSet<>(RESERVED_NAMES); + for (int i = 0; i < randomIntBetween(1, 100); i++) { + Map> pluginSettingsIssues = randomSubsetOf(3, shouldCauseFailure).stream() + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); + expectThrows( + ElasticsearchStatusException.class, + () -> new DeprecationInfoAction.Response( + List.of(), + List.of(), + Map.of("data_streams", dataStreamNames, "index_settings", indexNames), + pluginSettingsIssues + ) + ); + } + } + public void testPluginSettingIssues() { DeprecationChecker.Components components = new DeprecationChecker.Components(null, Settings.EMPTY, null); PlainActionFuture>> future = new PlainActionFuture<>(); @@ -65,6 +338,28 @@ public void testPluginSettingIssuesWithFailures() { assertThat(exception.getCause().getMessage(), containsString("boom")); } + private static ResourceDeprecationChecker createResourceChecker( + String name, + BiFunction>> check + ) { + return new ResourceDeprecationChecker() { + + @Override + public Map> check( + ClusterState clusterState, + DeprecationInfoAction.Request request, + TransportDeprecationInfoAction.PrecomputedData precomputedData + ) { + return check.apply(clusterState, request); + } + + @Override + public String getName() { + return name; + } + }; + } + private static class NamedChecker implements DeprecationChecker { private final String name; diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckActionTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckActionTests.java index 80692efb7474a..a0a37f2bb52d1 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckActionTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TransportNodeDeprecationCheckActionTests.java @@ -61,7 +61,7 @@ public void testNodeOperation() { settingsBuilder.put("some.undeprecated.property", "someValue3"); settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); settingsBuilder.putList( - DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), + TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property") ); Settings nodeSettings = settingsBuilder.build(); @@ -73,7 +73,10 @@ public void testNodeOperation() { ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); ClusterService clusterService = Mockito.mock(ClusterService.class); when(clusterService.state()).thenReturn(clusterState); - ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, Set.of(DeprecationChecks.SKIP_DEPRECATIONS_SETTING)); + ClusterSettings clusterSettings = new ClusterSettings( + 
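`testCtorFailure` above expects the response constructor to reject plugin-issue keys that collide with `RESERVED_NAMES`. A toy version of that guard; the reserved names and exception type here are stand-ins, not the upstream constants:

```java
import java.util.List;
import java.util.Map;
import java.util.Set;

class ReservedNamesSketch {
    // Illustrative stand-ins for the real RESERVED_NAMES.
    static final Set<String> RESERVED_NAMES = Set.of("cluster_settings", "node_settings", "index_settings", "data_streams");

    record Response(Map<String, List<String>> pluginSettingsIssues) {
        Response {
            for (String key : pluginSettingsIssues.keySet()) {
                if (RESERVED_NAMES.contains(key)) {
                    throw new IllegalArgumentException("plugin issues may not use reserved name [" + key + "]");
                }
            }
        }
    }

    public static void main(String[] args) {
        new Response(Map.of("my_plugin", List.of())); // accepted
        try {
            new Response(Map.of("index_settings", List.of())); // rejected
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```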
nodeSettings, + Set.of(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING) + ); when((clusterService.getClusterSettings())).thenReturn(clusterSettings); DiscoveryNode node = Mockito.mock(DiscoveryNode.class); when(node.getId()).thenReturn("mock-node"); @@ -98,7 +101,7 @@ public void testNodeOperation() { NodesDeprecationCheckAction.NodeRequest nodeRequest = null; AtomicReference visibleNodeSettings = new AtomicReference<>(); AtomicReference visibleClusterStateMetadataSettings = new AtomicReference<>(); - DeprecationChecks.NodeDeprecationCheck< + NodeDeprecationChecks.NodeDeprecationCheck< Settings, PluginsAndModules, ClusterState, @@ -109,7 +112,7 @@ public void testNodeOperation() { return null; }; java.util.List< - DeprecationChecks.NodeDeprecationCheck< + NodeDeprecationChecks.NodeDeprecationCheck< Settings, PluginsAndModules, ClusterState, @@ -120,7 +123,7 @@ public void testNodeOperation() { settingsBuilder.put("some.undeprecated.property", "someValue3"); settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); settingsBuilder.putList( - DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), + TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property") ); Settings expectedSettings = settingsBuilder.build(); @@ -131,7 +134,7 @@ public void testNodeOperation() { // Testing that the setting is dynamically updatable: Settings newSettings = Settings.builder() - .putList(DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.undeprecated.property")) + .putList(TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.undeprecated.property")) .build(); clusterSettings.applySettings(newSettings); transportNodeDeprecationCheckAction.nodeOperation(nodeRequest, nodeSettingsChecks); @@ -141,7 +144,7 @@ public void testNodeOperation() { settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); // This is the node setting (since this is the node deprecation check), not the cluster setting: settingsBuilder.putList( - DeprecationChecks.SKIP_DEPRECATIONS_SETTING.getKey(), + TransportDeprecationInfoAction.SKIP_DEPRECATIONS_SETTING.getKey(), List.of("some.deprecated.property", "some.other.*.deprecated.property", "some.bad.dynamic.property") ); expectedSettings = settingsBuilder.build(); diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/80_downsample_aggregate.yml b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/80_downsample_aggregate.yml new file mode 100644 index 0000000000000..991aa3858d8bc --- /dev/null +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/80_downsample_aggregate.yml @@ -0,0 +1,79 @@ +"downsample aggregate field": + - requires: + cluster_features: ["data_stream.downsample.default_aggregate_metric_fix"] + reason: "#119696 fixed" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [sensor_id] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + sensor_id: + type: keyword + time_series_dimension: true + temperature: + type: aggregate_metric_double + metrics: [min, sum, value_count] + default_metric: sum + time_series_metric: gauge + - do: + bulk: + refresh: true + 
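The tail of `testNodeOperation` above checks that the skip-deprecations setting is dynamically updatable: `clusterSettings.applySettings(newSettings)` must be observed by the action without a restart. A bare-bones model of that publish-and-notify mechanism (names invented for the sketch):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

class DynamicSettingSketch {
    private final List<Consumer<List<String>>> listeners = new ArrayList<>();
    private volatile List<String> skipList = List.of();

    // Interested components register a consumer up front.
    void addSettingsUpdateConsumer(Consumer<List<String>> listener) {
        listeners.add(listener);
    }

    // applySettings publishes the new value and notifies every registered consumer.
    void applySettings(List<String> newSkipList) {
        skipList = newSkipList;
        listeners.forEach(listener -> listener.accept(newSkipList));
    }

    List<String> current() {
        return skipList;
    }

    public static void main(String[] args) {
        DynamicSettingSketch clusterSettings = new DynamicSettingSketch();
        clusterSettings.addSettingsUpdateConsumer(v -> System.out.println("skip list is now " + v));
        clusterSettings.applySettings(List.of("some.undeprecated.property"));
        System.out.println(clusterSettings.current());
    }
}
```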
index: test + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:00:00Z", "sensor_id": "1", "temperature": {"min": 24.7, "sum": 50.2, "value_count": 2}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:30:00Z", "sensor_id": "1", "temperature": {"min": 24.2, "sum": 73.8, "value_count": 3}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:00:00Z", "sensor_id": "1", "temperature": {"min": 25.1, "sum": 51.0, "value_count": 2}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:30:00Z", "sensor_id": "1", "temperature": {"min": 24.8, "sum": 24.8, "value_count": 1}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:00:00Z", "sensor_id": "1", "temperature": {"min": 24.6, "sum": 49.1, "value_count": 2}}' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + + - do: + indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + search: + index: test-downsample + body: + size: 0 + + - match: + hits.total.value: 3 + + - do: + indices.get_mapping: + index: test-downsample + - match: + test-downsample.mappings.properties.temperature: + type: aggregate_metric_double + metrics: [min, sum, value_count] + default_metric: sum + time_series_metric: gauge diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index afa2e95e1284c..917ce781fb1f8 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -119,7 +119,7 @@ public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { } catch (Exception e) { throw new AssertionError(e); } - }, 60, TimeUnit.SECONDS); + }, 120, TimeUnit.SECONDS); ensureGreen(targetIndex); } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java index a451439fadea1..94650e33a397f 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java @@ -10,18 +10,18 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; public final class AggregateMetricFieldValueFetcher extends FieldValueFetcher { - private final AggregateDoubleMetricFieldType aggMetricFieldType; + private final AggregateMetricDoubleFieldType aggMetricFieldType; private final AbstractDownsampleFieldProducer 
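In the YAML test above, the five documents land in the 18:00, 19:00 and 20:00 hourly buckets, which is why the search over `test-downsample` expects `hits.total.value: 3`. A quick standalone check of that bucketing, merging `min`/`sum`/`value_count` the way a gauge downsample does (value counts are summed):

```java
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

class DownsampleBucketSketch {
    record Sample(Instant ts, double min, double sum, long valueCount) {}

    public static void main(String[] args) {
        List<Sample> samples = List.of(
            new Sample(Instant.parse("2021-04-28T18:00:00Z"), 24.7, 50.2, 2),
            new Sample(Instant.parse("2021-04-28T18:30:00Z"), 24.2, 73.8, 3),
            new Sample(Instant.parse("2021-04-28T19:00:00Z"), 25.1, 51.0, 2),
            new Sample(Instant.parse("2021-04-28T19:30:00Z"), 24.8, 24.8, 1),
            new Sample(Instant.parse("2021-04-28T20:00:00Z"), 24.6, 49.1, 2)
        );
        Map<Instant, double[]> buckets = new TreeMap<>(); // value = {min, sum, value_count}
        for (Sample s : samples) {
            buckets.merge(
                s.ts().truncatedTo(ChronoUnit.HOURS),
                new double[] { s.min(), s.sum(), s.valueCount() },
                (a, b) -> new double[] { Math.min(a[0], b[0]), a[1] + b[1], a[2] + b[2] }
            );
        }
        buckets.forEach((hour, m) -> System.out.printf("%s min=%.1f sum=%.1f value_count=%.0f%n", hour, m[0], m[1], m[2]));
        // three buckets, hence three downsampled documents
    }
}
```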
fieldProducer; AggregateMetricFieldValueFetcher( MappedFieldType fieldType, - AggregateDoubleMetricFieldType aggMetricFieldType, + AggregateMetricDoubleFieldType aggMetricFieldType, IndexFieldData fieldData ) { super(fieldType.name(), fieldType, fieldData); @@ -34,7 +34,7 @@ public AbstractDownsampleFieldProducer fieldProducer() { } private AbstractDownsampleFieldProducer createFieldProducer() { - AggregateDoubleMetricFieldMapper.Metric metric = null; + AggregateMetricDoubleFieldMapper.Metric metric = null; for (var e : aggMetricFieldType.getMetricFields().entrySet()) { NumberFieldMapper.NumberFieldType metricSubField = e.getValue(); if (metricSubField.name().equals(name())) { @@ -52,7 +52,7 @@ private AbstractDownsampleFieldProducer createFieldProducer() { case min -> new MetricFieldProducer.Min(); case sum -> new MetricFieldProducer.Sum(); // To compute value_count summary, we must sum all field values - case value_count -> new MetricFieldProducer.Sum(AggregateDoubleMetricFieldMapper.Metric.value_count.name()); + case value_count -> new MetricFieldProducer.Sum(AggregateMetricDoubleFieldMapper.Metric.value_count.name()); }; return new MetricFieldProducer.GaugeMetricFieldProducer(aggMetricFieldType.name(), metricOperation); } else { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java index 3657e4989ccbd..811d36ec1075a 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/FieldValueFetcher.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; import java.util.ArrayList; import java.util.Collections; @@ -82,7 +82,7 @@ static List create(SearchExecutionContext context, String[] f MappedFieldType fieldType = context.getFieldType(field); assert fieldType != null : "Unknown field type for field: [" + field + "]"; - if (fieldType instanceof AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType aggMetricFieldType) { + if (fieldType instanceof AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType aggMetricFieldType) { // If the field is an aggregate_metric_double field, we should load all its subfields // This is a downsample-of-downsample case for (NumberFieldMapper.NumberFieldType metricSubField : aggMetricFieldType.getMetricFields().values()) { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java index b211c5bfb0d12..8a90411bc1c5f 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java @@ -12,7 +12,7 @@ import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.mapper.flattened.FlattenedFieldSyntheticWriterHelper; import org.elasticsearch.xcontent.XContentBuilder; -import 
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 7c26ad60fb13c..2c08dcd9017fd 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -76,7 +76,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.downsample.DownsampleShardPersistentTaskState; import org.elasticsearch.xpack.core.downsample.DownsampleShardTask; @@ -91,6 +91,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -739,6 +740,39 @@ private static void addTimestampField( .endObject(); } + // public for testing + public record AggregateMetricDoubleFieldSupportedMetrics(String defaultMetric, List supportedMetrics) {} + + // public for testing + public static AggregateMetricDoubleFieldSupportedMetrics getSupportedMetrics( + final TimeSeriesParams.MetricType metricType, + final Map fieldProperties + ) { + boolean sourceIsAggregate = fieldProperties.get("type").equals(AggregateMetricDoubleFieldMapper.CONTENT_TYPE); + List supportedAggs = List.of(metricType.supportedAggs()); + + if (sourceIsAggregate) { + @SuppressWarnings("unchecked") + List currentAggs = (List) fieldProperties.get(AggregateMetricDoubleFieldMapper.Names.METRICS); + supportedAggs = supportedAggs.stream().filter(currentAggs::contains).toList(); + } + + assert supportedAggs.size() > 0; + + String defaultMetric = "max"; + if (supportedAggs.contains(defaultMetric) == false) { + defaultMetric = supportedAggs.get(0); + } + if (sourceIsAggregate) { + defaultMetric = Objects.requireNonNullElse( + (String) fieldProperties.get(AggregateMetricDoubleFieldMapper.Names.DEFAULT_METRIC), + defaultMetric + ); + } + + return new AggregateMetricDoubleFieldSupportedMetrics(defaultMetric, supportedAggs); + } + private static void addMetricFieldMapping(final XContentBuilder builder, final String field, final Map fieldProperties) throws IOException { final TimeSeriesParams.MetricType metricType = TimeSeriesParams.MetricType.fromString( @@ -752,12 +786,11 @@ private static void addMetricFieldMapping(final XContentBuilder builder, final S builder.field(fieldProperty, fieldProperties.get(fieldProperty)); } } else { - final String[] supportedAggsArray = metricType.supportedAggs(); - // We choose max as the default metric - final String defaultMetric = List.of(supportedAggsArray).contains("max") ? 
"max" : supportedAggsArray[0]; - builder.field("type", AggregateDoubleMetricFieldMapper.CONTENT_TYPE) - .array(AggregateDoubleMetricFieldMapper.Names.METRICS, supportedAggsArray) - .field(AggregateDoubleMetricFieldMapper.Names.DEFAULT_METRIC, defaultMetric) + var supported = getSupportedMetrics(metricType, fieldProperties); + + builder.field("type", AggregateMetricDoubleFieldMapper.CONTENT_TYPE) + .stringListField(AggregateMetricDoubleFieldMapper.Names.METRICS, supported.supportedMetrics) + .field(AggregateMetricDoubleFieldMapper.Names.DEFAULT_METRIC, supported.defaultMetric) .field(TIME_SERIES_METRIC_PARAM, metricType); } builder.endObject(); diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java index 3d3fe3650ebd9..8c396c4934956 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -35,6 +34,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.List; import java.util.concurrent.atomic.AtomicReferenceArray; /** @@ -79,17 +79,12 @@ public TransportDownsampleIndexerAction( } @Override - protected GroupShardsIterator shards( - ClusterState clusterState, - DownsampleIndexerAction.Request request, - String[] concreteIndices - ) { + protected List shards(ClusterState clusterState, DownsampleIndexerAction.Request request, String[] concreteIndices) { if (concreteIndices.length > 1) { throw new IllegalArgumentException("multiple indices: " + Arrays.toString(concreteIndices)); } - final GroupShardsIterator groups = clusterService.operationRouting() - .searchShards(clusterState, concreteIndices, null, null); + final List groups = clusterService.operationRouting().searchShards(clusterState, concreteIndices, null, null); for (ShardIterator group : groups) { // fails fast if any non-active groups if (group.size() == 0) { diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index ce9b60938526a..2c759f150e575 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -1760,7 +1760,7 @@ public void testDuplicateDownsampleRequest() throws Exception { new Thread(() -> { try { downsample(sourceIndex, targetIndex, config); - } catch (ResourceAlreadyExistsException e) { + } catch (ElasticsearchException e) { firstFailed.set(true); } finally { downsampleComplete.countDown(); @@ -1770,7 +1770,7 @@ public void testDuplicateDownsampleRequest() throws Exception { new Thread(() -> { try { downsample(sourceIndex, targetIndex, 
config); - } catch (ResourceAlreadyExistsException e) { + } catch (ElasticsearchException e) { secondFailed.set(true); } finally { downsampleComplete.countDown(); diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/TransportDownsampleActionTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/TransportDownsampleActionTests.java index fb699fd7c3418..1b2cc32e12a65 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/TransportDownsampleActionTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/TransportDownsampleActionTests.java @@ -13,12 +13,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import java.util.List; +import java.util.Map; import java.util.UUID; +import static org.hamcrest.Matchers.is; + public class TransportDownsampleActionTests extends ESTestCase { public void testCopyIndexMetadata() { // GIVEN @@ -107,4 +111,25 @@ private static void assertTargetSettings(final IndexMetadata indexMetadata, fina settings.get(IndexMetadata.SETTING_CREATION_DATE) ); } + + public void testGetSupportedMetrics() { + TimeSeriesParams.MetricType metricType = TimeSeriesParams.MetricType.GAUGE; + Map fieldProperties = Map.of( + "type", + "aggregate_metric_double", + "metrics", + List.of("max", "sum"), + "default_metric", + "sum" + ); + + var supported = TransportDownsampleAction.getSupportedMetrics(metricType, fieldProperties); + assertThat(supported.defaultMetric(), is("sum")); + assertThat(supported.supportedMetrics(), is(List.of("max", "sum"))); + + fieldProperties = Map.of("type", "integer"); + supported = TransportDownsampleAction.getSupportedMetrics(metricType, fieldProperties); + assertThat(supported.defaultMetric(), is("max")); + assertThat(supported.supportedMetrics(), is(List.of(metricType.supportedAggs()))); + } } diff --git a/x-pack/plugin/enrich/build.gradle b/x-pack/plugin/enrich/build.gradle index 352b7a3e64171..46972578ae1fc 100644 --- a/x-pack/plugin/enrich/build.gradle +++ b/x-pack/plugin/enrich/build.gradle @@ -19,6 +19,7 @@ dependencies { testImplementation project(path: ':modules:legacy-geo') testImplementation project(xpackModule('spatial')) testImplementation(testArtifact(project(xpackModule('monitoring')))) + internalClusterTestImplementation project(':modules:rest-root') } addQaCheckDependencies(project) diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestActionCancellationIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestActionCancellationIT.java new file mode 100644 index 0000000000000..29808e4a91362 --- /dev/null +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestActionCancellationIT.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.enrich; + +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.action.support.CancellableActionTestPlugin; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.root.MainRestPlugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.transport.netty4.Netty4Plugin; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.action.support.ActionTestUtils.wrapAsRestResponseListener; +import static org.elasticsearch.test.TaskAssertions.assertAllTasksHaveFinished; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.oneOf; + +public class EnrichRestActionCancellationIT extends ESIntegTestCase { + + @Override + protected boolean addMockHttpTransport() { + return false; // enable http + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME) + .put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME) + .build(); + } + + @Override + protected Collection> nodePlugins() { + return List.of(getTestTransportPlugin(), MainRestPlugin.class, CancellableActionTestPlugin.class, EnrichPlugin.class); + } + + public void testGetEnrichPolicyCancellation() throws IOException { + runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_enrich/policy"), GetEnrichPolicyAction.NAME); + } + + public void testEnrichStatsCancellation() throws IOException { + runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_enrich/_stats"), EnrichStatsAction.NAME); + } + + private void runRestActionCancellationTest(Request request, String actionName) { + final var node = usually() ? 
internalCluster().getRandomNodeName() : internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); + + try ( + var restClient = createRestClient(node); + var capturingAction = CancellableActionTestPlugin.capturingActionOnNode(actionName, node) + ) { + final var responseFuture = new PlainActionFuture(); + final var restInvocation = restClient.performRequestAsync(request, wrapAsRestResponseListener(responseFuture)); + + if (randomBoolean()) { + // cancel by aborting the REST request + capturingAction.captureAndCancel(restInvocation::cancel); + expectThrows(ExecutionException.class, CancellationException.class, () -> responseFuture.get(10, TimeUnit.SECONDS)); + } else { + // cancel via the task management API + final var cancelFuture = new PlainActionFuture(); + capturingAction.captureAndCancel( + () -> SubscribableListener + + .newForked( + l -> restClient.performRequestAsync( + getListTasksRequest(node, actionName), + wrapAsRestResponseListener(l.map(ObjectPath::createFromResponse)) + ) + ) + + .andThen((l, listTasksResponse) -> { + final var taskCount = listTasksResponse.evaluateArraySize("tasks"); + assertThat(taskCount, greaterThan(0)); + try (var listeners = new RefCountingListener(l)) { + for (int i = 0; i < taskCount; i++) { + final var taskPrefix = "tasks." + i + "."; + assertTrue(listTasksResponse.evaluate(taskPrefix + "cancellable")); + assertFalse(listTasksResponse.evaluate(taskPrefix + "cancelled")); + restClient.performRequestAsync( + getCancelTaskRequest( + listTasksResponse.evaluate(taskPrefix + "node"), + listTasksResponse.evaluate(taskPrefix + "id") + ), + wrapAsRestResponseListener(listeners.acquire(EnrichRestActionCancellationIT::assertOK)) + ); + } + } + }) + + .addListener(cancelFuture) + ); + cancelFuture.get(10, TimeUnit.SECONDS); + expectThrows(Exception.class, () -> responseFuture.get(10, TimeUnit.SECONDS)); + } + + assertAllTasksHaveFinished(actionName); + } catch (Exception e) { + fail(e); + } + } + + private static Request getListTasksRequest(String taskNode, String actionName) { + final var listTasksRequest = new Request(HttpGet.METHOD_NAME, "/_tasks"); + listTasksRequest.addParameter("nodes", taskNode); + listTasksRequest.addParameter("actions", actionName); + listTasksRequest.addParameter("group_by", "none"); + return listTasksRequest; + } + + private static Request getCancelTaskRequest(String taskNode, int taskId) { + final var cancelTaskRequest = new Request(HttpPost.METHOD_NAME, Strings.format("/_tasks/%s:%d/_cancel", taskNode, taskId)); + cancelTaskRequest.addParameter("wait_for_completion", null); + return cancelTaskRequest; + } + + public static void assertOK(Response response) { + assertThat(response.getStatusLine().getStatusCode(), oneOf(200, 201)); + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java index 82f9877826a5c..6e7f3846963ca 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java @@ -81,6 +81,10 @@ public static void putPolicy( } updateClusterState(clusterService, handler, current -> { + final Map originalPolicies = getPolicies(current); + if (originalPolicies.containsKey(name)) { + throw new ResourceAlreadyExistsException("policy [{}] already exists", name); + } for (String indexExpression : policy.getIndices()) { // indices field in policy can contain wildcards, aliases etc. 
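The cancel-via-task-API branch above chains async steps with `SubscribableListener`: list the matching tasks, assert each is cancellable, then POST a cancel for each before waiting on `cancelFuture`. The same control flow in plain-JDK terms, with `CompletableFuture` as a stand-in and the endpoints printed rather than called:

```java
import java.util.List;
import java.util.concurrent.CompletableFuture;

class CancelFlowSketch {
    record TaskInfo(String node, int id, boolean cancellable) {}

    public static void main(String[] args) {
        CompletableFuture
            .supplyAsync(() -> List.of(new TaskInfo("node-0", 42, true))) // step 1: list tasks
            .thenCompose(tasks -> CompletableFuture.allOf(                // step 2: cancel each one
                tasks.stream()
                    .filter(TaskInfo::cancellable)
                    .map(t -> CompletableFuture.runAsync(
                        () -> System.out.printf("POST /_tasks/%s:%d/_cancel%n", t.node(), t.id())
                    ))
                    .toArray(CompletableFuture[]::new)
            ))
            .join(); // wait, as cancelFuture.get(10, TimeUnit.SECONDS) does above
    }
}
```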
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames( @@ -101,12 +105,9 @@ public static void putPolicy( } } - final Map policies = getPolicies(current); - EnrichPolicy existing = policies.putIfAbsent(name, policy); - if (existing != null) { - throw new ResourceAlreadyExistsException("policy [{}] already exists", name); - } - return policies; + final Map updatedPolicies = new HashMap<>(originalPolicies); + updatedPolicies.put(name, policy); + return updatedPolicies; }); } @@ -125,13 +126,14 @@ public static void deletePolicy(String name, ClusterService clusterService, Cons } updateClusterState(clusterService, handler, current -> { - final Map policies = getPolicies(current); - if (policies.containsKey(name) == false) { + final Map originalPolicies = getPolicies(current); + if (originalPolicies.containsKey(name) == false) { throw new ResourceNotFoundException("policy [{}] not found", name); } - policies.remove(name); - return policies; + final Map updatedPolicies = new HashMap<>(originalPolicies); + updatedPolicies.remove(name); + return updatedPolicies; }); } @@ -153,18 +155,11 @@ public static EnrichPolicy getPolicy(String name, ClusterState state) { * Gets all policies in the cluster. * * @param state the cluster state - * @return a Map of policyName, EnrichPolicy of the policies + * @return a read-only Map of policyName, EnrichPolicy of the policies */ public static Map getPolicies(ClusterState state) { - final Map policies; - final EnrichMetadata enrichMetadata = state.metadata().custom(EnrichMetadata.TYPE); - if (enrichMetadata != null) { - // Make a copy, because policies map inside custom metadata is read only: - policies = new HashMap<>(enrichMetadata.getPolicies()); - } else { - policies = new HashMap<>(); - } - return policies; + final EnrichMetadata metadata = state.metadata().custom(EnrichMetadata.TYPE, EnrichMetadata.EMPTY); + return metadata.getPolicies(); } private static void updateClusterState( diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java index c43dc99f147b0..a7167e176cbef 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java @@ -25,10 +25,8 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.Preference; -import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -216,10 +214,7 @@ protected ShardsIterator shards(ClusterState state, InternalRequest request) { if (numShards != 1) { throw new IllegalStateException("index [" + index + "] should have 1 shard, but has " + numShards + " shards"); } - - GroupShardsIterator result = clusterService.operationRouting() - .searchShards(state, new String[] { index }, null, Preference.LOCAL.type()); - return result.get(0); + return clusterService.operationRouting().searchShards(state, new String[] { 
index }, null, Preference.LOCAL.type()).getFirst(); } @Override diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java index b78aaa28428ce..c42bdcdfcea7c 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java @@ -9,17 +9,18 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.action.support.ChannelActionListener; +import org.elasticsearch.action.support.local.TransportLocalClusterStateAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; @@ -31,34 +32,44 @@ import java.util.Objects; import java.util.stream.Collectors; -public class TransportEnrichStatsAction extends TransportMasterNodeAction { +public class TransportEnrichStatsAction extends TransportLocalClusterStateAction { private final Client client; + /** + * NB prior to 9.0 this was a TransportMasterNodeAction so for BwC it must be registered with the TransportService until + * we no longer need to support calling this action remotely. 
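The `EnrichStore` changes a little above stop mutating the map read out of the cluster-state custom metadata: `getPolicies` now returns a read-only map, so `putPolicy` and `deletePolicy` validate against it first and then copy-and-modify. The pattern in miniature, with `String` standing in for `EnrichPolicy`:

```java
import java.util.HashMap;
import java.util.Map;

class EnrichStoreSketch {
    // Validate against the current (read-only) map, then copy and put.
    static Map<String, String> putPolicy(Map<String, String> current, String name, String policy) {
        if (current.containsKey(name)) {
            throw new IllegalStateException("policy [" + name + "] already exists");
        }
        Map<String, String> updated = new HashMap<>(current);
        updated.put(name, policy);
        return updated;
    }

    public static void main(String[] args) {
        Map<String, String> v1 = putPolicy(Map.of(), "users-policy", "{...}");
        Map<String, String> v2 = putPolicy(v1, "hosts-policy", "{...}");
        System.out.println(v2.keySet());
        // putPolicy(v2, "users-policy", "{...}") would throw: the name is taken
    }
}
```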
+ */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) + @SuppressWarnings("this-escape") @Inject public TransportEnrichStatsAction( TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, Client client ) { super( EnrichStatsAction.NAME, - transportService, - clusterService, - threadPool, actionFilters, - EnrichStatsAction.Request::new, - EnrichStatsAction.Response::new, + transportService.getTaskManager(), + clusterService, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.client = client; + + transportService.registerRequestHandler( + actionName, + executor, + false, + true, + EnrichStatsAction.Request::new, + (request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel)) + ); } @Override - protected void masterOperation( + protected void localClusterStateOperation( Task task, EnrichStatsAction.Request request, ClusterState state, @@ -101,6 +112,7 @@ protected void masterOperation( .collect(Collectors.toList()); delegate.onResponse(new EnrichStatsAction.Response(policyExecutionTasks, coordinatorStats, cacheStats)); }); + ((CancellableTask) task).ensureNotCancelled(); client.execute(EnrichCoordinatorStatsAction.INSTANCE, statsRequest, statsListener); } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java index cff0ff60c599b..3af102e481e38 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java @@ -8,16 +8,17 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.action.support.ChannelActionListener; +import org.elasticsearch.action.support.local.TransportLocalClusterStateAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; @@ -26,32 +27,38 @@ import java.util.HashMap; import java.util.Map; -public class TransportGetEnrichPolicyAction extends TransportMasterNodeReadAction< +public class TransportGetEnrichPolicyAction extends TransportLocalClusterStateAction< GetEnrichPolicyAction.Request, GetEnrichPolicyAction.Response> { + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC it must be registered with the TransportService until + * we no longer need to support calling this action remotely. 
+ */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) + @SuppressWarnings("this-escape") @Inject - public TransportGetEnrichPolicyAction( - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver - ) { + public TransportGetEnrichPolicyAction(TransportService transportService, ClusterService clusterService, ActionFilters actionFilters) { super( GetEnrichPolicyAction.NAME, - transportService, - clusterService, - threadPool, actionFilters, - GetEnrichPolicyAction.Request::new, - GetEnrichPolicyAction.Response::new, + transportService.getTaskManager(), + clusterService, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + + transportService.registerRequestHandler( + actionName, + executor, + false, + true, + GetEnrichPolicyAction.Request::new, + (request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel)) + ); } @Override - protected void masterOperation( + protected void localClusterStateOperation( Task task, GetEnrichPolicyAction.Request request, ClusterState state, @@ -71,6 +78,7 @@ protected void masterOperation( } } } + ((CancellableTask) task).ensureNotCancelled(); listener.onResponse(new GetEnrichPolicyAction.Response(policies)); } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java index 2c78556df489d..9f14a5944422b 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; @@ -43,7 +44,11 @@ public Set supportedCapabilities() { @Override protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { final var request = new EnrichStatsAction.Request(RestUtils.getMasterNodeTimeout(restRequest)); - return channel -> client.execute(EnrichStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> new RestCancellableNodeClient(client, restRequest.getHttpChannel()).execute( + EnrichStatsAction.INSTANCE, + request, + new RestToXContentListener<>(channel) + ); } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestGetEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestGetEnrichPolicyAction.java index 2fb9f63c1eb4a..4796bfcdbfeb0 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestGetEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestGetEnrichPolicyAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; @@ -39,6 +40,10 @@ protected RestChannelConsumer 
prepareRequest(final RestRequest restRequest, fina RestUtils.getMasterNodeTimeout(restRequest), Strings.splitStringByCommaToArray(restRequest.param("name")) ); - return channel -> client.execute(GetEnrichPolicyAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> new RestCancellableNodeClient(client, restRequest.getHttpChannel()).execute( + GetEnrichPolicyAction.INSTANCE, + request, + new RestToXContentListener<>(channel) + ); } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java deleted file mode 100644 index aec184472d41e..0000000000000 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.enrich.action; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; -import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CacheStats; -import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; -import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.ExecutingPolicy; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public class EnrichStatsResponseTests extends AbstractWireSerializingTestCase { - - @Override - protected EnrichStatsAction.Response createTestInstance() { - int numExecutingPolicies = randomIntBetween(0, 16); - List executingPolicies = new ArrayList<>(numExecutingPolicies); - for (int i = 0; i < numExecutingPolicies; i++) { - TaskInfo taskInfo = randomTaskInfo(); - executingPolicies.add(new ExecutingPolicy(randomAlphaOfLength(4), taskInfo)); - } - int numCoordinatingStats = randomIntBetween(0, 16); - List coordinatorStats = new ArrayList<>(numCoordinatingStats); - List cacheStats = new ArrayList<>(numCoordinatingStats); - for (int i = 0; i < numCoordinatingStats; i++) { - String nodeId = randomAlphaOfLength(4); - CoordinatorStats stats = new CoordinatorStats( - nodeId, - randomIntBetween(0, 8096), - randomIntBetween(0, 8096), - randomNonNegativeLong(), - randomNonNegativeLong() - ); - coordinatorStats.add(stats); - cacheStats.add( - new CacheStats( - nodeId, - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong() - ) - ); - } - return new EnrichStatsAction.Response(executingPolicies, coordinatorStats, cacheStats); - } - - @Override - protected EnrichStatsAction.Response mutateInstance(EnrichStatsAction.Response instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Writeable.Reader instanceReader() { - return EnrichStatsAction.Response::new; - } - - public static TaskInfo randomTaskInfo() { - String nodeId = randomAlphaOfLength(5); - TaskId taskId = new 
TaskId(nodeId, randomLong()); - String type = randomAlphaOfLength(5); - String action = randomAlphaOfLength(5); - String description = randomAlphaOfLength(5); - long startTime = randomLong(); - long runningTimeNanos = randomNonNegativeLong(); - boolean cancellable = randomBoolean(); - boolean cancelled = cancellable && randomBoolean(); - TaskId parentTaskId = TaskId.EMPTY_TASK_ID; - Map headers = randomBoolean() - ? Collections.emptyMap() - : Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5)); - return new TaskInfo( - taskId, - type, - nodeId, - action, - description, - null, - startTime, - runningTimeNanos, - cancellable, - cancelled, - parentTaskId, - headers - ); - } -} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java deleted file mode 100644 index 051eadac48467..0000000000000 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.enrich.action; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; - -public class GetEnrichPolicyActionRequestTests extends AbstractWireSerializingTestCase { - - @Override - protected GetEnrichPolicyAction.Request createTestInstance() { - return new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, generateRandomStringArray(0, 4, false)); - } - - @Override - protected GetEnrichPolicyAction.Request mutateInstance(GetEnrichPolicyAction.Request instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Writeable.Reader instanceReader() { - return GetEnrichPolicyAction.Request::new; - } -} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java deleted file mode 100644 index c46005163fa12..0000000000000 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.enrich.action; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractXContentSerializingTestCase; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.assertEqualPolicies; -import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.randomEnrichPolicy; -import static org.hamcrest.core.IsEqual.equalTo; - -public class GetEnrichPolicyActionResponseTests extends AbstractXContentSerializingTestCase { - - @Override - protected GetEnrichPolicyAction.Response doParseInstance(XContentParser parser) throws IOException { - Map policies = new HashMap<>(); - assert parser.nextToken() == XContentParser.Token.START_OBJECT; - assert parser.nextToken() == XContentParser.Token.FIELD_NAME; - assert parser.currentName().equals("policies"); - assert parser.nextToken() == XContentParser.Token.START_ARRAY; - - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - assert token == XContentParser.Token.START_OBJECT; - assert parser.nextToken() == XContentParser.Token.FIELD_NAME; - assert parser.currentName().equals("config"); - assert parser.nextToken() == XContentParser.Token.START_OBJECT; - EnrichPolicy.NamedPolicy policy = EnrichPolicy.NamedPolicy.fromXContent(parser); - policies.put(policy.getName(), policy.getPolicy()); - assert parser.nextToken() == XContentParser.Token.END_OBJECT; - } - - return new GetEnrichPolicyAction.Response(policies); - } - - @Override - protected GetEnrichPolicyAction.Response createTestInstance() { - Map items = new HashMap<>(); - for (int i = 0; i < randomIntBetween(0, 3); i++) { - EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); - items.put(randomAlphaOfLength(3), policy); - } - return new GetEnrichPolicyAction.Response(items); - } - - @Override - protected GetEnrichPolicyAction.Response mutateInstance(GetEnrichPolicyAction.Response instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Writeable.Reader instanceReader() { - return GetEnrichPolicyAction.Response::new; - } - - @Override - protected void assertEqualInstances(GetEnrichPolicyAction.Response expectedInstance, GetEnrichPolicyAction.Response newInstance) { - assertNotSame(expectedInstance, newInstance); - // the tests shuffle around the policy query source xcontent type, so this is needed here - assertThat(expectedInstance.getPolicies().size(), equalTo(newInstance.getPolicies().size())); - // since the backing store is a treemap the list will be sorted so we can just check each - // instance is the same - for (int i = 0; i < expectedInstance.getPolicies().size(); i++) { - EnrichPolicy.NamedPolicy expected = expectedInstance.getPolicies().get(i); - EnrichPolicy.NamedPolicy newed = newInstance.getPolicies().get(i); - assertThat(expected.getName(), equalTo(newed.getName())); - assertEqualPolicies(expected.getPolicy(), newed.getPolicy()); - } - } -} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java 
index 6a3c1eb2555b1..448f6d42a992c 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java @@ -10,6 +10,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; @@ -17,6 +19,7 @@ import org.elasticsearch.xpack.enrich.EnrichPolicyLocks; import org.junit.After; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; @@ -34,7 +37,8 @@ public void cleanupPolicies() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); final AtomicReference reference = new AtomicReference<>(); final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); - ActionTestUtils.execute(transportAction, null, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { + final var task = createTask(); + ActionTestUtils.execute(transportAction, task, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { @Override public void onResponse(GetEnrichPolicyAction.Response response) { reference.set(response); @@ -43,7 +47,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } }); latch.await(); @@ -74,7 +78,8 @@ public void testListPolicies() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); final AtomicReference reference = new AtomicReference<>(); final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); - ActionTestUtils.execute(transportAction, null, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { + final var task = createTask(); + ActionTestUtils.execute(transportAction, task, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { @Override public void onResponse(GetEnrichPolicyAction.Response response) { reference.set(response); @@ -83,7 +88,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } }); latch.await(); @@ -101,7 +106,8 @@ public void testListEmptyPolicies() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); final AtomicReference reference = new AtomicReference<>(); final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); - ActionTestUtils.execute(transportAction, null, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { + final var task = createTask(); + ActionTestUtils.execute(transportAction, task, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { @Override public void onResponse(GetEnrichPolicyAction.Response response) { reference.set(response); @@ -110,7 +116,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) 
{ - fail(); + fail(e); } }); latch.await(); @@ -137,7 +143,7 @@ public void testGetPolicy() throws InterruptedException { final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); ActionTestUtils.execute( transportAction, - null, + createTask(), new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, name), new ActionListener<>() { @Override @@ -147,7 +153,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } } ); @@ -184,7 +190,7 @@ public void testGetMultiplePolicies() throws InterruptedException { final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); ActionTestUtils.execute( transportAction, - null, + createTask(), new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, name, anotherName), new ActionListener<>() { @Override @@ -194,7 +200,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } } ); @@ -218,7 +224,7 @@ public void testGetPolicyThrowsError() throws InterruptedException { final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); ActionTestUtils.execute( transportAction, - null, + createTask(), new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "non-exists"), new ActionListener<>() { @Override @@ -228,7 +234,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } } ); @@ -236,4 +242,8 @@ public void onFailure(final Exception e) { assertNotNull(reference.get()); assertThat(reference.get().getPolicies().size(), equalTo(0)); } + + private static CancellableTask createTask() { + return new CancellableTask(randomNonNegativeLong(), "test", GetEnrichPolicyAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()); + } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java index 2a069eb596760..b7537089413d7 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java @@ -13,6 +13,8 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.ExecutingPolicy; @@ -21,9 +23,10 @@ import org.elasticsearch.xpack.monitoring.BaseCollectorTestCase; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.Map; -import static org.elasticsearch.xpack.enrich.action.EnrichStatsResponseTests.randomTaskInfo; import static org.elasticsearch.xpack.monitoring.MonitoringTestUtils.randomMonitoringNode; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -154,4 +157,34 @@ private 
EnrichStatsCollector createCollector(ClusterService clusterService, XPac return new EnrichStatsCollector(clusterService, licenseState, client); } + public static TaskInfo randomTaskInfo() { + String nodeId = randomAlphaOfLength(5); + TaskId taskId = new TaskId(nodeId, randomLong()); + String type = randomAlphaOfLength(5); + String action = randomAlphaOfLength(5); + String description = randomAlphaOfLength(5); + long startTime = randomLong(); + long runningTimeNanos = randomNonNegativeLong(); + boolean cancellable = randomBoolean(); + boolean cancelled = cancellable && randomBoolean(); + TaskId parentTaskId = TaskId.EMPTY_TASK_ID; + Map headers = randomBoolean() + ? Collections.emptyMap() + : Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5)); + return new TaskInfo( + taskId, + type, + nodeId, + action, + description, + null, + startTime, + runningTimeNanos, + cancellable, + cancelled, + parentTaskId, + headers + ); + } + } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java index 3352e6e2bb8a4..72dccf77f391f 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java @@ -23,8 +23,8 @@ import java.util.Optional; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.enrich.action.EnrichStatsResponseTests.randomTaskInfo; import static org.elasticsearch.xpack.monitoring.collector.enrich.EnrichCoordinatorDocTests.DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.monitoring.collector.enrich.EnrichStatsCollectorTests.randomTaskInfo; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java index a00dc28bd5fb6..6d2d3c33d3e94 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java @@ -43,7 +43,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas @ClassRule public static final ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(org.elasticsearch.test.cluster.util.Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml index 089a078c62207..4ce0c55511cbd 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml 
+++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml @@ -288,10 +288,9 @@ setup: rank_window_size: 1 - match: { hits.total.value: 3 } + - length: { hits.hits: 1 } - match: { hits.hits.0._id: foo } - match: { hits.hits.0._score: 1.7014124E38 } - - match: { hits.hits.1._score: 0 } - - match: { hits.hits.2._score: 0 } - do: headers: @@ -315,12 +314,10 @@ setup: rank_window_size: 2 - match: { hits.total.value: 3 } + - length: { hits.hits: 2 } - match: { hits.hits.0._id: foo } - match: { hits.hits.0._score: 1.7014124E38 } - match: { hits.hits.1._id: foo2 } - - match: { hits.hits.1._score: 1.7014122E38 } - - match: { hits.hits.2._id: bar_no_rule } - - match: { hits.hits.2._score: 0 } - do: headers: @@ -344,6 +341,7 @@ setup: rank_window_size: 10 - match: { hits.total.value: 3 } + - length: { hits.hits: 3 } - match: { hits.hits.0._id: foo } - match: { hits.hits.0._score: 1.7014124E38 } - match: { hits.hits.1._id: foo2 } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java index cbede5871f275..ccaf4ce3a8861 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/Verifier.java @@ -71,6 +71,9 @@ Collection verify(LogicalPlan plan) { // start bottom-up plan.forEachUp(p -> { + if (p.getClass().equals(Join.class)) { + failures.add(fail(p, "JOIN command is not supported")); + } if (p.analyzed()) { return; } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java index fc41bdd627c95..2e8b8578b5056 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java @@ -10,23 +10,21 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import static org.hamcrest.Matchers.equalTo; public abstract class AbstractBWCSerializationTestCase extends AbstractXContentSerializingTestCase { - private static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + private static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - private static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + private static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); protected abstract T mutateInstanceForVersion(T instance, TransportVersion version); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java index 
30777f43597c8..76c2b3355e236 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java @@ -10,22 +10,20 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; import java.io.IOException; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import static org.hamcrest.Matchers.equalTo; public abstract class AbstractBWCWireSerializingTestCase extends AbstractWireSerializingTestCase { - private static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + private static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - private static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + private static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); protected abstract T mutateInstanceForVersion(T instance, TransportVersion version); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java index bec71a9846562..887132ab729e9 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java @@ -368,6 +368,13 @@ public void testJoin() { accept(idxr, "foo where serial_event_id == 0"); } + public void testJoinCommand() { + final IndexResolution idxr = loadIndexResolution("mapping-ip.json"); + + assertEquals("1:1: JOIN command is not supported", error(idxr, "join [any where true] [any where true]")); + assertEquals("1:1: JOIN command is not supported", error(idxr, "join [any where true] [any where true] | tail 3")); + } + public void testMultiField() { final IndexResolution idxr = loadIndexResolution("mapping-multi-field.json"); accept(idxr, "foo where multi_field.raw == \"bar\""); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 0f1cfbb85039c..dc75ac3a96248 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -32,6 +32,7 @@ public class MetadataAttribute extends TypedAttribute { public static final String TIMESTAMP_FIELD = "@timestamp"; public static final String TSID_FIELD = "_tsid"; public static final String SCORE = "_score"; + public static final String INDEX = "_index"; static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Attribute.class, @@ -42,7 +43,7 @@ public class MetadataAttribute extends TypedAttribute { private static final Map> ATTRIBUTES_MAP = Map.of( "_version", tuple(DataType.LONG, false), // _version field is not searchable - "_index", + INDEX, tuple(DataType.KEYWORD, true), 
IdFieldMapper.NAME, tuple(DataType.KEYWORD, false), // actually searchable, but fielddata access on the _id field is disallowed by default diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java index 61b480968e974..729188e2981d9 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java @@ -14,4 +14,5 @@ public class EsqlCorePlugin extends Plugin implements ExtensiblePlugin { public static final FeatureFlag SEMANTIC_TEXT_FEATURE_FLAG = new FeatureFlag("esql_semantic_text"); + public static final FeatureFlag AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG = new FeatureFlag("esql_aggregate_metric_double"); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index d86cdb0de038c..671e2df3650dd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -307,7 +307,9 @@ public enum DataType { * loaded from the index and ESQL will load these fields as strings without their attached * chunks or embeddings. */ - SEMANTIC_TEXT(builder().esType("semantic_text").unknownSize()); + SEMANTIC_TEXT(builder().esType("semantic_text").unknownSize()), + + AGGREGATE_METRIC_DOUBLE(builder().esType("aggregate_metric_double").estimatedSize(Double.BYTES * 3 + Integer.BYTES)); /** * Types that are actively being built. These types are not returned @@ -316,7 +318,8 @@ public enum DataType { * check that sending them to a function produces a sane error message. */ public static final Map UNDER_CONSTRUCTION = Map.ofEntries( - Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG) + Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG), + Map.entry(AGGREGATE_METRIC_DOUBLE, EsqlCorePlugin.AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG) ); private final String typeName; @@ -553,6 +556,7 @@ public static boolean isRepresentable(DataType t) { && t != SOURCE && t != HALF_FLOAT && t != PARTIAL_AGG + && t != AGGREGATE_METRIC_DOUBLE && t.isCounter() == false; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Holder.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Holder.java index 1290bbca59ee7..9aadcefb84e84 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Holder.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Holder.java @@ -26,6 +26,16 @@ public void set(T value) { this.value = value; } + /** + * Sets a value in the holder, but only if none has already been set. + * @param value the new value to set. 
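setIfAbsent (body just below) gives the holder first-writer-wins semantics, useful when a tree walk should remember only the first value it encounters. A tiny self-contained illustration:

Holder<String> first = new Holder<>();
for (String v : List.of("a", "b", "c")) {
    first.setIfAbsent(v); // no-op once a value is present
}
assert "a".equals(first.get());

Note that the null-based check means the holder cannot distinguish "never set" from an explicitly stored null.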
+ */ + public void setIfAbsent(T value) { + if (this.value == null) { + this.value = value; + } + } + public T get() { return value; } diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java index 444dbcc1b9e58..cdff28acecdbe 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Aggregator.java @@ -37,18 +37,10 @@ * are ever collected. *

</p> * <p> - * The generation code will also look for a method called {@code combineValueCount} - * which is called once per received block with a count of values. NOTE: We may - * not need this after we convert AVG into a composite operation. - * </p> - * <p> * The generation code also looks for the optional methods {@code combineIntermediate} * and {@code evaluateFinal} which are used to combine intermediate states and - * produce the final output. If the first is missing then the generated code will - * call the {@code combine} method to combine intermediate states. If the second - * is missing the generated code will make a block containing the primitive from - * the state. If either of those don't have sensible interpretations then the code - * generation code will throw an error, aborting the compilation. + * produce the final output. Please note, those are auto-generated when aggregating + * primitive types such as boolean, int, long, float, double. * </p>
*/ @Target(ElementType.TYPE) @@ -63,4 +55,8 @@ */ Class[] warnExceptions() default {}; + /** + * If {@code true} then the @timestamp LongVector will be appended to the input blocks of the aggregation function. + */ + boolean includeTimestamps() default false; } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java index f11ccbced6fbe..52e4e66f38482 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java @@ -31,8 +31,10 @@ import static org.elasticsearch.compute.gen.Types.AGGREGATOR_FUNCTION_SUPPLIER; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; +import static org.elasticsearch.compute.gen.Types.LIST_AGG_FUNC_DESC; import static org.elasticsearch.compute.gen.Types.LIST_INTEGER; import static org.elasticsearch.compute.gen.Types.STRING; +import static org.elasticsearch.compute.gen.Types.WARNINGS; /** * Implements "AggregationFunctionSupplier" from a class annotated with both @@ -66,7 +68,6 @@ public AggregatorFunctionSupplierImplementer( createParameters.addAll(groupingAggregatorImplementer.createParameters()); } this.createParameters = new ArrayList<>(createParameters); - this.createParameters.add(0, new Parameter(LIST_INTEGER, "channels")); this.implementation = ClassName.get( elements.getPackageOf(declarationType).toString(), @@ -87,7 +88,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", AGGREGATOR_FUNCTION_SUPPLIER, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. 
Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(AGGREGATOR_FUNCTION_SUPPLIER); @@ -98,11 +99,9 @@ private TypeSpec type() { } createParameters.stream().forEach(p -> p.declareField(builder)); builder.addMethod(ctor()); - if (aggregatorImplementer != null) { - builder.addMethod(aggregator()); - } else { - builder.addMethod(unsupportedNonGroupingAggregator()); - } + builder.addMethod(nonGroupingIntermediateStateDesc()); + builder.addMethod(groupingIntermediateStateDesc()); + builder.addMethod(aggregator()); builder.addMethod(groupingAggregator()); builder.addMethod(describe()); return builder.build(); @@ -122,12 +121,28 @@ private MethodSpec ctor() { return builder.build(); } - private MethodSpec unsupportedNonGroupingAggregator() { - MethodSpec.Builder builder = MethodSpec.methodBuilder("aggregator") - .addParameter(DRIVER_CONTEXT, "driverContext") - .returns(Types.AGGREGATOR_FUNCTION); + private MethodSpec nonGroupingIntermediateStateDesc() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("nonGroupingIntermediateStateDesc"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); - builder.addStatement("throw new UnsupportedOperationException($S)", "non-grouping aggregator is not supported"); + builder.returns(LIST_AGG_FUNC_DESC); + + if (aggregatorImplementer == null) { + builder.addStatement("throw new UnsupportedOperationException($S)", "non-grouping aggregator is not supported"); + return builder.build(); + } + + builder.addStatement("return $T.intermediateStateDesc()", aggregatorImplementer.implementation()); + + return builder.build(); + } + + private MethodSpec groupingIntermediateStateDesc() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("groupingIntermediateStateDesc"); + builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); + builder.returns(LIST_AGG_FUNC_DESC); + + builder.addStatement("return $T.intermediateStateDesc()", groupingAggregatorImplementer.implementation()); + return builder.build(); } @@ -135,12 +150,21 @@ private MethodSpec aggregator() { MethodSpec.Builder builder = MethodSpec.methodBuilder("aggregator"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); builder.addParameter(DRIVER_CONTEXT, "driverContext"); + builder.addParameter(LIST_INTEGER, "channels"); + + if (aggregatorImplementer == null) { + builder.returns(Types.AGGREGATOR_FUNCTION); + builder.addStatement("throw new UnsupportedOperationException($S)", "non-grouping aggregator is not supported"); + return builder.build(); + } + builder.returns(aggregatorImplementer.implementation()); if (hasWarnings) { builder.addStatement( - "var warnings = Warnings.createWarnings(driverContext.warningsMode(), " - + "warningsLineNumber, warningsColumnNumber, warningsSourceText)" + "var warnings = $T.createWarnings(driverContext.warningsMode(), " + + "warningsLineNumber, warningsColumnNumber, warningsSourceText)", + WARNINGS ); } @@ -160,12 +184,14 @@ private MethodSpec groupingAggregator() { MethodSpec.Builder builder = MethodSpec.methodBuilder("groupingAggregator"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); builder.addParameter(DRIVER_CONTEXT, "driverContext"); + builder.addParameter(LIST_INTEGER, "channels"); builder.returns(groupingAggregatorImplementer.implementation()); if (hasWarnings) { builder.addStatement( - "var warnings = Warnings.createWarnings(driverContext.warningsMode(), " - + "warningsLineNumber, warningsColumnNumber, 
warningsSourceText)" + "var warnings = $T.createWarnings(driverContext.warningsMode(), " + + "warningsLineNumber, warningsColumnNumber, warningsSourceText)", + WARNINGS ); } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 4589ab13a4e39..d775a46109214 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -17,12 +17,12 @@ import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.gen.Methods.TypeMatcher; import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Objects; -import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -34,27 +34,24 @@ import javax.lang.model.util.Elements; import static java.util.stream.Collectors.joining; -import static org.elasticsearch.compute.gen.Methods.findMethod; -import static org.elasticsearch.compute.gen.Methods.findRequiredMethod; +import static org.elasticsearch.compute.gen.Methods.requireAnyArgs; +import static org.elasticsearch.compute.gen.Methods.requireAnyType; +import static org.elasticsearch.compute.gen.Methods.requireArgs; +import static org.elasticsearch.compute.gen.Methods.requireName; +import static org.elasticsearch.compute.gen.Methods.requirePrimitiveOrImplements; +import static org.elasticsearch.compute.gen.Methods.requireStaticMethod; +import static org.elasticsearch.compute.gen.Methods.requireType; +import static org.elasticsearch.compute.gen.Methods.requireVoidType; import static org.elasticsearch.compute.gen.Methods.vectorAccessorName; import static org.elasticsearch.compute.gen.Types.AGGREGATOR_FUNCTION; import static org.elasticsearch.compute.gen.Types.BIG_ARRAYS; import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BLOCK_ARRAY; -import static org.elasticsearch.compute.gen.Types.BOOLEAN_BLOCK; import static org.elasticsearch.compute.gen.Types.BOOLEAN_VECTOR; import static org.elasticsearch.compute.gen.Types.BYTES_REF; -import static org.elasticsearch.compute.gen.Types.BYTES_REF_BLOCK; -import static org.elasticsearch.compute.gen.Types.BYTES_REF_VECTOR; -import static org.elasticsearch.compute.gen.Types.DOUBLE_BLOCK; -import static org.elasticsearch.compute.gen.Types.DOUBLE_VECTOR; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.ELEMENT_TYPE; -import static org.elasticsearch.compute.gen.Types.FLOAT_BLOCK; -import static org.elasticsearch.compute.gen.Types.FLOAT_VECTOR; import static org.elasticsearch.compute.gen.Types.INTERMEDIATE_STATE_DESC; -import static org.elasticsearch.compute.gen.Types.INT_BLOCK; -import static org.elasticsearch.compute.gen.Types.INT_VECTOR; import static org.elasticsearch.compute.gen.Types.LIST_AGG_FUNC_DESC; import static org.elasticsearch.compute.gen.Types.LIST_INTEGER; import static org.elasticsearch.compute.gen.Types.LONG_BLOCK; @@ -78,46 +75,41 @@ public class AggregatorImplementer { private final List warnExceptions; private final ExecutableElement init; private final ExecutableElement combine; - private final ExecutableElement combineValueCount; - private final ExecutableElement 
combineIntermediate; - private final ExecutableElement evaluateFinal; + private final List createParameters; private final ClassName implementation; - private final TypeName stateType; - private final boolean stateTypeHasSeen; - private final boolean stateTypeHasFailed; - private final boolean valuesIsBytesRef; - private final boolean valuesIsArray; private final List intermediateState; - private final List createParameters; + private final boolean includeTimestampVector; + + private final AggregationState aggState; + private final AggregationParameter aggParam; public AggregatorImplementer( Elements elements, TypeElement declarationType, IntermediateState[] interStateAnno, - List warnExceptions + List warnExceptions, + boolean includeTimestampVector ) { this.declarationType = declarationType; this.warnExceptions = warnExceptions; - this.init = findRequiredMethod(declarationType, new String[] { "init", "initSingle" }, e -> true); - this.stateType = choseStateType(); - this.stateTypeHasSeen = elements.getAllMembers(elements.getTypeElement(stateType.toString())) - .stream() - .anyMatch(e -> e.toString().equals("seen()")); - this.stateTypeHasFailed = elements.getAllMembers(elements.getTypeElement(stateType.toString())) - .stream() - .anyMatch(e -> e.toString().equals("failed()")); + this.init = requireStaticMethod( + declarationType, + requirePrimitiveOrImplements(elements, Types.AGGREGATOR_STATE), + requireName("init", "initSingle"), + requireAnyArgs("") + ); + this.aggState = AggregationState.create(elements, init.getReturnType(), warnExceptions.isEmpty() == false, false); + + this.combine = requireStaticMethod( + declarationType, + aggState.declaredType().isPrimitive() ? requireType(aggState.declaredType()) : requireVoidType(), + requireName("combine"), + combineArgs(aggState, includeTimestampVector) + ); + // TODO support multiple parameters + this.aggParam = AggregationParameter.create(combine.getParameters().getLast().asType()); - this.combine = findRequiredMethod(declarationType, new String[] { "combine" }, e -> { - if (e.getParameters().size() == 0) { - return false; - } - TypeName firstParamType = TypeName.get(e.getParameters().get(0).asType()); - return firstParamType.isPrimitive() || firstParamType.toString().equals(stateType.toString()); - }); - this.combineValueCount = findMethod(declarationType, "combineValueCount"); - this.combineIntermediate = findMethod(declarationType, "combineIntermediate"); - this.evaluateFinal = findMethod(declarationType, "evaluateFinal"); this.createParameters = init.getParameters() .stream() .map(Parameter::from) @@ -128,9 +120,20 @@ public AggregatorImplementer( elements.getPackageOf(declarationType).toString(), (declarationType.getSimpleName() + "AggregatorFunction").replace("AggregatorAggregator", "Aggregator") ); - this.valuesIsBytesRef = BYTES_REF.equals(valueTypeName()); - this.valuesIsArray = TypeKind.ARRAY.equals(valueTypeKind()); - intermediateState = Arrays.stream(interStateAnno).map(IntermediateStateDesc::newIntermediateStateDesc).toList(); + this.intermediateState = Arrays.stream(interStateAnno).map(IntermediateStateDesc::newIntermediateStateDesc).toList(); + this.includeTimestampVector = includeTimestampVector; + } + + private static Methods.ArgumentMatcher combineArgs(AggregationState aggState, boolean includeTimestampVector) { + if (includeTimestampVector) { + return requireArgs( + requireType(aggState.declaredType()), + requireType(TypeName.LONG), // @timestamp + requireAnyType("") + ); + } else { + return 
requireArgs(requireType(aggState.declaredType()), requireAnyType("")); + } } ClassName implementation() { @@ -141,68 +144,8 @@ List createParameters() { return createParameters; } - private TypeName choseStateType() { - TypeName initReturn = TypeName.get(init.getReturnType()); - if (false == initReturn.isPrimitive()) { - return initReturn; - } - String simpleName = firstUpper(initReturn.toString()); - if (warnExceptions.isEmpty()) { - return ClassName.get("org.elasticsearch.compute.aggregation", simpleName + "State"); - } - return ClassName.get("org.elasticsearch.compute.aggregation", simpleName + "FallibleState"); - } - - static String valueType(ExecutableElement init, ExecutableElement combine) { - if (combine != null) { - // If there's an explicit combine function it's final parameter is the type of the value. - return combine.getParameters().get(combine.getParameters().size() - 1).asType().toString(); - } - String initReturn = init.getReturnType().toString(); - switch (initReturn) { - case "double": - return "double"; - case "float": - return "float"; - case "long": - return "long"; - case "int": - return "int"; - case "boolean": - return "boolean"; - default: - throw new IllegalArgumentException("unknown primitive type for " + initReturn); - } - } - - static ClassName valueBlockType(ExecutableElement init, ExecutableElement combine) { - return switch (valueType(init, combine)) { - case "boolean" -> BOOLEAN_BLOCK; - case "double" -> DOUBLE_BLOCK; - case "float" -> FLOAT_BLOCK; - case "long" -> LONG_BLOCK; - case "int", "int[]" -> INT_BLOCK; - case "org.apache.lucene.util.BytesRef" -> BYTES_REF_BLOCK; - default -> throw new IllegalArgumentException("unknown block type for " + valueType(init, combine)); - }; - } - - static ClassName valueVectorType(ExecutableElement init, ExecutableElement combine) { - return switch (valueType(init, combine)) { - case "boolean" -> BOOLEAN_VECTOR; - case "double" -> DOUBLE_VECTOR; - case "float" -> FLOAT_VECTOR; - case "long" -> LONG_VECTOR; - case "int", "int[]" -> INT_VECTOR; - case "org.apache.lucene.util.BytesRef" -> BYTES_REF_VECTOR; - default -> throw new IllegalArgumentException("unknown vector type for " + valueType(init, combine)); - }; - } - - public static String firstUpper(String s) { - String head = s.toString().substring(0, 1).toUpperCase(Locale.ROOT); - String tail = s.toString().substring(1); - return head + tail; + public static String capitalize(String s) { + return Character.toUpperCase(s.charAt(0)) + s.substring(1); } public JavaFile sourceFile() { @@ -218,7 +161,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", AGGREGATOR_FUNCTION, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. 
Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(AGGREGATOR_FUNCTION); builder.addField( @@ -232,7 +175,7 @@ private TypeSpec type() { } builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL); - builder.addField(stateType, "state", Modifier.PRIVATE, Modifier.FINAL); + builder.addField(aggState.type, "state", Modifier.PRIVATE, Modifier.FINAL); builder.addField(LIST_INTEGER, "channels", Modifier.PRIVATE, Modifier.FINAL); for (Parameter p : createParameters) { @@ -292,10 +235,10 @@ private CodeBlock callInit() { .map(p -> TypeName.get(p.asType()).equals(BIG_ARRAYS) ? "driverContext.bigArrays()" : p.getSimpleName().toString()) .collect(joining(", ")); CodeBlock.Builder builder = CodeBlock.builder(); - if (init.getReturnType().toString().equals(stateType.toString())) { - builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall); + if (aggState.declaredType().isPrimitive()) { + builder.add("new $T($T.$L($L))", aggState.type(), declarationType, init.getSimpleName(), initParametersCall); } else { - builder.add("new $T($T.$L($L))", stateType, declarationType, init.getSimpleName(), initParametersCall); + builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall); } return builder.build(); } @@ -320,7 +263,7 @@ private MethodSpec ctor() { } builder.addParameter(DRIVER_CONTEXT, "driverContext"); builder.addParameter(LIST_INTEGER, "channels"); - builder.addParameter(stateType, "state"); + builder.addParameter(aggState.type, "state"); if (warnExceptions.isEmpty() == false) { builder.addStatement("this.warnings = warnings"); @@ -352,7 +295,7 @@ private MethodSpec intermediateBlockCount() { private MethodSpec addRawInput() { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawInput"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page").addParameter(BOOLEAN_VECTOR, "mask"); - if (stateTypeHasFailed) { + if (aggState.hasFailed()) { builder.beginControlFlow("if (state.failed())"); builder.addStatement("return"); builder.endControlFlow(); @@ -366,43 +309,62 @@ private MethodSpec addRawInput() { builder.beginControlFlow("if (mask.allTrue())"); { builder.addComment("No masking"); - builder.addStatement("$T block = page.getBlock(channels.get(0))", valueBlockType(init, combine)); - builder.addStatement("$T vector = block.asVector()", valueVectorType(init, combine)); + builder.addStatement("$T block = page.getBlock(channels.get(0))", blockType(aggParam.type())); + builder.addStatement("$T vector = block.asVector()", vectorType(aggParam.type())); + if (includeTimestampVector) { + builder.addStatement("$T timestampsBlock = page.getBlock(channels.get(1))", LONG_BLOCK); + builder.addStatement("$T timestampsVector = timestampsBlock.asVector()", LONG_VECTOR); + + builder.beginControlFlow("if (timestampsVector == null) "); + builder.addStatement("throw new IllegalStateException($S)", "expected @timestamp vector; but got a block"); + builder.endControlFlow(); + } builder.beginControlFlow("if (vector != null)"); - builder.addStatement("addRawVector(vector)"); + builder.addStatement(includeTimestampVector ? "addRawVector(vector, timestampsVector)" : "addRawVector(vector)"); builder.nextControlFlow("else"); - builder.addStatement("addRawBlock(block)"); + builder.addStatement(includeTimestampVector ? 
"addRawBlock(block, timestampsVector)" : "addRawBlock(block)"); builder.endControlFlow(); builder.addStatement("return"); } builder.endControlFlow(); builder.addComment("Some positions masked away, others kept"); - builder.addStatement("$T block = page.getBlock(channels.get(0))", valueBlockType(init, combine)); - builder.addStatement("$T vector = block.asVector()", valueVectorType(init, combine)); + builder.addStatement("$T block = page.getBlock(channels.get(0))", blockType(aggParam.type())); + builder.addStatement("$T vector = block.asVector()", vectorType(aggParam.type())); + if (includeTimestampVector) { + builder.addStatement("$T timestampsBlock = page.getBlock(channels.get(1))", LONG_BLOCK); + builder.addStatement("$T timestampsVector = timestampsBlock.asVector()", LONG_VECTOR); + + builder.beginControlFlow("if (timestampsVector == null) "); + builder.addStatement("throw new IllegalStateException($S)", "expected @timestamp vector; but got a block"); + builder.endControlFlow(); + } builder.beginControlFlow("if (vector != null)"); - builder.addStatement("addRawVector(vector, mask)"); + builder.addStatement(includeTimestampVector ? "addRawVector(vector, timestampsVector, mask)" : "addRawVector(vector, mask)"); builder.nextControlFlow("else"); - builder.addStatement("addRawBlock(block, mask)"); + builder.addStatement(includeTimestampVector ? "addRawBlock(block, timestampsVector, mask)" : "addRawBlock(block, mask)"); builder.endControlFlow(); return builder.build(); } private MethodSpec addRawVector(boolean masked) { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawVector"); - builder.addModifiers(Modifier.PRIVATE).addParameter(valueVectorType(init, combine), "vector"); + builder.addModifiers(Modifier.PRIVATE).addParameter(vectorType(aggParam.type()), "vector"); + if (includeTimestampVector) { + builder.addParameter(LONG_VECTOR, "timestamps"); + } if (masked) { builder.addParameter(BOOLEAN_VECTOR, "mask"); } - if (valuesIsArray) { + if (aggParam.isArray()) { builder.addComment("This type does not support vectors because all values are multi-valued"); return builder.build(); } - if (stateTypeHasSeen) { + if (aggState.hasSeen()) { builder.addStatement("state.seen(true)"); } - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { // Add bytes_ref scratch var that will be used for bytes_ref blocks/vectors builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); } @@ -415,20 +377,20 @@ private MethodSpec addRawVector(boolean masked) { combineRawInput(builder, "vector"); } builder.endControlFlow(); - if (combineValueCount != null) { - builder.addStatement("$T.combineValueCount(state, vector.getPositionCount())", declarationType); - } return builder.build(); } private MethodSpec addRawBlock(boolean masked) { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawBlock"); - builder.addModifiers(Modifier.PRIVATE).addParameter(valueBlockType(init, combine), "block"); + builder.addModifiers(Modifier.PRIVATE).addParameter(blockType(aggParam.type()), "block"); + if (includeTimestampVector) { + builder.addParameter(LONG_VECTOR, "timestamps"); + } if (masked) { builder.addParameter(BOOLEAN_VECTOR, "mask"); } - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { // Add bytes_ref scratch var that will only be used for bytes_ref blocks/vectors builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); } @@ -440,16 +402,16 @@ private MethodSpec addRawBlock(boolean masked) { builder.beginControlFlow("if (block.isNull(p))"); builder.addStatement("continue"); 
builder.endControlFlow(); - if (stateTypeHasSeen) { + if (aggState.hasSeen()) { builder.addStatement("state.seen(true)"); } builder.addStatement("int start = block.getFirstValueIndex(p)"); builder.addStatement("int end = start + block.getValueCount(p)"); - if (valuesIsArray) { - String arrayType = valueTypeString(); + if (aggParam.isArray()) { + String arrayType = aggParam.type().toString().replace("[]", ""); builder.addStatement("$L[] valuesArray = new $L[end - start]", arrayType, arrayType); builder.beginControlFlow("for (int i = start; i < end; i++)"); - builder.addStatement("valuesArray[i-start] = $L.get$L(i)", "block", firstUpper(arrayType)); + builder.addStatement("valuesArray[i-start] = $L.get$L(i)", "block", capitalize(arrayType)); builder.endControlFlow(); combineRawInputForArray(builder, "valuesArray"); } else { @@ -459,16 +421,13 @@ private MethodSpec addRawBlock(boolean masked) { } } builder.endControlFlow(); - if (combineValueCount != null) { - builder.addStatement("$T.combineValueCount(state, block.getTotalValueCount())", declarationType); - } return builder.build(); } private void combineRawInput(MethodSpec.Builder builder, String blockVariable) { TypeName returnType = TypeName.get(combine.getReturnType()); warningsBlock(builder, () -> { - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { combineRawInputForBytesRef(builder, blockVariable); } else if (returnType.isPrimitive()) { combineRawInputForPrimitive(returnType, builder, blockVariable); @@ -480,33 +439,57 @@ private void combineRawInput(MethodSpec.Builder builder, String blockVariable) { }); } - private void combineRawInputForPrimitive(TypeName returnType, MethodSpec.Builder builder, String blockVariable) { - builder.addStatement( - "state.$TValue($T.combine(state.$TValue(), $L.get$L(i)))", - returnType, - declarationType, - returnType, - blockVariable, - firstUpper(combine.getParameters().get(1).asType().toString()) - ); + private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable) { + // scratch is a BytesRef var that must have been defined before the iteration starts + if (includeTimestampVector) { + builder.addStatement("$T.combine(state, timestamps.getLong(i), $L.getBytesRef(i, scratch))", declarationType, blockVariable); + } else { + builder.addStatement("$T.combine(state, $L.getBytesRef(i, scratch))", declarationType, blockVariable); + } } - private void combineRawInputForArray(MethodSpec.Builder builder, String arrayVariable) { - warningsBlock(builder, () -> builder.addStatement("$T.combine(state, $L)", declarationType, arrayVariable)); + private void combineRawInputForPrimitive(TypeName returnType, MethodSpec.Builder builder, String blockVariable) { + if (includeTimestampVector) { + builder.addStatement( + "state.$TValue($T.combine(state.$TValue(), timestamps.getLong(i), $L.get$L(i)))", + returnType, + declarationType, + returnType, + blockVariable, + capitalize(combine.getParameters().get(1).asType().toString()) + ); + } else { + builder.addStatement( + "state.$TValue($T.combine(state.$TValue(), $L.get$L(i)))", + returnType, + declarationType, + returnType, + blockVariable, + capitalize(combine.getParameters().get(1).asType().toString()) + ); + } } private void combineRawInputForVoid(MethodSpec.Builder builder, String blockVariable) { - builder.addStatement( - "$T.combine(state, $L.get$L(i))", - declarationType, - blockVariable, - firstUpper(combine.getParameters().get(1).asType().toString()) - ); + if (includeTimestampVector) { + builder.addStatement( + "$T.combine(state, 
timestamps.getLong(i), $L.get$L(i))", + declarationType, + blockVariable, + capitalize(combine.getParameters().get(1).asType().toString()) + ); + } else { + builder.addStatement( + "$T.combine(state, $L.get$L(i))", + declarationType, + blockVariable, + capitalize(combine.getParameters().get(1).asType().toString()) + ); + } } - private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable) { - // scratch is a BytesRef var that must have been defined before the iteration starts - builder.addStatement("$T.combine(state, $L.getBytesRef(i, scratch))", declarationType, blockVariable); + private void combineRawInputForArray(MethodSpec.Builder builder, String arrayVariable) { + warningsBlock(builder, () -> builder.addStatement("$T.combine(state, $L)", declarationType, arrayVariable)); } private void warningsBlock(MethodSpec.Builder builder, Runnable block) { @@ -534,12 +517,7 @@ private MethodSpec addIntermediateInput() { interState.assignToVariable(builder, i); builder.addStatement("assert $L.getPositionCount() == 1", interState.name()); } - if (combineIntermediate != null) { - if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { - builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); - } - builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType); - } else if (hasPrimitiveState()) { + if (aggState.declaredType().isPrimitive()) { if (warnExceptions.isEmpty()) { assert intermediateState.size() == 2; assert intermediateState.get(1).name().equals("seen"); @@ -557,14 +535,36 @@ private MethodSpec addIntermediateInput() { } warningsBlock(builder, () -> { + var primitiveStateMethod = switch (aggState.declaredType().toString()) { + case "boolean" -> "booleanValue"; + case "int" -> "intValue"; + case "long" -> "longValue"; + case "double" -> "doubleValue"; + case "float" -> "floatValue"; + default -> throw new IllegalArgumentException("Unexpected primitive type: [" + aggState.declaredType() + "]"); + }; var state = intermediateState.get(0); var s = "state.$L($T.combine(state.$L(), " + state.name() + "." + vectorAccessorName(state.elementType()) + "(0)))"; - builder.addStatement(s, primitiveStateMethod(), declarationType, primitiveStateMethod()); + builder.addStatement(s, primitiveStateMethod, declarationType, primitiveStateMethod); builder.addStatement("state.seen(true)"); }); builder.endControlFlow(); } else { - throw new IllegalArgumentException("Don't know how to combine intermediate input. 
Define combineIntermediate"); + requireStaticMethod( + declarationType, + requireVoidType(), + requireName("combineIntermediate"), + requireArgs( + Stream.concat( + Stream.of(aggState.declaredType()), // aggState + intermediateState.stream().map(IntermediateStateDesc::combineArgType) // intermediate state + ).map(Methods::requireType).toArray(TypeMatcher[]::new) + ) + ); + if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { + builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); + } + builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType); } return builder.build(); } @@ -573,25 +573,6 @@ String intermediateStateRowAccess() { return intermediateState.stream().map(desc -> desc.access("0")).collect(joining(", ")); } - private String primitiveStateMethod() { - switch (stateType.toString()) { - case "org.elasticsearch.compute.aggregation.BooleanState", "org.elasticsearch.compute.aggregation.BooleanFallibleState": - return "booleanValue"; - case "org.elasticsearch.compute.aggregation.IntState", "org.elasticsearch.compute.aggregation.IntFallibleState": - return "intValue"; - case "org.elasticsearch.compute.aggregation.LongState", "org.elasticsearch.compute.aggregation.LongFallibleState": - return "longValue"; - case "org.elasticsearch.compute.aggregation.DoubleState", "org.elasticsearch.compute.aggregation.DoubleFallibleState": - return "doubleValue"; - case "org.elasticsearch.compute.aggregation.FloatState", "org.elasticsearch.compute.aggregation.FloatFallibleState": - return "floatValue"; - default: - throw new IllegalArgumentException( - "don't know how to fetch primitive values from " + stateType + ". define combineIntermediate." - ); - } - } - private MethodSpec evaluateIntermediate() { MethodSpec.Builder builder = MethodSpec.methodBuilder("evaluateIntermediate"); builder.addAnnotation(Override.class) @@ -610,45 +591,39 @@ private MethodSpec evaluateFinal() { .addParameter(BLOCK_ARRAY, "blocks") .addParameter(TypeName.INT, "offset") .addParameter(DRIVER_CONTEXT, "driverContext"); - if (stateTypeHasSeen || stateTypeHasFailed) { - var condition = Stream.of(stateTypeHasSeen ? "state.seen() == false" : null, stateTypeHasFailed ? 
"state.failed()" : null) - .filter(Objects::nonNull) - .collect(joining(" || ")); - builder.beginControlFlow("if ($L)", condition); + if (aggState.hasSeen() || aggState.hasFailed()) { + builder.beginControlFlow( + "if ($L)", + Stream.concat( + Stream.of("state.seen() == false").filter(c -> aggState.hasSeen()), + Stream.of("state.failed()").filter(c -> aggState.hasFailed()) + ).collect(joining(" || ")) + ); builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1)", BLOCK); builder.addStatement("return"); builder.endControlFlow(); } - if (evaluateFinal == null) { - primitiveStateToResult(builder); + if (aggState.declaredType().isPrimitive()) { + builder.addStatement(switch (aggState.declaredType().toString()) { + case "boolean" -> "blocks[offset] = driverContext.blockFactory().newConstantBooleanBlockWith(state.booleanValue(), 1)"; + case "int" -> "blocks[offset] = driverContext.blockFactory().newConstantIntBlockWith(state.intValue(), 1)"; + case "long" -> "blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1)"; + case "double" -> "blocks[offset] = driverContext.blockFactory().newConstantDoubleBlockWith(state.doubleValue(), 1)"; + case "float" -> "blocks[offset] = driverContext.blockFactory().newConstantFloatBlockWith(state.floatValue(), 1)"; + default -> throw new IllegalArgumentException("Unexpected primitive type: [" + aggState.declaredType() + "]"); + }); } else { + requireStaticMethod( + declarationType, + requireType(BLOCK), + requireName("evaluateFinal"), + requireArgs(requireType(aggState.declaredType()), requireType(DRIVER_CONTEXT)) + ); builder.addStatement("blocks[offset] = $T.evaluateFinal(state, driverContext)", declarationType); } return builder.build(); } - private void primitiveStateToResult(MethodSpec.Builder builder) { - switch (stateType.toString()) { - case "org.elasticsearch.compute.aggregation.BooleanState", "org.elasticsearch.compute.aggregation.BooleanFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantBooleanBlockWith(state.booleanValue(), 1)"); - return; - case "org.elasticsearch.compute.aggregation.IntState", "org.elasticsearch.compute.aggregation.IntFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantIntBlockWith(state.intValue(), 1)"); - return; - case "org.elasticsearch.compute.aggregation.LongState", "org.elasticsearch.compute.aggregation.LongFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1)"); - return; - case "org.elasticsearch.compute.aggregation.DoubleState", "org.elasticsearch.compute.aggregation.DoubleFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantDoubleBlockWith(state.doubleValue(), 1)"); - return; - case "org.elasticsearch.compute.aggregation.FloatState", "org.elasticsearch.compute.aggregation.FloatFallibleState": - builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantFloatBlockWith(state.floatValue(), 1)"); - return; - default: - throw new IllegalArgumentException("don't know how to convert state to result: " + stateType); - } - } - private MethodSpec toStringMethod() { MethodSpec.Builder builder = MethodSpec.methodBuilder("toString"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).returns(String.class); @@ -667,14 +642,6 @@ private MethodSpec close() { return builder.build(); } - private static final Pattern 
PRIMITIVE_STATE_PATTERN = Pattern.compile( - "org.elasticsearch.compute.aggregation.(Boolean|Int|Long|Double|Float)(Fallible)?State" - ); - - private boolean hasPrimitiveState() { - return PRIMITIVE_STATE_PATTERN.matcher(stateType.toString()).matches(); - } - record IntermediateStateDesc(String name, String elementType, boolean block) { static IntermediateStateDesc newIntermediateStateDesc(IntermediateState state) { String type = state.type(); @@ -711,22 +678,57 @@ public void assignToVariable(MethodSpec.Builder builder, int offset) { builder.addStatement("$T $L = (($T) $L).asVector()", vectorType(elementType), name, blockType, name + "Uncast"); } } - } - private TypeMirror valueTypeMirror() { - return combine.getParameters().get(combine.getParameters().size() - 1).asType(); + public TypeName combineArgType() { + var type = Types.fromString(elementType); + return block ? blockType(type) : type; + } } - private TypeName valueTypeName() { - return TypeName.get(valueTypeMirror()); + /** + * This represents the type returned by init method used to keep aggregation state + * @param declaredType declared state type as returned by init method + * @param type actual type used (we have some predefined state types for primitive values) + */ + public record AggregationState(TypeName declaredType, TypeName type, boolean hasSeen, boolean hasFailed) { + + public static AggregationState create(Elements elements, TypeMirror mirror, boolean hasFailures, boolean isArray) { + var declaredType = TypeName.get(mirror); + var stateType = declaredType.isPrimitive() + ? ClassName.get("org.elasticsearch.compute.aggregation", primitiveStateStoreClassname(declaredType, hasFailures, isArray)) + : declaredType; + return new AggregationState( + declaredType, + stateType, + hasMethod(elements, stateType, "seen()"), + hasMethod(elements, stateType, "failed()") + ); + } + + private static String primitiveStateStoreClassname(TypeName declaredType, boolean hasFailures, boolean isArray) { + var name = capitalize(declaredType.toString()); + if (hasFailures) { + name += "Fallible"; + } + if (isArray) { + name += "Array"; + } + return name + "State"; + } } - private TypeKind valueTypeKind() { - return valueTypeMirror().getKind(); + public record AggregationParameter(TypeName type, boolean isArray) { + + public static AggregationParameter create(TypeMirror mirror) { + return new AggregationParameter(TypeName.get(mirror), Objects.equals(mirror.getKind(), TypeKind.ARRAY)); + } + + public boolean isBytesRef() { + return Objects.equals(type, BYTES_REF); + } } - private String valueTypeString() { - String valueTypeString = TypeName.get(valueTypeMirror()).toString(); - return valuesIsArray ? 
valueTypeString.substring(0, valueTypeString.length() - 2) : valueTypeString; + private static boolean hasMethod(Elements elements, TypeName type, String name) { + return elements.getAllMembers(elements.getTypeElement(type.toString())).stream().anyMatch(e -> e.toString().equals(name)); } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java index 863db86eb934a..3ad2343ad1658 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java @@ -87,7 +87,13 @@ public boolean process(Set set, RoundEnvironment roundEnv ); if (aggClass.getAnnotation(Aggregator.class) != null) { IntermediateState[] intermediateState = aggClass.getAnnotation(Aggregator.class).value(); - implementer = new AggregatorImplementer(env.getElementUtils(), aggClass, intermediateState, warnExceptionsTypes); + implementer = new AggregatorImplementer( + env.getElementUtils(), + aggClass, + intermediateState, + warnExceptionsTypes, + aggClass.getAnnotation(Aggregator.class).includeTimestamps() + ); write(aggClass, "aggregator", implementer.sourceFile(), env); } GroupingAggregatorImplementer groupingAggregatorImplementer = null; @@ -96,13 +102,12 @@ public boolean process(Set set, RoundEnvironment roundEnv if (intermediateState.length == 0 && aggClass.getAnnotation(Aggregator.class) != null) { intermediateState = aggClass.getAnnotation(Aggregator.class).value(); } - boolean includeTimestamps = aggClass.getAnnotation(GroupingAggregator.class).includeTimestamps(); groupingAggregatorImplementer = new GroupingAggregatorImplementer( env.getElementUtils(), aggClass, intermediateState, warnExceptionsTypes, - includeTimestamps + aggClass.getAnnotation(GroupingAggregator.class).includeTimestamps() ); write(aggClass, "grouping aggregator", groupingAggregatorImplementer.sourceFile(), env); } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java index f875cd7e6480e..c58684c5d06c9 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java @@ -83,7 +83,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", EXPRESSION_EVALUATOR, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. 
Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.superclass(ABSTRACT_CONVERT_FUNCTION_EVALUATOR); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index b4a0cf9127f23..f0044ae4774f8 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -93,7 +93,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", EXPRESSION_EVALUATOR, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(EXPRESSION_EVALUATOR); builder.addType(factory()); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index bae8800d3d62f..d2b6a0e011687 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -17,28 +17,35 @@ import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.gen.AggregatorImplementer.AggregationParameter; +import org.elasticsearch.compute.gen.AggregatorImplementer.AggregationState; import java.util.Arrays; import java.util.List; import java.util.function.Consumer; -import java.util.regex.Pattern; +import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; -import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; import static java.util.stream.Collectors.joining; -import static org.elasticsearch.compute.gen.AggregatorImplementer.firstUpper; -import static org.elasticsearch.compute.gen.AggregatorImplementer.valueBlockType; -import static org.elasticsearch.compute.gen.AggregatorImplementer.valueVectorType; -import static org.elasticsearch.compute.gen.Methods.findMethod; -import static org.elasticsearch.compute.gen.Methods.findRequiredMethod; +import static org.elasticsearch.compute.gen.AggregatorImplementer.capitalize; +import static org.elasticsearch.compute.gen.Methods.requireAnyArgs; +import static org.elasticsearch.compute.gen.Methods.requireAnyType; +import static org.elasticsearch.compute.gen.Methods.requireArgs; +import static org.elasticsearch.compute.gen.Methods.requireName; +import static org.elasticsearch.compute.gen.Methods.requirePrimitiveOrImplements; +import static org.elasticsearch.compute.gen.Methods.requireStaticMethod; +import static org.elasticsearch.compute.gen.Methods.requireType; +import static org.elasticsearch.compute.gen.Methods.requireVoidType; 
import static org.elasticsearch.compute.gen.Methods.vectorAccessorName; import static org.elasticsearch.compute.gen.Types.BIG_ARRAYS; +import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BLOCK_ARRAY; import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; @@ -55,6 +62,8 @@ import static org.elasticsearch.compute.gen.Types.PAGE; import static org.elasticsearch.compute.gen.Types.SEEN_GROUP_IDS; import static org.elasticsearch.compute.gen.Types.WARNINGS; +import static org.elasticsearch.compute.gen.Types.blockType; +import static org.elasticsearch.compute.gen.Types.vectorType; /** * Implements "GroupingAggregationFunction" from a class containing static methods @@ -70,17 +79,14 @@ public class GroupingAggregatorImplementer { private final List warnExceptions; private final ExecutableElement init; private final ExecutableElement combine; - private final ExecutableElement combineStates; - private final ExecutableElement evaluateFinal; - private final ExecutableElement combineIntermediate; - private final TypeName stateType; - private final boolean valuesIsBytesRef; - private final boolean valuesIsArray; private final List createParameters; private final ClassName implementation; private final List intermediateState; private final boolean includeTimestampVector; + private final AggregationState aggState; + private final AggregationParameter aggParam; + public GroupingAggregatorImplementer( Elements elements, TypeElement declarationType, @@ -91,21 +97,23 @@ public GroupingAggregatorImplementer( this.declarationType = declarationType; this.warnExceptions = warnExceptions; - this.init = findRequiredMethod(declarationType, new String[] { "init", "initGrouping" }, e -> true); - this.stateType = choseStateType(); + this.init = requireStaticMethod( + declarationType, + requirePrimitiveOrImplements(elements, Types.GROUPING_AGGREGATOR_STATE), + requireName("init", "initGrouping"), + requireAnyArgs("") + ); + this.aggState = AggregationState.create(elements, init.getReturnType(), warnExceptions.isEmpty() == false, true); + + this.combine = requireStaticMethod( + declarationType, + aggState.declaredType().isPrimitive() ? 
requireType(aggState.declaredType()) : requireVoidType(), + requireName("combine"), + combineArgs(aggState, includeTimestampVector) + ); + // TODO support multiple parameters + this.aggParam = AggregationParameter.create(combine.getParameters().getLast().asType()); - this.combine = findRequiredMethod(declarationType, new String[] { "combine" }, e -> { - if (e.getParameters().size() == 0) { - return false; - } - TypeName firstParamType = TypeName.get(e.getParameters().get(0).asType()); - return firstParamType.isPrimitive() || firstParamType.toString().equals(stateType.toString()); - }); - this.combineStates = findMethod(declarationType, "combineStates"); - this.combineIntermediate = findMethod(declarationType, "combineIntermediate"); - this.evaluateFinal = findMethod(declarationType, "evaluateFinal"); - this.valuesIsBytesRef = BYTES_REF.equals(valueTypeName()); - this.valuesIsArray = TypeKind.ARRAY.equals(valueTypeKind()); this.createParameters = init.getParameters() .stream() .map(Parameter::from) @@ -117,12 +125,31 @@ public GroupingAggregatorImplementer( (declarationType.getSimpleName() + "GroupingAggregatorFunction").replace("AggregatorGroupingAggregator", "GroupingAggregator") ); - intermediateState = Arrays.stream(interStateAnno) + this.intermediateState = Arrays.stream(interStateAnno) .map(AggregatorImplementer.IntermediateStateDesc::newIntermediateStateDesc) .toList(); this.includeTimestampVector = includeTimestampVector; } + private static Methods.ArgumentMatcher combineArgs(AggregationState aggState, boolean includeTimestampVector) { + if (aggState.declaredType().isPrimitive()) { + return requireArgs(requireType(aggState.declaredType()), requireAnyType("")); + } else if (includeTimestampVector) { + return requireArgs( + requireType(aggState.declaredType()), + requireType(TypeName.INT), + requireType(TypeName.LONG), // @timestamp + requireAnyType("") + ); + } else { + return requireArgs( + requireType(aggState.declaredType()), + requireType(TypeName.INT), + requireAnyType("") + ); + } + } + public ClassName implementation() { return implementation; } @@ -131,18 +158,6 @@ List createParameters() { return createParameters; } - private TypeName choseStateType() { - TypeName initReturn = TypeName.get(init.getReturnType()); - if (false == initReturn.isPrimitive()) { - return initReturn; - } - String simpleName = firstUpper(initReturn.toString()); - if (warnExceptions.isEmpty()) { - return ClassName.get("org.elasticsearch.compute.aggregation", simpleName + "ArrayState"); - } - return ClassName.get("org.elasticsearch.compute.aggregation", simpleName + "FallibleArrayState"); - } - public JavaFile sourceFile() { JavaFile.Builder builder = JavaFile.builder(implementation.packageName(), type()); builder.addFileComment(""" @@ -156,7 +171,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", GROUPING_AGGREGATOR_FUNCTION, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. 
Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(GROUPING_AGGREGATOR_FUNCTION); builder.addField( @@ -164,7 +179,7 @@ private TypeSpec type() { .initializer(initInterState()) .build() ); - builder.addField(stateType, "state", Modifier.PRIVATE, Modifier.FINAL); + builder.addField(aggState.type(), "state", Modifier.PRIVATE, Modifier.FINAL); if (warnExceptions.isEmpty() == false) { builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); } @@ -180,10 +195,10 @@ private TypeSpec type() { builder.addMethod(intermediateStateDesc()); builder.addMethod(intermediateBlockCount()); builder.addMethod(prepareProcessPage()); - builder.addMethod(addRawInputLoop(INT_VECTOR, valueBlockType(init, combine))); - builder.addMethod(addRawInputLoop(INT_VECTOR, valueVectorType(init, combine))); - builder.addMethod(addRawInputLoop(INT_BLOCK, valueBlockType(init, combine))); - builder.addMethod(addRawInputLoop(INT_BLOCK, valueVectorType(init, combine))); + builder.addMethod(addRawInputLoop(INT_VECTOR, blockType(aggParam.type()))); + builder.addMethod(addRawInputLoop(INT_VECTOR, vectorType(aggParam.type()))); + builder.addMethod(addRawInputLoop(INT_BLOCK, blockType(aggParam.type()))); + builder.addMethod(addRawInputLoop(INT_BLOCK, vectorType(aggParam.type()))); builder.addMethod(selectedMayContainUnseenGroups()); builder.addMethod(addIntermediateInput()); builder.addMethod(addIntermediateRowInput()); @@ -230,16 +245,16 @@ private CodeBlock callInit() { .map(p -> TypeName.get(p.asType()).equals(BIG_ARRAYS) ? "driverContext.bigArrays()" : p.getSimpleName().toString()) .collect(joining(", ")); CodeBlock.Builder builder = CodeBlock.builder(); - if (init.getReturnType().toString().equals(stateType.toString())) { - builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall); - } else { + if (aggState.declaredType().isPrimitive()) { builder.add( "new $T(driverContext.bigArrays(), $T.$L($L))", - stateType, + aggState.type(), declarationType, init.getSimpleName(), initParametersCall ); + } else { + builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall); } return builder.build(); } @@ -263,7 +278,7 @@ private MethodSpec ctor() { builder.addParameter(WARNINGS, "warnings"); } builder.addParameter(LIST_INTEGER, "channels"); - builder.addParameter(stateType, "state"); + builder.addParameter(aggState.type(), "state"); builder.addParameter(DRIVER_CONTEXT, "driverContext"); if (warnExceptions.isEmpty() == false) { builder.addStatement("this.warnings = warnings"); @@ -301,8 +316,8 @@ private MethodSpec prepareProcessPage() { builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).returns(GROUPING_AGGREGATOR_FUNCTION_ADD_INPUT); builder.addParameter(SEEN_GROUP_IDS, "seenGroupIds").addParameter(PAGE, "page"); - builder.addStatement("$T valuesBlock = page.getBlock(channels.get(0))", valueBlockType(init, combine)); - builder.addStatement("$T valuesVector = valuesBlock.asVector()", valueVectorType(init, combine)); + builder.addStatement("$T valuesBlock = page.getBlock(channels.get(0))", blockType(aggParam.type())); + builder.addStatement("$T valuesVector = valuesBlock.asVector()", vectorType(aggParam.type())); if (includeTimestampVector) { builder.addStatement("$T timestampsBlock = page.getBlock(channels.get(1))", LONG_BLOCK); builder.addStatement("$T timestampsVector = timestampsBlock.asVector()", LONG_VECTOR); @@ -355,18 +370,17 @@ private TypeSpec addInput(Consumer 
addBlock) { private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { boolean groupsIsBlock = groupsType.toString().endsWith("Block"); boolean valuesIsBlock = valuesType.toString().endsWith("Block"); - String methodName = "addRawInput"; - MethodSpec.Builder builder = MethodSpec.methodBuilder(methodName); + MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawInput"); builder.addModifiers(Modifier.PRIVATE); builder.addParameter(TypeName.INT, "positionOffset").addParameter(groupsType, "groups").addParameter(valuesType, "values"); if (includeTimestampVector) { builder.addParameter(LONG_VECTOR, "timestamps"); } - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { // Add bytes_ref scratch var that will be used for bytes_ref blocks/vectors builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); } - if (valuesIsArray && valuesIsBlock == false) { + if (aggParam.isArray() && valuesIsBlock == false) { builder.addComment("This type does not support vectors because all values are multi-valued"); return builder.build(); } @@ -397,11 +411,11 @@ private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { builder.endControlFlow(); builder.addStatement("int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset)"); builder.addStatement("int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset)"); - if (valuesIsArray) { - String arrayType = valueTypeString(); + if (aggParam.isArray()) { + String arrayType = aggParam.type().toString().replace("[]", ""); builder.addStatement("$L[] valuesArray = new $L[valuesEnd - valuesStart]", arrayType, arrayType); builder.beginControlFlow("for (int v = valuesStart; v < valuesEnd; v++)"); - builder.addStatement("valuesArray[v-valuesStart] = $L.get$L(v)", "values", firstUpper(arrayType)); + builder.addStatement("valuesArray[v-valuesStart] = $L.get$L(v)", "values", capitalize(arrayType)); builder.endControlFlow(); combineRawInputForArray(builder, "valuesArray"); } else { @@ -422,14 +436,12 @@ private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { } private void combineRawInput(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { - TypeName valueType = valueTypeName(); + TypeName valueType = aggParam.type(); TypeName returnType = TypeName.get(combine.getReturnType()); warningsBlock(builder, () -> { - if (valuesIsBytesRef) { + if (aggParam.isBytesRef()) { combineRawInputForBytesRef(builder, blockVariable, offsetVariable); - } else if (includeTimestampVector) { - combineRawInputWithTimestamp(builder, offsetVariable); } else if (valueType.isPrimitive() == false) { throw new IllegalArgumentException("second parameter to combine must be a primitive, array or BytesRef: " + valueType); } else if (returnType.isPrimitive()) { @@ -442,48 +454,75 @@ private void combineRawInput(MethodSpec.Builder builder, String blockVariable, S }); } - private void combineRawInputForPrimitive(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { - builder.addStatement( - "state.set(groupId, $T.combine(state.getOrDefault(groupId), $L.get$L($L)))", - declarationType, - blockVariable, - firstUpper(valueTypeName().toString()), - offsetVariable - ); + private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { + // scratch is a BytesRef var that must have been defined before the iteration starts + if (includeTimestampVector) { + if (offsetVariable.contains(" + ")) { + 
builder.addStatement("var valuePosition = $L", offsetVariable); + offsetVariable = "valuePosition"; + } + builder.addStatement( + "$T.combine(state, groupId, timestamps.getLong($L), $L.getBytesRef($L, scratch))", + declarationType, + offsetVariable, + blockVariable, + offsetVariable + ); + } else { + builder.addStatement("$T.combine(state, groupId, $L.getBytesRef($L, scratch))", declarationType, blockVariable, offsetVariable); + } } - private void combineRawInputForArray(MethodSpec.Builder builder, String arrayVariable) { - warningsBlock(builder, () -> builder.addStatement("$T.combine(state, groupId, $L)", declarationType, arrayVariable)); + private void combineRawInputForPrimitive(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { + if (includeTimestampVector) { + if (offsetVariable.contains(" + ")) { + builder.addStatement("var valuePosition = $L", offsetVariable); + offsetVariable = "valuePosition"; + } + builder.addStatement( + "$T.combine(state, groupId, timestamps.getLong($L), values.get$L($L))", + declarationType, + offsetVariable, + capitalize(aggParam.type().toString()), + offsetVariable + ); + } else { + builder.addStatement( + "state.set(groupId, $T.combine(state.getOrDefault(groupId), $L.get$L($L)))", + declarationType, + blockVariable, + capitalize(aggParam.type().toString()), + offsetVariable + ); + } } private void combineRawInputForVoid(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { - builder.addStatement( - "$T.combine(state, groupId, $L.get$L($L))", - declarationType, - blockVariable, - firstUpper(valueTypeName().toString()), - offsetVariable - ); - } - - private void combineRawInputWithTimestamp(MethodSpec.Builder builder, String offsetVariable) { - String blockType = firstUpper(valueTypeName().toString()); - if (offsetVariable.contains(" + ")) { - builder.addStatement("var valuePosition = $L", offsetVariable); - offsetVariable = "valuePosition"; + if (includeTimestampVector) { + if (offsetVariable.contains(" + ")) { + builder.addStatement("var valuePosition = $L", offsetVariable); + offsetVariable = "valuePosition"; + } + builder.addStatement( + "$T.combine(state, groupId, timestamps.getLong($L), values.get$L($L))", + declarationType, + offsetVariable, + capitalize(aggParam.type().toString()), + offsetVariable + ); + } else { + builder.addStatement( + "$T.combine(state, groupId, $L.get$L($L))", + declarationType, + blockVariable, + capitalize(aggParam.type().toString()), + offsetVariable + ); } - builder.addStatement( - "$T.combine(state, groupId, timestamps.getLong($L), values.get$L($L))", - declarationType, - offsetVariable, - blockType, - offsetVariable - ); } - private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { - // scratch is a BytesRef var that must have been defined before the iteration starts - builder.addStatement("$T.combine(state, groupId, $L.getBytesRef($L, scratch))", declarationType, blockVariable, offsetVariable); + private void combineRawInputForArray(MethodSpec.Builder builder, String arrayVariable) { + warningsBlock(builder, () -> builder.addStatement("$T.combine(state, groupId, $L)", declarationType, arrayVariable)); } private void warningsBlock(MethodSpec.Builder builder, Runnable block) { @@ -539,7 +578,7 @@ private MethodSpec addIntermediateInput() { builder.beginControlFlow("for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++)"); { builder.addStatement("int groupId = groups.getInt(groupPosition)"); 
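For orientation, here is roughly the shape of the addRawInput code these builders emit once includeTimestampVector is set: the value block comes from channel 0, the @timestamp LongVector from channel 1 (rejecting a non-vector timestamp block), and dispatch goes to the matching addRawVector/addRawBlock overload. This is a sketch only, with double as a stand-in element type and the channels field assumed from the surrounding generated class:

    import org.elasticsearch.compute.data.BooleanVector;
    import org.elasticsearch.compute.data.DoubleBlock;
    import org.elasticsearch.compute.data.DoubleVector;
    import org.elasticsearch.compute.data.LongBlock;
    import org.elasticsearch.compute.data.LongVector;
    import org.elasticsearch.compute.data.Page;

    @Override
    public void addRawInput(Page page, BooleanVector mask) {
        if (mask.allTrue()) {
            // No masking
            DoubleBlock block = page.getBlock(channels.get(0));
            DoubleVector vector = block.asVector();
            LongBlock timestampsBlock = page.getBlock(channels.get(1));
            LongVector timestampsVector = timestampsBlock.asVector();
            if (timestampsVector == null) {
                throw new IllegalStateException("expected @timestamp vector; but got a block");
            }
            if (vector != null) {
                addRawVector(vector, timestampsVector);
            } else {
                addRawBlock(block, timestampsVector);
            }
            return;
        }
        // Some positions masked away, others kept: same lookups, but the
        // addRawVector/addRawBlock overloads also receive the mask.
        DoubleBlock block = page.getBlock(channels.get(0));
        DoubleVector vector = block.asVector();
        LongBlock timestampsBlock = page.getBlock(channels.get(1));
        LongVector timestampsVector = timestampsBlock.asVector();
        if (timestampsVector == null) {
            throw new IllegalStateException("expected @timestamp vector; but got a block");
        }
        if (vector != null) {
            addRawVector(vector, timestampsVector, mask);
        } else {
            addRawBlock(block, timestampsVector, mask);
        }
    }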
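On the declaration side, the signatures the new require* matchers enforce come out as follows. Both classes below are hypothetical stand-ins, and SomeGroupingState stands for any user-defined type implementing GroupingAggregatorState; only the method shapes are taken from this patch:

    // Timestamp-aware grouping aggregator (includeTimestamps = true on the
    // @GroupingAggregator annotation): a void combine over
    // (state, groupId, @timestamp, value).
    class RateLikeDoubleAggregator {
        public static SomeGroupingState initGrouping(BigArrays bigArrays) {
            return new SomeGroupingState(bigArrays);
        }

        public static void combine(SomeGroupingState state, int groupId, long timestamp, double value) {
            state.append(groupId, timestamp, value); // hypothetical state method
        }
    }

    // Primitive-state aggregator: init/combine return the primitive, and the
    // generator wraps it in the predefined DoubleArrayState for grouping.
    class MaxLikeDoubleAggregator {
        public static double init() {
            return Double.NEGATIVE_INFINITY;
        }

        public static double combine(double current, double value) {
            return Math.max(current, value);
        }
    }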
- if (hasPrimitiveState()) { + if (aggState.declaredType().isPrimitive()) { if (warnExceptions.isEmpty()) { assert intermediateState.size() == 2; assert intermediateState.get(1).name().equals("seen"); @@ -567,31 +606,33 @@ private MethodSpec addIntermediateInput() { }); builder.endControlFlow(); } else { - builder.addStatement("$T.combineIntermediate(state, groupId, " + intermediateStateRowAccess() + ")", declarationType); + var stateHasBlock = intermediateState.stream().anyMatch(AggregatorImplementer.IntermediateStateDesc::block); + requireStaticMethod( + declarationType, + requireVoidType(), + requireName("combineIntermediate"), + requireArgs( + Stream.of( + Stream.of(aggState.declaredType(), TypeName.INT), // aggState and groupId + intermediateState.stream().map(AggregatorImplementer.IntermediateStateDesc::combineArgType), + Stream.of(TypeName.INT).filter(p -> stateHasBlock) // position + ).flatMap(Function.identity()).map(Methods::requireType).toArray(Methods.TypeMatcher[]::new) + ) + ); + + builder.addStatement( + "$T.combineIntermediate(state, groupId, " + + intermediateState.stream().map(desc -> desc.access("groupPosition + positionOffset")).collect(joining(", ")) + + (stateHasBlock ? ", groupPosition + positionOffset" : "") + + ")", + declarationType + ); } builder.endControlFlow(); } return builder.build(); } - String intermediateStateRowAccess() { - String rowAccess = intermediateState.stream().map(desc -> desc.access("groupPosition + positionOffset")).collect(joining(", ")); - if (intermediateState.stream().anyMatch(AggregatorImplementer.IntermediateStateDesc::block)) { - rowAccess += ", groupPosition + positionOffset"; - } - return rowAccess; - } - - private void combineStates(MethodSpec.Builder builder) { - if (combineStates == null) { - builder.beginControlFlow("if (inState.hasValue(position))"); - builder.addStatement("state.set(groupId, $T.combine(state.getOrDefault(groupId), inState.get(position)))", declarationType); - builder.endControlFlow(); - return; - } - builder.addStatement("$T.combineStates(state, groupId, inState, position)", declarationType); - } - private MethodSpec addIntermediateRowInput() { MethodSpec.Builder builder = MethodSpec.methodBuilder("addIntermediateRowInput"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); @@ -601,9 +642,26 @@ private MethodSpec addIntermediateRowInput() { builder.addStatement("throw new IllegalArgumentException($S + getClass() + $S + input.getClass())", "expected ", "; got "); } builder.endControlFlow(); - builder.addStatement("$T inState = (($T) input).state", stateType, implementation); + builder.addStatement("$T inState = (($T) input).state", aggState.type(), implementation); builder.addStatement("state.enableGroupIdTracking(new $T.Empty())", SEEN_GROUP_IDS); - combineStates(builder); + if (aggState.declaredType().isPrimitive()) { + builder.beginControlFlow("if (inState.hasValue(position))"); + builder.addStatement("state.set(groupId, $T.combine(state.getOrDefault(groupId), inState.get(position)))", declarationType); + builder.endControlFlow(); + } else { + requireStaticMethod( + declarationType, + requireVoidType(), + requireName("combineStates"), + requireArgs( + requireType(aggState.declaredType()), + requireType(TypeName.INT), + requireType(aggState.declaredType()), + requireType(TypeName.INT) + ) + ); + builder.addStatement("$T.combineStates(state, groupId, inState, position)", declarationType); + } return builder.build(); } @@ -627,9 +685,15 @@ private MethodSpec evaluateFinal() { 
.addParameter(INT_VECTOR, "selected") .addParameter(DRIVER_CONTEXT, "driverContext"); - if (evaluateFinal == null) { + if (aggState.declaredType().isPrimitive()) { builder.addStatement("blocks[offset] = state.toValuesBlock(selected, driverContext)"); } else { + requireStaticMethod( + declarationType, + requireType(BLOCK), + requireName("evaluateFinal"), + requireArgs(requireType(aggState.declaredType()), requireType(INT_VECTOR), requireType(DRIVER_CONTEXT)) + ); builder.addStatement("blocks[offset] = $T.evaluateFinal(state, selected, driverContext)", declarationType); } return builder.build(); @@ -652,32 +716,4 @@ private MethodSpec close() { builder.addStatement("state.close()"); return builder.build(); } - - private static final Pattern PRIMITIVE_STATE_PATTERN = Pattern.compile( - "org.elasticsearch.compute.aggregation.(Boolean|Int|Long|Double|Float)(Fallible)?ArrayState" - ); - - private boolean hasPrimitiveState() { - return PRIMITIVE_STATE_PATTERN.matcher(stateType.toString()).matches(); - } - - private TypeMirror valueTypeMirror() { - return combine.getParameters().get(combine.getParameters().size() - 1).asType(); - } - - private TypeName valueTypeName() { - return TypeName.get(valueTypeMirror()); - } - - private TypeKind valueTypeKind() { - return valueTypeMirror().getKind(); - } - - private String valueTypeString() { - String valueTypeString = TypeName.get(valueTypeMirror()).toString(); - if (valuesIsArray) { - valueTypeString = valueTypeString.substring(0, valueTypeString.length() - 2); - } - return valueTypeString; - } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java index 6f98f1f797ab0..f2fa7b8084448 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java @@ -9,18 +9,22 @@ import com.squareup.javapoet.TypeName; -import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.Set; import java.util.function.Predicate; +import java.util.stream.IntStream; +import java.util.stream.Stream; -import javax.lang.model.element.Element; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; -import javax.lang.model.element.VariableElement; import javax.lang.model.type.DeclaredType; -import javax.lang.model.type.TypeMirror; +import javax.lang.model.type.TypeKind; import javax.lang.model.util.ElementFilter; +import javax.lang.model.util.Elements; +import static java.util.stream.Collectors.joining; import static org.elasticsearch.compute.gen.Types.BOOLEAN_BLOCK; import static org.elasticsearch.compute.gen.Types.BOOLEAN_BLOCK_BUILDER; import static org.elasticsearch.compute.gen.Types.BOOLEAN_VECTOR; @@ -49,30 +53,116 @@ * Finds declared methods for the code generator. 
*/ public class Methods { - static ExecutableElement findRequiredMethod(TypeElement declarationType, String[] names, Predicate filter) { - ExecutableElement result = findMethod(names, filter, declarationType, superClassOf(declarationType)); - if (result == null) { - if (names.length == 1) { - throw new IllegalArgumentException(declarationType + "#" + names[0] + " is required"); - } - throw new IllegalArgumentException("one of " + declarationType + "#" + Arrays.toString(names) + " is required"); + + static ExecutableElement requireStaticMethod( + TypeElement declarationType, + TypeMatcher returnTypeMatcher, + NameMatcher nameMatcher, + ArgumentMatcher argumentMatcher + ) { + return typeAndSuperType(declarationType).flatMap(type -> ElementFilter.methodsIn(type.getEnclosedElements()).stream()) + .filter(method -> method.getModifiers().contains(Modifier.STATIC)) + .filter(method -> nameMatcher.test(method.getSimpleName().toString())) + .filter(method -> returnTypeMatcher.test(TypeName.get(method.getReturnType()))) + .filter(method -> argumentMatcher.test(method.getParameters().stream().map(it -> TypeName.get(it.asType())).toList())) + .findFirst() + .orElseThrow(() -> { + var message = nameMatcher.names.size() == 1 ? "Requires method: " : "Requires one of methods: "; + var signatures = nameMatcher.names.stream() + .map(name -> "public static " + returnTypeMatcher + " " + declarationType + "#" + name + "(" + argumentMatcher + ")") + .collect(joining(" or ")); + return new IllegalArgumentException(message + signatures); + }); + } + + static NameMatcher requireName(String... names) { + return new NameMatcher(Set.of(names)); + } + + static TypeMatcher requireVoidType() { + return new TypeMatcher(type -> Objects.equals(TypeName.VOID, type), "void"); + } + + static TypeMatcher requireAnyType(String description) { + return new TypeMatcher(type -> true, description); + } + + static TypeMatcher requirePrimitiveOrImplements(Elements elements, TypeName requiredInterface) { + return new TypeMatcher( + type -> type.isPrimitive() || isImplementing(elements, type, requiredInterface), + "[boolean|int|long|float|double|" + requiredInterface + "]" + ); + } + + static TypeMatcher requireType(TypeName requiredType) { + return new TypeMatcher(type -> Objects.equals(requiredType, type), requiredType.toString()); + } + + static ArgumentMatcher requireAnyArgs(String description) { + return new ArgumentMatcher(args -> true, description); + } + + static ArgumentMatcher requireArgs(TypeMatcher... 
argTypes) { + return new ArgumentMatcher( + args -> args.size() == argTypes.length && IntStream.range(0, argTypes.length).allMatch(i -> argTypes[i].test(args.get(i))), + Stream.of(argTypes).map(TypeMatcher::toString).collect(joining(", ")) + ); + } + + record NameMatcher(Set names) implements Predicate { + @Override + public boolean test(String name) { + return names.contains(name); } - return result; } - static ExecutableElement findMethod(TypeElement declarationType, String name) { - return findMethod(new String[] { name }, e -> true, declarationType, superClassOf(declarationType)); + record TypeMatcher(Predicate matcher, String description) implements Predicate { + @Override + public boolean test(TypeName typeName) { + return matcher.test(typeName); + } + + @Override + public String toString() { + return description; + } } - private static TypeElement superClassOf(TypeElement declarationType) { - TypeMirror superclass = declarationType.getSuperclass(); - if (superclass instanceof DeclaredType declaredType) { - Element superclassElement = declaredType.asElement(); - if (superclassElement instanceof TypeElement) { - return (TypeElement) superclassElement; - } + record ArgumentMatcher(Predicate> matcher, String description) implements Predicate> { + @Override + public boolean test(List typeName) { + return matcher.test(typeName); + } + + @Override + public String toString() { + return description; + } + } + + private static boolean isImplementing(Elements elements, TypeName type, TypeName requiredInterface) { + return allInterfacesOf(elements, type).anyMatch( + anInterface -> Objects.equals(anInterface.toString(), requiredInterface.toString()) + ); + } + + private static Stream allInterfacesOf(Elements elements, TypeName type) { + var typeElement = elements.getTypeElement(type.toString()); + var superType = Stream.of(typeElement.getSuperclass()).filter(sType -> sType.getKind() != TypeKind.NONE).map(TypeName::get); + var interfaces = typeElement.getInterfaces().stream().map(TypeName::get); + return Stream.concat( + superType.flatMap(sType -> allInterfacesOf(elements, sType)), + interfaces.flatMap(anInterface -> Stream.concat(Stream.of(anInterface), allInterfacesOf(elements, anInterface))) + ); + } + + private static Stream typeAndSuperType(TypeElement declarationType) { + if (declarationType.getSuperclass() instanceof DeclaredType declaredType + && declaredType.asElement() instanceof TypeElement superType) { + return Stream.of(declarationType, superType); + } else { + return Stream.of(declarationType); } - return null; } static ExecutableElement findMethod(TypeElement declarationType, String[] names, Predicate filter) { @@ -95,16 +185,6 @@ static ExecutableElement findMethod(String[] names, Predicate return null; } - /** - * Returns the arguments of a method after applying a filter. - */ - static VariableElement[] findMethodArguments(ExecutableElement method, Predicate filter) { - if (method.getParameters().isEmpty()) { - return new VariableElement[0]; - } - return method.getParameters().stream().filter(filter).toArray(VariableElement[]::new); - } - /** * Returns the name of the method used to add {@code valueType} instances * to vector or block builders. 
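These matchers turn the old find-then-null-check flow into declarative lookups that fail fast with the full expected signature. The init resolution from the grouping implementer earlier in this patch is representative; note the description strings passed to requireAnyArgs/requireAnyType are free-form text used only in the error message:

    ExecutableElement init = Methods.requireStaticMethod(
        declarationType,
        Methods.requirePrimitiveOrImplements(elements, Types.GROUPING_AGGREGATOR_STATE),
        Methods.requireName("init", "initGrouping"),
        Methods.requireAnyArgs("<any arguments>")
    );
    // When nothing matches, this throws roughly:
    // IllegalArgumentException: Requires one of methods:
    //   public static [boolean|int|long|float|double|...GroupingAggregatorState] MyAgg#init(<any arguments>)
    //   or public static [...] MyAgg#initGrouping(<any arguments>)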
diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index 30ca69b1651de..1872012500ea7 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -128,7 +128,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", EXPRESSION_EVALUATOR, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); if (warnExceptions.isEmpty()) { builder.superclass(ABSTRACT_MULTIVALUE_FUNCTION_EVALUATOR); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java index 8b01d957f3bd2..35c42153f9ad6 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java @@ -15,9 +15,13 @@ import java.util.ArrayDeque; import java.util.Deque; import java.util.List; +import java.util.Map; +import java.util.stream.Stream; import javax.lang.model.type.TypeMirror; +import static java.util.stream.Collectors.toUnmodifiableMap; + /** * Types used by the code generator. */ @@ -75,26 +79,8 @@ public class Types { static final ClassName DOUBLE_VECTOR_FIXED_BUILDER = ClassName.get(DATA_PACKAGE, "DoubleVector", "FixedBuilder"); static final ClassName FLOAT_VECTOR_FIXED_BUILDER = ClassName.get(DATA_PACKAGE, "FloatVector", "FixedBuilder"); - static final ClassName BOOLEAN_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "BooleanArrayVector"); - static final ClassName BYTES_REF_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "BytesRefArrayVector"); - static final ClassName INT_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "IntArrayVector"); - static final ClassName LONG_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "LongArrayVector"); - static final ClassName DOUBLE_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "DoubleArrayVector"); - static final ClassName FLOAT_ARRAY_VECTOR = ClassName.get(DATA_PACKAGE, "FloatArrayVector"); - - static final ClassName BOOLEAN_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "BooleanArrayBlock"); - static final ClassName BYTES_REF_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "BytesRefArrayBlock"); - static final ClassName INT_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "IntArrayBlock"); - static final ClassName LONG_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "LongArrayBlock"); - static final ClassName DOUBLE_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "DoubleArrayBlock"); - static final ClassName FLOAT_ARRAY_BLOCK = ClassName.get(DATA_PACKAGE, "FloatArrayBlock"); - - static final ClassName BOOLEAN_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantBooleanVector"); - static final ClassName BYTES_REF_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantBytesRefVector"); - static final ClassName INT_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantIntVector"); - static final ClassName LONG_CONSTANT_VECTOR = 
ClassName.get(DATA_PACKAGE, "ConstantLongVector"); - static final ClassName DOUBLE_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantDoubleVector"); - static final ClassName FLOAT_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantFloatVector"); + static final ClassName AGGREGATOR_STATE = ClassName.get(AGGREGATION_PACKAGE, "AggregatorState"); + static final ClassName GROUPING_AGGREGATOR_STATE = ClassName.get(AGGREGATION_PACKAGE, "GroupingAggregatorState"); static final ClassName AGGREGATOR_FUNCTION = ClassName.get(AGGREGATION_PACKAGE, "AggregatorFunction"); static final ClassName AGGREGATOR_FUNCTION_SUPPLIER = ClassName.get(AGGREGATION_PACKAGE, "AggregatorFunctionSupplier"); @@ -138,89 +124,50 @@ public class Types { static final ClassName RELEASABLE = ClassName.get("org.elasticsearch.core", "Releasable"); static final ClassName RELEASABLES = ClassName.get("org.elasticsearch.core", "Releasables"); - static ClassName blockType(TypeName elementType) { - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_BLOCK; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_BLOCK; - } - if (elementType.equals(TypeName.INT)) { - return INT_BLOCK; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_BLOCK; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_BLOCK; + private record TypeDef(TypeName type, String alias, ClassName block, ClassName vector) { + + public static TypeDef of(TypeName type, String alias, String block, String vector) { + return new TypeDef(type, alias, ClassName.get(DATA_PACKAGE, block), ClassName.get(DATA_PACKAGE, vector)); } - throw new IllegalArgumentException("unknown block type for [" + elementType + "]"); + } + + private static final Map TYPES = Stream.of( + TypeDef.of(TypeName.BOOLEAN, "BOOLEAN", "BooleanBlock", "BooleanVector"), + TypeDef.of(TypeName.INT, "INT", "IntBlock", "IntVector"), + TypeDef.of(TypeName.LONG, "LONG", "LongBlock", "LongVector"), + TypeDef.of(TypeName.FLOAT, "FLOAT", "FloatBlock", "FloatVector"), + TypeDef.of(TypeName.DOUBLE, "DOUBLE", "DoubleBlock", "DoubleVector"), + TypeDef.of(BYTES_REF, "BYTES_REF", "BytesRefBlock", "BytesRefVector") + ) + .flatMap(def -> Stream.of(def.type.toString(), def.type + "[]", def.alias).map(alias -> Map.entry(alias, def))) + .collect(toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + + private static TypeDef findRequired(String name, String kind) { + TypeDef typeDef = TYPES.get(name); + if (typeDef == null) { + throw new IllegalArgumentException("unknown " + kind + " type [" + name + "]"); + } + return typeDef; + } + + static TypeName fromString(String type) { + return findRequired(type, "plain").type; + } + + static ClassName blockType(TypeName elementType) { + return blockType(elementType.toString()); } static ClassName blockType(String elementType) { - if (elementType.equalsIgnoreCase(TypeName.BOOLEAN.toString())) { - return BOOLEAN_BLOCK; - } - if (elementType.equalsIgnoreCase("BYTES_REF")) { - return BYTES_REF_BLOCK; - } - if (elementType.equalsIgnoreCase(TypeName.INT.toString())) { - return INT_BLOCK; - } - if (elementType.equalsIgnoreCase(TypeName.LONG.toString())) { - return LONG_BLOCK; - } - if (elementType.equalsIgnoreCase(TypeName.DOUBLE.toString())) { - return DOUBLE_BLOCK; - } - if (elementType.equalsIgnoreCase(TypeName.FLOAT.toString())) { - return FLOAT_BLOCK; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); + return findRequired(elementType, "block").block; } static ClassName vectorType(TypeName elementType) 
{ - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_VECTOR; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_VECTOR; - } - if (elementType.equals(TypeName.INT)) { - return INT_VECTOR; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_VECTOR; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_VECTOR; - } - if (elementType.equals(TypeName.FLOAT)) { - return FLOAT_VECTOR; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); + return vectorType(elementType.toString()); } static ClassName vectorType(String elementType) { - if (elementType.equalsIgnoreCase(TypeName.BOOLEAN.toString())) { - return BOOLEAN_VECTOR; - } - if (elementType.equalsIgnoreCase("BYTES_REF")) { - return BYTES_REF_VECTOR; - } - if (elementType.equalsIgnoreCase(TypeName.INT.toString())) { - return INT_VECTOR; - } - if (elementType.equalsIgnoreCase(TypeName.LONG.toString())) { - return LONG_VECTOR; - } - if (elementType.equalsIgnoreCase(TypeName.DOUBLE.toString())) { - return DOUBLE_VECTOR; - } - if (elementType.equalsIgnoreCase(TypeName.FLOAT.toString())) { - return FLOAT_VECTOR; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); + return findRequired(elementType, "vector").vector; } static ClassName builderType(TypeName resultType) { @@ -282,63 +229,6 @@ static ClassName vectorFixedBuilderType(TypeName elementType) { throw new IllegalArgumentException("unknown vector fixed builder type for [" + elementType + "]"); } - static ClassName arrayVectorType(TypeName elementType) { - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_ARRAY_VECTOR; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_ARRAY_VECTOR; - } - if (elementType.equals(TypeName.INT)) { - return INT_ARRAY_VECTOR; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_ARRAY_VECTOR; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_ARRAY_VECTOR; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); - } - - static ClassName arrayBlockType(TypeName elementType) { - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_ARRAY_BLOCK; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_ARRAY_BLOCK; - } - if (elementType.equals(TypeName.INT)) { - return INT_ARRAY_BLOCK; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_ARRAY_BLOCK; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_ARRAY_BLOCK; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); - } - - static ClassName constantVectorType(TypeName elementType) { - if (elementType.equals(TypeName.BOOLEAN)) { - return BOOLEAN_CONSTANT_VECTOR; - } - if (elementType.equals(BYTES_REF)) { - return BYTES_REF_CONSTANT_VECTOR; - } - if (elementType.equals(TypeName.INT)) { - return INT_CONSTANT_VECTOR; - } - if (elementType.equals(TypeName.LONG)) { - return LONG_CONSTANT_VECTOR; - } - if (elementType.equals(TypeName.DOUBLE)) { - return DOUBLE_CONSTANT_VECTOR; - } - throw new IllegalArgumentException("unknown vector type for [" + elementType + "]"); - } - static TypeName elementType(TypeName t) { if (t.equals(BOOLEAN_BLOCK) || t.equals(BOOLEAN_VECTOR) || t.equals(BOOLEAN_BLOCK_BUILDER)) { return TypeName.BOOLEAN; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanArrayState.java 
index 793e6cc1b37ef..314bb5fea28a6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class BooleanArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleArrayState.java index 6367fdfb6617e..d8d21a79f2141 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class BooleanFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleState.java index 073f31c390a6f..e75d0ddd76787 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single boolean. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class BooleanFallibleState implements AggregatorState { private boolean value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanState.java index ba4d133dee553..cb0f4f7c3c603 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single boolean. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class BooleanState implements AggregatorState { private boolean value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java index 953b7172a2862..b2f080e0484fd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class DoubleArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java index dd1d60f7bd246..f8d25c65c2f03 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class DoubleFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleState.java index 4cdeddec724bf..a59aff16f9ae5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single double. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class DoubleFallibleState implements AggregatorState { private double value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleState.java index 90ecc2c1d3c03..c224a7d02479e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single double. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class DoubleState implements AggregatorState { private double value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatArrayState.java index b3767828f00db..db30dd1bec9ed 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class FloatArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleArrayState.java index 055cf345033c5..d8c1cde11e1f1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class FloatFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java index b050c86258dcd..bfaef7e6c291c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single float. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class FloatFallibleState implements AggregatorState { private float value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatState.java index 6f608271b6e42..b271ed042d993 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single float. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class FloatState implements AggregatorState { private float value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java index 034ed72d08c17..c1451ce76d710 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class IntArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleArrayState.java index e45d84720ca1a..b79e7114e7eb5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class IntFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleState.java index 360f3fdb009e4..91e952bf412d9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single int. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class IntFallibleState implements AggregatorState { private int value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntState.java index c539c576ef36d..52b3cf18a1263 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single int. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class IntState implements AggregatorState { private int value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java index 9ff7e3f536484..2fc2bac5783ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class LongArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleArrayState.java index cb69579906871..155248275133f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class LongFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleState.java index 98669ef627d04..b6f472a9feb78 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single long. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class LongFallibleState implements AggregatorState { private long value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongState.java index e9d97dcfe7fc1..64b227145bc98 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single long. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class LongState implements AggregatorState { private long value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java index cbd20f15c6511..deec1ef04f623 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java @@ -333,7 +333,8 @@ Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateFloatAggregator.java index b50b125d98331..94ad5254bc723 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateFloatAggregator.java @@ -334,7 +334,8 @@ Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java index 01c3e3d7fb8e7..011291dd08c52 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java @@ -334,7 +334,8 @@ Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java index c84985b703aed..9ccb5d3bd1b1a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java @@ -333,7 +333,8 @@ Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBooleanAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBooleanAggregator.java index 32391c4827303..a2e86b3b09340 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBooleanAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBooleanAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.BooleanBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BooleanBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public 
void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBytesRefAggregator.java index c9b0e679b3e64..0a965899c0775 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopBytesRefAggregator.java @@ -19,7 +19,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.BytesRefBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -78,7 +77,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BytesRefBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -95,7 +94,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -103,7 +103,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -113,7 +114,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -128,7 +129,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java index d9a7a302f07c8..6a20ed99bc236 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.DoubleBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final DoubleBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java index 8b65261e10f46..cf6ad0f9017de 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.FloatBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final FloatBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean 
ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java index 5c6b79f710af5..f4ac83c438063 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.IntBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final IntBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; 
private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIpAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIpAggregator.java index 219f7385b56df..292dd539edeb5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIpAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIpAggregator.java @@ -18,7 +18,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.sort.IpBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -77,7 +76,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final IpBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -92,7 +91,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -100,7 +100,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -110,7 +111,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -125,7 +126,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java index 44cef8df7257b..c5af92956bec1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java @@ -17,7 +17,6 @@ import 
org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.sort.LongBucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -74,7 +73,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongBucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -89,7 +88,8 @@ public void merge(int groupId, GroupingState other, int otherGroupId) { sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -97,7 +97,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -107,7 +108,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -122,7 +123,8 @@ public void merge(GroupingState other) { internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java index bd77bd7ff1e46..ad0ab2f7189f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java @@ -20,7 +20,6 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -83,14 +82,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BytesRefHash values; private SingleState(BigArrays bigArrays) { values = new BytesRefHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -125,7 +125,7 @@ public void close() { 
* an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. */ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongLongHash values; private final BytesRefHash bytes; @@ -146,7 +146,8 @@ private GroupingState(BigArrays bigArrays) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -190,7 +191,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java index a8409367bc090..271d7120092ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java @@ -18,7 +18,6 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; /** * Aggregates field values for double. @@ -77,14 +76,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final LongHash values; private SingleState(BigArrays bigArrays) { values = new LongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -118,14 +118,15 @@ public void close() { * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. 
*/ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongLongHash values; private GroupingState(BigArrays bigArrays) { values = new LongLongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -168,7 +169,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java index f9e5e1b7b283a..b44cad807fba2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesFloatAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.FloatBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; /** * Aggregates field values for float. @@ -82,14 +81,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final LongHash values; private SingleState(BigArrays bigArrays) { values = new LongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -123,14 +123,15 @@ public void close() { * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. 
*/ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongHash values; private GroupingState(BigArrays bigArrays) { values = new LongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -175,7 +176,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java index 2420dcee70712..4d0c518245694 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; /** * Aggregates field values for int. @@ -82,14 +81,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final LongHash values; private SingleState(BigArrays bigArrays) { values = new LongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -123,14 +123,15 @@ public void close() { * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. 
*/ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongHash values; private GroupingState(BigArrays bigArrays) { values = new LongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -175,7 +176,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java index 4938b8f15edb0..5471c90147ec4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java @@ -18,7 +18,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; /** * Aggregates field values for long. @@ -77,14 +76,15 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final LongHash values; private SingleState(BigArrays bigArrays) { values = new LongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -118,14 +118,15 @@ public void close() { * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. 
*/ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final LongLongHash values; private GroupingState(BigArrays bigArrays) { values = new LongLongHash(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -168,7 +169,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index b8ea7658a8247..9297e76d5c0be 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -29,7 +29,7 @@ /** * Maps a {@link BytesRefBlock} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. */ final class BytesRefBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index c9c672112a630..ede268271d9dd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -28,7 +28,7 @@ /** * Maps a {@link DoubleBlock} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. */ final class DoubleBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 13b60c6f1fec5..3a3267af3c983 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -26,7 +26,7 @@ /** * Maps a {@link IntBlock} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. 
*/ final class IntBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 5252bd742ec51..c5152ae3cd22d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -28,7 +28,7 @@ /** * Maps a {@link LongBlock} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. */ final class LongBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index 896662dddf1eb..47d386d0bd690 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link BooleanArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class BooleanArrayBlock extends AbstractArrayBlock implements BooleanBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index f761ed5806a06..cde163a2d3bc5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of boolean values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class BooleanArrayVector extends AbstractVector implements BooleanVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java index f353512eb93b7..ed93cc2cc21b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link BooleanBigArrayVector}. Does not take ownership of the given * {@link BitArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. 
*/ public final class BooleanBigArrayBlock extends AbstractArrayBlock implements BooleanBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index a1ccfc487cca9..fd7df3663afa8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link BitArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class BooleanBigArrayVector extends AbstractVector implements BooleanVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index b08b80acc6976..c0e0ed0595c0f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -19,7 +19,7 @@ /** * Block that stores boolean values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface BooleanBlock extends Block permits BooleanArrayBlock, BooleanVectorBlock, ConstantNullBlock, BooleanBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 7f4705ddecb27..1fe75bff6e1a5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of BooleanBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanLookup.java index f969e164eef68..10de16af922f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link BooleanBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. 
*/ final class BooleanLookup implements ReleasableIterator { private final BooleanBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index f2d6b5fbd4ce9..813f7cd757207 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -17,7 +17,7 @@ /** * Vector that stores boolean values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface BooleanVector extends Vector permits ConstantBooleanVector, BooleanArrayVector, BooleanBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 1544cc3355cd0..56cfc725801ab 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link BooleanVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class BooleanVectorBlock extends AbstractVectorBlock implements BooleanBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java index effb90267702f..340ead5a42894 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link BooleanVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class BooleanVectorBuilder extends AbstractVectorBuilder implements BooleanVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java index c84029b4ceeb4..21835281393a5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link BooleanVector}s that never grows. Prefer this to * {@link BooleanVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. 
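+ * (A minimal usage sketch; the factory method name is assumed from the
+ * surrounding builder APIs rather than shown in this diff:
+ *   try (var builder = blockFactory.newBooleanVectorFixedBuilder(2)) {
+ *       builder.appendBoolean(true);
+ *       builder.appendBoolean(false);
+ *       BooleanVector vector = builder.build();
+ *   }
+ * )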
*/ public final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 5bcb1b0ec5095..a85b75d8fdc2a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -21,7 +21,7 @@ /** * Block implementation that stores values in a {@link BytesRefArrayVector}. * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class BytesRefArrayBlock extends AbstractArrayBlock implements BytesRefBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index e72c900718735..509ee7e583e4c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -21,7 +21,7 @@ /** * Vector implementation that stores an array of BytesRef values. * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class BytesRefArrayVector extends AbstractVector implements BytesRefVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 6661895722725..2353012ef314b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -20,7 +20,7 @@ /** * Block that stores BytesRef values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock, ConstantNullBlock, OrdinalBytesRefBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 0a2b350780405..2d724df2d3275 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -15,7 +15,7 @@ /** * Block build of BytesRefBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. 
*/ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRefBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java index 3ec62902fe048..98967fdac3fbe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java @@ -15,7 +15,7 @@ /** * Generic {@link Block#lookup} implementation {@link BytesRefBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class BytesRefLookup implements ReleasableIterator { private final BytesRef firstScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index a3432a2913fde..1bca89f531c14 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -18,7 +18,7 @@ /** * Vector that stores BytesRef values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector, ConstantNullVector, OrdinalBytesRefVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 6c0334e147aa0..10cc1b5503a64 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -14,7 +14,7 @@ /** * Block view of a {@link BytesRefVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class BytesRefVectorBlock extends AbstractVectorBlock implements BytesRefBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java index cd5851e9e49ef..1962784d2a1e3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java @@ -14,7 +14,7 @@ /** * Builder for {@link BytesRefVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. 
*/ final class BytesRefVectorBuilder extends AbstractVectorBuilder implements BytesRefVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index f36fbd7a20316..6ef344b8cc40d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant boolean value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantBooleanVector extends AbstractVector implements BooleanVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index 216b7db26b6fb..4bb8ee4a5a392 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -15,7 +15,7 @@ /** * Vector implementation that stores a constant BytesRef value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantBytesRefVector extends AbstractVector implements BytesRefVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index 3c91eaf7171b5..b2f145e6918e1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant double value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantDoubleVector extends AbstractVector implements DoubleVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantFloatVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantFloatVector.java index a9ee5e811da64..09b34f0b57494 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantFloatVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantFloatVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant float value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. 
*/ final class ConstantFloatVector extends AbstractVector implements FloatVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index 20c1b07a3f102..1131096edf036 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant int value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantIntVector extends AbstractVector implements IntVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 6e3decb9ad540..a7e22ee58526b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant long value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantLongVector extends AbstractVector implements LongVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index 20bd42da98c71..83c7b85a7ff5a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link DoubleArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class DoubleArrayBlock extends AbstractArrayBlock implements DoubleBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 6426f8f39ecd9..5c375634011c6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of double values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. 
*/ final class DoubleArrayVector extends AbstractVector implements DoubleVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java index 8dc9efa755c17..9ae9e9c98ad98 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link DoubleBigArrayVector}. Does not take ownership of the given * {@link DoubleArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class DoubleBigArrayBlock extends AbstractArrayBlock implements DoubleBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index e98d0da358cdf..9c811e4aec056 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link DoubleArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class DoubleBigArrayVector extends AbstractVector implements DoubleVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 04df6253662a9..d5511246afb34 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -19,7 +19,7 @@ /** * Block that stores double values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface DoubleBlock extends Block permits DoubleArrayBlock, DoubleVectorBlock, ConstantNullBlock, DoubleBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 8ecc9b91e0ffe..5896bbd2c51e5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of DoubleBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. 
*/ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleLookup.java index bcb8a414f7c57..e8d69edb92c20 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link DoubleBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class DoubleLookup implements ReleasableIterator { private final DoubleBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index ec4b9cadc074e..b478c5ffbe043 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -17,7 +17,7 @@ /** * Vector that stores double values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface DoubleVector extends Vector permits ConstantDoubleVector, DoubleArrayVector, DoubleBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index 7e680f8e59ff0..f6350bd4586ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link DoubleVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class DoubleVectorBlock extends AbstractVectorBlock implements DoubleBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java index f4e7be406e1ca..c5c180003b071 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link DoubleVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. 
*/ final class DoubleVectorBuilder extends AbstractVectorBuilder implements DoubleVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java index e84040578acf7..2ce356220f257 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link DoubleVector}s that never grows. Prefer this to * {@link DoubleVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. */ public final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java index c0941557dc4fe..749041d80d668 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link FloatArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class FloatArrayBlock extends AbstractArrayBlock implements FloatBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayVector.java index fd47f40463552..f10e9dc39bbfd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of float values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class FloatArrayVector extends AbstractVector implements FloatVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayBlock.java index a2191ba9fc69a..4a67a2e6ec12f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link FloatBigArrayVector}. Does not take ownership of the given * {@link FloatArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. 
*/ public final class FloatBigArrayBlock extends AbstractArrayBlock implements FloatBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayVector.java index 4e481bc6bcaaf..1aa5aa82e3447 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link FloatArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class FloatBigArrayVector extends AbstractVector implements FloatVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java index 0679e38b63219..61cda16381940 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java @@ -19,7 +19,7 @@ /** * Block that stores float values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface FloatBlock extends Block permits FloatArrayBlock, FloatVectorBlock, ConstantNullBlock, FloatBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java index 8504912adc057..809f74899c9c2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of FloatBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class FloatBlockBuilder extends AbstractBlockBuilder implements FloatBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatLookup.java index 9e0018e527c4d..25e39a649e948 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link FloatBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. 
*/ final class FloatLookup implements ReleasableIterator { private final FloatBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVector.java index 5f49efd2779ea..30fd4d69f221f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVector.java @@ -17,7 +17,7 @@ /** * Vector that stores float values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface FloatVector extends Vector permits ConstantFloatVector, FloatArrayVector, FloatBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBlock.java index d5df1e8ea6e57..6c2846183cd2d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link FloatVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class FloatVectorBlock extends AbstractVectorBlock implements FloatBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBuilder.java index 9cec6355ec982..72db1bef16996 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link FloatVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class FloatVectorBuilder extends AbstractVectorBuilder implements FloatVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java index 9c4f2b3986c7e..d18d24809301f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link FloatVector}s that never grows. Prefer this to * {@link FloatVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. 
*/ public final class FloatVectorFixedBuilder implements FloatVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 8ced678bc90b0..0be8b6db78343 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link IntArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class IntArrayBlock extends AbstractArrayBlock implements IntBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index c3d6691dec3ca..9db51c61bbf1d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of int values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class IntArrayVector extends AbstractVector implements IntVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java index 247720489a3fd..93555c913af1b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link IntBigArrayVector}. Does not take ownership of the given * {@link IntArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class IntBigArrayBlock extends AbstractArrayBlock implements IntBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index b6c73205ad12e..3480b11bfe257 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link IntArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. 
*/ public final class IntBigArrayVector extends AbstractVector implements IntVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 6af61695929df..a0677816d10ab 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -19,7 +19,7 @@ /** * Block that stores int values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface IntBlock extends Block permits IntArrayBlock, IntVectorBlock, ConstantNullBlock, IntBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index 31449b6f1cd72..cf8f84d7449ee 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of IntBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntLookup.java index b7ea15cd9d818..83a6d92f43586 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link IntBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class IntLookup implements ReleasableIterator { private final IntBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index da1a92b21c657..afd7aea269772 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -17,7 +17,7 @@ /** * Vector that stores int values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. 
*/ public sealed interface IntVector extends Vector permits ConstantIntVector, IntArrayVector, IntBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 57e69a3aa9acc..a18b2e8ab2384 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link IntVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class IntVectorBlock extends AbstractVectorBlock implements IntBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java index 09bbb32cefe79..96301b1ab8d1c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link IntVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class IntVectorBuilder extends AbstractVectorBuilder implements IntVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java index 6af564735c073..56f92f9d0eb6e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link IntVector}s that never grows. Prefer this to * {@link IntVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. */ public final class IntVectorFixedBuilder implements IntVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index fb631ab326ce7..9b9b7a694ebb2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link LongArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. 
*/ final class LongArrayBlock extends AbstractArrayBlock implements LongBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 748f25a0005fb..ff9179343536e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of long values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class LongArrayVector extends AbstractVector implements LongVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java index ec600d02bc720..a9e98c82c8b5d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link LongBigArrayVector}. Does not take ownership of the given * {@link LongArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class LongBigArrayBlock extends AbstractArrayBlock implements LongBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index dae27331afc1e..a05a76b1d1d4c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link LongArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class LongBigArrayVector extends AbstractVector implements LongVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 090efd9a31579..9fae8d3cbcddd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -19,7 +19,7 @@ /** * Block that stores long values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. 
*/ public sealed interface LongBlock extends Block permits LongArrayBlock, LongVectorBlock, ConstantNullBlock, LongBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index bf25347edd989..58d3dbfe0cb38 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of LongBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java index ca1b06d70b1d1..3422784c4df60 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link LongBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class LongLookup implements ReleasableIterator { private final LongBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index 3773e602b8d03..3b3badab91a40 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -17,7 +17,7 @@ /** * Vector that stores long values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface LongVector extends Vector permits ConstantLongVector, LongArrayVector, LongBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index edf58dae1c756..26a2cab5704b5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link LongVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. 
 */
public final class LongVectorBlock extends AbstractVectorBlock implements LongBlock {
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java
index eb4e54781a020..8709b2ca6bdef 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java
@@ -11,7 +11,7 @@
 
 /**
  * Builder for {@link LongVector}s that grows as needed.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code X-VectorBuilder.java.st} instead.
  */
 final class LongVectorBuilder extends AbstractVectorBuilder implements LongVector.Builder {
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java
index 33cf0e5dc82e2..2fe289de7fd77 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java
@@ -13,7 +13,7 @@
  * Builder for {@link LongVector}s that never grows. Prefer this to
  * {@link LongVectorBuilder} if you know the precise size up front because
  * it's faster.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead.
  */
 public final class LongVectorFixedBuilder implements LongVector.FixedBuilder {
     private final BlockFactory blockFactory;
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java
index 63318a2189908..ca89e6f999641 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/DoubleBucketedSort.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
 import org.elasticsearch.common.util.DoubleArray;
+import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.IntVector;
@@ -101,7 +102,7 @@ public void collect(double value, int bucket) {
         // Gathering mode
         long requiredSize = rootIndex + bucketSize;
         if (values.size() < requiredSize) {
-            grow(requiredSize);
+            grow(bucket);
         }
         int next = getNextGatherOffset(rootIndex);
         assert 0 <= next && next < bucketSize
@@ -257,19 +258,25 @@ private void swap(long lhs, long rhs) {
 
     /**
      * Allocate storage for more buckets and store the "next gather offset"
-     * for those new buckets.
+     * for those new buckets. We always grow the storage by a whole bucket's
+     * worth of slots at a time. We never allocate space for partial buckets.
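+     * For example (illustrative numbers only, to make the rounding concrete):
+     * with {@code bucketSize == 3} and {@code bucket == 4} we need
+     * {@code (4 + 1) * 3 == 15} slots; if {@code BigArrays.overSize} returns
+     * 17, rounding up to the next full bucket gives
+     * {@code ((17 + 3 - 1) / 3) * 3 == 18} slots, i.e. six whole buckets and
+     * never a partial one.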
      */
-    private void grow(long minSize) {
+    private void grow(int bucket) {
         long oldMax = values.size();
-        values = bigArrays.grow(values, minSize);
+        assert oldMax % bucketSize == 0;
+
+        long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.DOUBLE_PAGE_SIZE, Double.BYTES);
+        // Round up to the next full bucket.
+        newSize = (newSize + bucketSize - 1) / bucketSize;
+        values = bigArrays.resize(values, newSize * bucketSize);
         // Set the next gather offsets for all newly allocated buckets.
-        setNextGatherOffsets(oldMax - (oldMax % getBucketSize()));
+        fillGatherOffsets(oldMax);
     }
 
     /**
      * Maintain the "next gather offsets" for newly allocated buckets.
      */
-    private void setNextGatherOffsets(long startingAt) {
+    private void fillGatherOffsets(long startingAt) {
         int nextOffset = getBucketSize() - 1;
         for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) {
             setNextGatherOffset(bucketRoot, nextOffset);
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java
index b490fe193c33f..2bf8edd99f48c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
 import org.elasticsearch.common.util.FloatArray;
+import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.IntVector;
@@ -101,7 +102,7 @@ public void collect(float value, int bucket) {
         // Gathering mode
         long requiredSize = rootIndex + bucketSize;
         if (values.size() < requiredSize) {
-            grow(requiredSize);
+            grow(bucket);
         }
         int next = getNextGatherOffset(rootIndex);
         assert 0 <= next && next < bucketSize
@@ -257,19 +258,25 @@ private void swap(long lhs, long rhs) {
 
     /**
      * Allocate storage for more buckets and store the "next gather offset"
-     * for those new buckets.
+     * for those new buckets. We always grow the storage by a whole bucket's
+     * worth of slots at a time. We never allocate space for partial buckets.
      */
-    private void grow(long minSize) {
+    private void grow(int bucket) {
         long oldMax = values.size();
-        values = bigArrays.grow(values, minSize);
+        assert oldMax % bucketSize == 0;
+
+        long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.FLOAT_PAGE_SIZE, Float.BYTES);
+        // Round up to the next full bucket.
+        newSize = (newSize + bucketSize - 1) / bucketSize;
+        values = bigArrays.resize(values, newSize * bucketSize);
         // Set the next gather offsets for all newly allocated buckets.
-        setNextGatherOffsets(oldMax - (oldMax % getBucketSize()));
+        fillGatherOffsets(oldMax);
     }
 
     /**
      * Maintain the "next gather offsets" for newly allocated buckets.
      */
-    private void setNextGatherOffsets(long startingAt) {
+    private void fillGatherOffsets(long startingAt) {
         int nextOffset = getBucketSize() - 1;
         for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) {
             setNextGatherOffset(bucketRoot, nextOffset);
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java
index 04a635d75fe52..257dfe2ebb0bd 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntBucketedSort.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
 import org.elasticsearch.common.util.IntArray;
+import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.IntVector;
@@ -101,7 +102,7 @@ public void collect(int value, int bucket) {
         // Gathering mode
         long requiredSize = rootIndex + bucketSize;
         if (values.size() < requiredSize) {
-            grow(requiredSize);
+            grow(bucket);
         }
         int next = getNextGatherOffset(rootIndex);
         assert 0 <= next && next < bucketSize
@@ -257,19 +258,25 @@ private void swap(long lhs, long rhs) {
 
     /**
      * Allocate storage for more buckets and store the "next gather offset"
-     * for those new buckets.
+     * for those new buckets. We always grow the storage by a whole bucket's
+     * worth of slots at a time. We never allocate space for partial buckets.
      */
-    private void grow(long minSize) {
+    private void grow(int bucket) {
         long oldMax = values.size();
-        values = bigArrays.grow(values, minSize);
+        assert oldMax % bucketSize == 0;
+
+        long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.INT_PAGE_SIZE, Integer.BYTES);
+        // Round up to the next full bucket.
+        newSize = (newSize + bucketSize - 1) / bucketSize;
+        values = bigArrays.resize(values, newSize * bucketSize);
         // Set the next gather offsets for all newly allocated buckets.
-        setNextGatherOffsets(oldMax - (oldMax % getBucketSize()));
+        fillGatherOffsets(oldMax);
     }
 
     /**
      * Maintain the "next gather offsets" for newly allocated buckets.
*/ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java index e08c25256944b..c27467ebb60ff 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongBucketedSort.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -101,7 +102,7 @@ public void collect(long value, int bucket) { // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize @@ -257,19 +258,25 @@ private void swap(long lhs, long rhs) { /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by a whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = bigArrays.grow(values, minSize); + assert oldMax % bucketSize == 0; + + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.LONG_PAGE_SIZE, Long.BYTES); + // Round up to the next full bucket. + newSize = (newSize + bucketSize - 1) / bucketSize; + values = bigArrays.resize(values, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % getBucketSize())); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java index ca5cd1bda44d0..90ca8e567e8be 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead.
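The three BucketedSort diffs above all make the same change: instead of growing the backing array to an arbitrary minimum size, grow(int bucket) over-allocates with BigArrays.overSize and then rounds up to a whole number of buckets, so the array length is always a multiple of bucketSize. That invariant is also why fillGatherOffsets can now start at oldMax directly rather than first rounding it down to a bucket boundary. A minimal standalone sketch of the sizing arithmetic, with overSizeApprox as a hypothetical stand-in for BigArrays.overSize (the real method also accounts for page size and element width):

final class GrowSizingSketch {
    // Hypothetical stand-in for BigArrays.overSize(minTarget, pageSize, bytesPerElement):
    // over-allocate roughly 12.5% past the minimum to amortize repeated resizing.
    static long overSizeApprox(long minSlots) {
        return minSlots + (minSlots >>> 3);
    }

    // Mirrors the shape of the new grow(int bucket): the result is over-allocated
    // and always an exact multiple of bucketSize, never a partial bucket.
    static long newSlotCount(int bucket, int bucketSize) {
        long minSlots = ((long) bucket + 1) * bucketSize; // must at least fit the requested bucket
        long wholeBuckets = (overSizeApprox(minSlots) + bucketSize - 1) / bucketSize; // round up
        return wholeBuckets * bucketSize;
    }

    public static void main(String[] args) {
        // bucketSize 100, collecting into bucket 41: at least 4200 slots are needed.
        System.out.println(newSlotCount(41, 100)); // 4800: over-allocated, whole buckets only
    }
}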
*/ public final class CountDistinctBooleanAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java index 9512f4e76c49c..bbfb2a34f920c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java @@ -12,23 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public CountDistinctBooleanAggregatorFunctionSupplier() { + } - public CountDistinctBooleanAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return CountDistinctBooleanAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return CountDistinctBooleanGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public CountDistinctBooleanAggregatorFunction aggregator(DriverContext driverContext) { + public CountDistinctBooleanAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return CountDistinctBooleanAggregatorFunction.create(driverContext, channels); } @Override public CountDistinctBooleanGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return CountDistinctBooleanGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java index 4cdecd9944f7b..fb172567d7021 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
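This supplier diff is the template for all the generated supplier changes that follow: the channel list moves off the constructor and onto aggregator(...) and groupingAggregator(...), leaving many suppliers with a no-argument constructor, and each supplier now reports its intermediate state layout via nonGroupingIntermediateStateDesc() and groupingIntermediateStateDesc(). At a call site the change looks roughly like the sketch below (makeCountDistinct and the channel choice are illustrative, not from this diff):

static AggregatorFunction makeCountDistinct(DriverContext driverContext) {
    // Before this change, channels were fixed at supplier construction time:
    //   var supplier = new CountDistinctBooleanAggregatorFunctionSupplier(channels);
    //   return supplier.aggregator(driverContext);
    // After it, a single stateless supplier instance can serve any channel wiring:
    var supplier = new CountDistinctBooleanAggregatorFunctionSupplier();
    return supplier.aggregator(driverContext, List.of(0)); // read input from channel 0
}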
*/ public final class CountDistinctBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java index 38dadda1eba0c..77c386bd99175 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class CountDistinctBytesRefAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java index b05c529c2ce9b..cb92d715c91d6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java @@ -12,26 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class CountDistinctBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final int precision; - public CountDistinctBytesRefAggregatorFunctionSupplier(List channels, int precision) { - this.channels = channels; + public CountDistinctBytesRefAggregatorFunctionSupplier(int precision) { this.precision = precision; } @Override - public CountDistinctBytesRefAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return CountDistinctBytesRefAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return CountDistinctBytesRefGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public CountDistinctBytesRefAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return CountDistinctBytesRefAggregatorFunction.create(driverContext, channels, precision); } @Override public CountDistinctBytesRefGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return CountDistinctBytesRefGroupingAggregatorFunction.create(channels, driverContext, precision); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java index 2261a60ff247e..5d4096de08417 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java index 1d985fbd1dff6..f42fcfafef2d7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
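For suppliers that carry function-specific configuration, such as the HLL precision of the count-distinct family, only the channel list moves: the configuration stays on the constructor. Schematically, at a hypothetical call site with precision and channels assumed in scope:

// precision is per-function configuration and stays on the constructor;
// channels are per-plan wiring, now passed when the functions are created.
var supplier = new CountDistinctBytesRefAggregatorFunctionSupplier(precision);
var fn = supplier.aggregator(driverContext, channels);
var grouping = supplier.groupingAggregator(driverContext, channels);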
*/ public final class CountDistinctDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java index 0a15ebb07ecf4..f4d9c2425b4ef 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java @@ -12,26 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final int precision; - public CountDistinctDoubleAggregatorFunctionSupplier(List channels, int precision) { - this.channels = channels; + public CountDistinctDoubleAggregatorFunctionSupplier(int precision) { this.precision = precision; } @Override - public CountDistinctDoubleAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return CountDistinctDoubleAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return CountDistinctDoubleGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public CountDistinctDoubleAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return CountDistinctDoubleAggregatorFunction.create(driverContext, channels, precision); } @Override public CountDistinctDoubleGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return CountDistinctDoubleGroupingAggregatorFunction.create(channels, driverContext, precision); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java index c769a157e5ecb..d6d335dc6d0f0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class CountDistinctDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java index 36d2aaf3e3d4f..5a65d460ad27e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class CountDistinctFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java index 4c2aad00a7a72..f4c941d8d7f59 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java @@ -12,26 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class CountDistinctFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final int precision; - public CountDistinctFloatAggregatorFunctionSupplier(List channels, int precision) { - this.channels = channels; + public CountDistinctFloatAggregatorFunctionSupplier(int precision) { this.precision = precision; } @Override - public CountDistinctFloatAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return CountDistinctFloatAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return CountDistinctFloatGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public CountDistinctFloatAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return CountDistinctFloatAggregatorFunction.create(driverContext, channels, precision); } @Override public CountDistinctFloatGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return CountDistinctFloatGroupingAggregatorFunction.create(channels, driverContext, precision); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java index 0b1c93aad5e2b..d9e745113689f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java index 05bebca924f7e..44c9751f2bee7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class CountDistinctIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java index fec5b7df48a21..8a09acde91568 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java @@ -12,26 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final int precision; - public CountDistinctIntAggregatorFunctionSupplier(List channels, int precision) { - this.channels = channels; + public CountDistinctIntAggregatorFunctionSupplier(int precision) { this.precision = precision; } @Override - public CountDistinctIntAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return CountDistinctIntAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return CountDistinctIntGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public CountDistinctIntAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return CountDistinctIntAggregatorFunction.create(driverContext, channels, precision); } @Override - public CountDistinctIntGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + public CountDistinctIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return CountDistinctIntGroupingAggregatorFunction.create(channels, driverContext, precision); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java index 7642ca7dcc6a0..ae06526aa5317 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class CountDistinctIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java index 9e62525fa2bb0..864051e7b3197 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class CountDistinctLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java index e3cc788215d39..1443fb6d66e66 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java @@ -12,26 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class CountDistinctLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final int precision; - public CountDistinctLongAggregatorFunctionSupplier(List channels, int precision) { - this.channels = channels; + public CountDistinctLongAggregatorFunctionSupplier(int precision) { this.precision = precision; } @Override - public CountDistinctLongAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return CountDistinctLongAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return CountDistinctLongGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public CountDistinctLongAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return CountDistinctLongAggregatorFunction.create(driverContext, channels, precision); } @Override - public CountDistinctLongGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + public CountDistinctLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return CountDistinctLongGroupingAggregatorFunction.create(channels, driverContext, precision); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java index 00d0e955ba88a..685cb2f0e5dcf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java index 01763200f2d2c..30d61d45f0759 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
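The new intermediate-state accessors also make the state layout available without instantiating a function, presumably so that planning code can describe intermediate pages up front. A sketch, with supplier assumed in scope:

// The layout is now a property of the supplier itself, so no DriverContext
// is needed just to learn how many intermediate columns a function emits.
List<IntermediateStateDesc> nonGrouping = supplier.nonGroupingIntermediateStateDesc();
List<IntermediateStateDesc> grouping = supplier.groupingIntermediateStateDesc();
int intermediateColumns = grouping.size(); // one entry per intermediate state block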
*/ public final class MaxBooleanAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java index e5bbf63ddee07..e8ccdb92e5198 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MaxBooleanAggregatorFunctionSupplier() { + } - public MaxBooleanAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MaxBooleanAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MaxBooleanGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MaxBooleanAggregatorFunction aggregator(DriverContext driverContext) { + public MaxBooleanAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MaxBooleanAggregatorFunction.create(driverContext, channels); } @Override - public MaxBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MaxBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MaxBooleanGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java index dd7760273bfa6..915e481f2661f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MaxBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java index 73b927cd9c521..936b18f35aae0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MaxBytesRefAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java index 7c8af2e0c7e6d..bc52373bb933f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MaxBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MaxBytesRefAggregatorFunctionSupplier() { + } - public MaxBytesRefAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MaxBytesRefAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MaxBytesRefGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MaxBytesRefAggregatorFunction aggregator(DriverContext driverContext) { + public MaxBytesRefAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MaxBytesRefAggregatorFunction.create(driverContext, channels); } @Override - public MaxBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MaxBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MaxBytesRefGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java index fcb87428e9b7d..f15976bcdc61d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java index 04d24d49cbff8..3af21d1c19dff 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MaxDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java index af878fc778985..417cb4b7c9c37 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MaxDoubleAggregatorFunctionSupplier() { + } - public MaxDoubleAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MaxDoubleAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MaxDoubleGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MaxDoubleAggregatorFunction aggregator(DriverContext driverContext) { + public MaxDoubleAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MaxDoubleAggregatorFunction.create(driverContext, channels); } @Override - public MaxDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MaxDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MaxDoubleGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java index 42588ea81367c..c8f28f0dfd865 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MaxDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java index ce22983bff72b..e16e28e182aaa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MaxFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java index a3aa44f432430..3279506e75afa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MaxFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MaxFloatAggregatorFunctionSupplier() { + } - public MaxFloatAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MaxFloatAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MaxFloatGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MaxFloatAggregatorFunction aggregator(DriverContext driverContext) { + public MaxFloatAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MaxFloatAggregatorFunction.create(driverContext, channels); } @Override - public MaxFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MaxFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MaxFloatGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java index 006ee147b15e1..89d3c8dae28ba 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java index 6a91b574da769..a4c0fbfb75b9a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MaxIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java index 5e0a4e2172696..0e1dca2b52f0d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java @@ -12,22 +12,30 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MaxIntAggregatorFunctionSupplier() { + } - public MaxIntAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MaxIntAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MaxIntGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MaxIntAggregatorFunction aggregator(DriverContext driverContext) { + public MaxIntAggregatorFunction aggregator(DriverContext driverContext, List channels) { return MaxIntAggregatorFunction.create(driverContext, channels); } @Override - public MaxIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MaxIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MaxIntGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java index faea5a63eac93..4d86001ac669d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java index 7f6d47ce1c876..c0beac5a31aa8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxIpAggregator}. 
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MaxIpAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java index 1fb734c243477..fa84acd602af4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java @@ -12,22 +12,30 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MaxIpAggregatorFunctionSupplier() { + } - public MaxIpAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MaxIpAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MaxIpGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MaxIpAggregatorFunction aggregator(DriverContext driverContext) { + public MaxIpAggregatorFunction aggregator(DriverContext driverContext, List channels) { return MaxIpAggregatorFunction.create(driverContext, channels); } @Override - public MaxIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MaxIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MaxIpGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java index f5715949094f7..8b25dcc293159 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MaxIpGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java index 97d12d1ef6852..71fa9b04b4937 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MaxLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java index f4d17da186d58..7683622aadd12 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java @@ -12,22 +12,30 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MaxLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MaxLongAggregatorFunctionSupplier() { + } - public MaxLongAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MaxLongAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MaxLongGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MaxLongAggregatorFunction aggregator(DriverContext driverContext) { + public MaxLongAggregatorFunction aggregator(DriverContext driverContext, List channels) { return MaxLongAggregatorFunction.create(driverContext, channels); } @Override - public MaxLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MaxLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MaxLongGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java index a5f115ad0d2b1..9573945dc7d53 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java index 611314318eba7..a442e49a8ad6e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MedianAbsoluteDeviationDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java index 4720ce08fa282..08ae3c3fe8664 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java @@ -12,23 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier() { + } - public MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MedianAbsoluteDeviationDoubleAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MedianAbsoluteDeviationDoubleAggregatorFunction aggregator(DriverContext driverContext) { + public MedianAbsoluteDeviationDoubleAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MedianAbsoluteDeviationDoubleAggregatorFunction.create(driverContext, channels); } @Override public MedianAbsoluteDeviationDoubleGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java index bdb56e9e3b1a0..291737cf1c21b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MedianAbsoluteDeviationDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java index e20badf2ce38a..2bcd5be4fd0d3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java index 1fad0faafad4e..d63c9ce2dcdcd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java @@ -12,23 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MedianAbsoluteDeviationFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MedianAbsoluteDeviationFloatAggregatorFunctionSupplier() { + } - public MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MedianAbsoluteDeviationFloatAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MedianAbsoluteDeviationFloatGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MedianAbsoluteDeviationFloatAggregatorFunction aggregator(DriverContext driverContext) { + public MedianAbsoluteDeviationFloatAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MedianAbsoluteDeviationFloatAggregatorFunction.create(driverContext, channels); } @Override public MedianAbsoluteDeviationFloatGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return MedianAbsoluteDeviationFloatGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java index 12b4c12591169..fe44034fbd6b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java index df0d24d442283..058eba52c6cdd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MedianAbsoluteDeviationIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java index e72918359b2f6..c496749ff19e5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java @@ -12,23 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MedianAbsoluteDeviationIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MedianAbsoluteDeviationIntAggregatorFunctionSupplier() { + } - public MedianAbsoluteDeviationIntAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MedianAbsoluteDeviationIntAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MedianAbsoluteDeviationIntGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MedianAbsoluteDeviationIntAggregatorFunction aggregator(DriverContext driverContext) { + public MedianAbsoluteDeviationIntAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MedianAbsoluteDeviationIntAggregatorFunction.create(driverContext, channels); } @Override public MedianAbsoluteDeviationIntGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return MedianAbsoluteDeviationIntGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java index 6f0f18c0dea00..b79c1829c2c22 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MedianAbsoluteDeviationIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java index e0ace94a1da49..56d21b5877c73 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java index aa79691ba220e..25af01363494d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java @@ -12,23 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MedianAbsoluteDeviationLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MedianAbsoluteDeviationLongAggregatorFunctionSupplier() { + } - public MedianAbsoluteDeviationLongAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MedianAbsoluteDeviationLongAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MedianAbsoluteDeviationLongGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MedianAbsoluteDeviationLongAggregatorFunction aggregator(DriverContext driverContext) { + public MedianAbsoluteDeviationLongAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MedianAbsoluteDeviationLongAggregatorFunction.create(driverContext, channels); } @Override public MedianAbsoluteDeviationLongGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return MedianAbsoluteDeviationLongGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java index 910678df997d1..acefb4a683811 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java index 4d91d3794aecb..c6645a1945783 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
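Every parameterless supplier in this family (Max*, MedianAbsoluteDeviation*, Min*) now has an empty constructor, so call sites move the channel list to the point where the concrete function is built. A before/after sketch of that call-site change; driverContext and the channel index 0 are illustrative stand-ins, not taken from the source:

import java.util.List;

// Assumes an enclosing class with the compute-engine types on the classpath.
static AggregatorFunction buildMad(DriverContext driverContext) {
    // Before this change:
    //   new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(List.of(0)).aggregator(driverContext);
    // After: one supplier instance can serve any channel mapping.
    var supplier = new MedianAbsoluteDeviationLongAggregatorFunctionSupplier();
    return supplier.aggregator(driverContext, List.of(0)); // channel 0 holds the input column
}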
*/ public final class MinBooleanAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java index f66dc6e67e0fd..53f80570e3976 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MinBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MinBooleanAggregatorFunctionSupplier() { + } - public MinBooleanAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MinBooleanAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MinBooleanGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MinBooleanAggregatorFunction aggregator(DriverContext driverContext) { + public MinBooleanAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MinBooleanAggregatorFunction.create(driverContext, channels); } @Override - public MinBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MinBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MinBooleanGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java index 71e636001cd5f..ffad2a58c5d41 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MinBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java index 01ee21f82ab53..5a421b5c1fe4f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinBytesRefAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java index cb6ab0d06d401..2588947976980 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MinBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MinBytesRefAggregatorFunctionSupplier() { + } - public MinBytesRefAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MinBytesRefAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MinBytesRefGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MinBytesRefAggregatorFunction aggregator(DriverContext driverContext) { + public MinBytesRefAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MinBytesRefAggregatorFunction.create(driverContext, channels); } @Override - public MinBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MinBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MinBytesRefGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java index 1650c6c513fdd..01adcea310b46 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java index a436cdcdbef6d..a92183e3e7af7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
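One practical effect of the new accessors is that the intermediate (partial-reduction) layout of an aggregation can be read straight off the supplier, without constructing a function or allocating its state. A speculative usage sketch; the element type IntermediateStateDesc and the act of printing it are illustrative assumptions:

// Hypothetical: inspect how MIN over BytesRef serializes its partial state.
static void printIntermediateLayout() {
    AggregatorFunctionSupplier supplier = new MinBytesRefAggregatorFunctionSupplier();
    for (IntermediateStateDesc desc : supplier.groupingIntermediateStateDesc()) {
        System.out.println(desc); // e.g. name and block type of each intermediate column
    }
}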
*/ public final class MinDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java index f91cdfe54c89e..3af1017b5de2c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MinDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MinDoubleAggregatorFunctionSupplier() { + } - public MinDoubleAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MinDoubleAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MinDoubleGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MinDoubleAggregatorFunction aggregator(DriverContext driverContext) { + public MinDoubleAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MinDoubleAggregatorFunction.create(driverContext, channels); } @Override - public MinDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MinDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MinDoubleGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java index 729c77a225049..5c19d45fa16b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MinDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java index ec6757e59d074..439f463ce5665 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java index a8ccc70f9996a..c120706ebba29 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MinFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MinFloatAggregatorFunctionSupplier() { + } - public MinFloatAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MinFloatAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MinFloatGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MinFloatAggregatorFunction aggregator(DriverContext driverContext) { + public MinFloatAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return MinFloatAggregatorFunction.create(driverContext, channels); } @Override - public MinFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MinFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MinFloatGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java index d3d59935e62d5..e92d9b44d18c8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java index f76dcec81d871..4fbb44cca85a5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MinIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java index e09102a1a88ec..c44b47bad0cfa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java @@ -12,22 +12,30 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MinIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MinIntAggregatorFunctionSupplier() { + } - public MinIntAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MinIntAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MinIntGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MinIntAggregatorFunction aggregator(DriverContext driverContext) { + public MinIntAggregatorFunction aggregator(DriverContext driverContext, List channels) { return MinIntAggregatorFunction.create(driverContext, channels); } @Override - public MinIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MinIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MinIntGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java index 7095608ca50cc..7cf3a99f15e2e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java index 795299d9332fc..ec20f57699760 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinIpAggregator}. 
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinIpAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java index 591a8501f874d..a00ebdb43e1ac 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java @@ -12,22 +12,30 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MinIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MinIpAggregatorFunctionSupplier() { + } - public MinIpAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MinIpAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MinIpGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MinIpAggregatorFunction aggregator(DriverContext driverContext) { + public MinIpAggregatorFunction aggregator(DriverContext driverContext, List channels) { return MinIpAggregatorFunction.create(driverContext, channels); } @Override - public MinIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MinIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MinIpGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java index 0cb4154009a90..abb8e1cd89fcd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MinIpGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java index 4fc968bab2eff..20852b134ea32 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java index 85805767c9168..850ae6284e0f5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java @@ -12,22 +12,30 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MinLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public MinLongAggregatorFunctionSupplier() { + } - public MinLongAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return MinLongAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return MinLongGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public MinLongAggregatorFunction aggregator(DriverContext driverContext) { + public MinLongAggregatorFunction aggregator(DriverContext driverContext, List channels) { return MinLongAggregatorFunction.create(driverContext, channels); } @Override - public MinLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public MinLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return MinLongGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java index 0498c4b8d866b..87971c66fcda8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java index 9ece01135e0a9..51cbd31e56b85 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class PercentileDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java index 996cebd805aa8..bd50841421a6a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java @@ -12,26 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class PercentileDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final double percentile; - public PercentileDoubleAggregatorFunctionSupplier(List channels, double percentile) { - this.channels = channels; + public PercentileDoubleAggregatorFunctionSupplier(double percentile) { this.percentile = percentile; } @Override - public PercentileDoubleAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return PercentileDoubleAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return PercentileDoubleGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public PercentileDoubleAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return PercentileDoubleAggregatorFunction.create(driverContext, channels, percentile); } @Override - public PercentileDoubleGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + public PercentileDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return PercentileDoubleGroupingAggregatorFunction.create(channels, driverContext, percentile); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java index 7767f4228bdcb..d455c4ad0a15a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
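Suppliers that carry real configuration, such as the percentile family, keep that configuration in the constructor; only the channel list moves. So a supplier is built once with its percentile and can then be reused across channel mappings, as in this sketch (95.0, channel 0 and driverContext are illustrative values, not from the source):

import java.util.List;

static void percentileExample(DriverContext driverContext) {
    // Configuration stays on the supplier; channels arrive at build time.
    var p95 = new PercentileDoubleAggregatorFunctionSupplier(95.0);
    var overall = p95.aggregator(driverContext, List.of(0));          // non-grouping
    var perGroup = p95.groupingAggregator(driverContext, List.of(0)); // grouping
}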
*/ public final class PercentileDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java index 434989adf47b2..b4b36ec039b72 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link PercentileFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class PercentileFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java index 1d1678f15448c..d705ad8da70d8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java @@ -12,25 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link PercentileFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class PercentileFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final double percentile; - public PercentileFloatAggregatorFunctionSupplier(List channels, double percentile) { - this.channels = channels; + public PercentileFloatAggregatorFunctionSupplier(double percentile) { this.percentile = percentile; } @Override - public PercentileFloatAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return PercentileFloatAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return PercentileFloatGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public PercentileFloatAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return PercentileFloatAggregatorFunction.create(driverContext, channels, percentile); } @Override - public PercentileFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public PercentileFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return PercentileFloatGroupingAggregatorFunction.create(channels, driverContext, percentile); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java index b65876df06031..5b2649a57167a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link PercentileFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class PercentileFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java index eb4ae96f5dea5..730c27ea27bf7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class PercentileIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java index 7ce62dd7b600b..d925ef91ed6ef 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java @@ -12,25 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class PercentileIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final double percentile; - public PercentileIntAggregatorFunctionSupplier(List channels, double percentile) { - this.channels = channels; + public PercentileIntAggregatorFunctionSupplier(double percentile) { this.percentile = percentile; } @Override - public PercentileIntAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return PercentileIntAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return PercentileIntGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public PercentileIntAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return PercentileIntAggregatorFunction.create(driverContext, channels, percentile); } @Override - public PercentileIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public PercentileIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return PercentileIntGroupingAggregatorFunction.create(channels, driverContext, percentile); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java index dbbb85b4728f4..2aef3e76b00f4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class PercentileIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java index 837f7efb32441..510b69cb6fc89 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class PercentileLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java index 7e32bfc9d9937..36d2ed23cee94 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java @@ -12,25 +12,34 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class PercentileLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final double percentile; - public PercentileLongAggregatorFunctionSupplier(List channels, double percentile) { - this.channels = channels; + public PercentileLongAggregatorFunctionSupplier(double percentile) { this.percentile = percentile; } @Override - public PercentileLongAggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return PercentileLongAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return PercentileLongGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public PercentileLongAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return PercentileLongAggregatorFunction.create(driverContext, channels, percentile); } @Override - public PercentileLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public PercentileLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return PercentileLongGroupingAggregatorFunction.create(channels, driverContext, percentile); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java index 57934ba6e6fe9..3330e630235da 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class PercentileLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java index 8806e1ed865c2..d2dd780bf43a5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java @@ -12,25 +12,33 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link RateDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class RateDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; - private final long unitInMillis; - public RateDoubleAggregatorFunctionSupplier(List channels, long unitInMillis) { - this.channels = channels; + public RateDoubleAggregatorFunctionSupplier(long unitInMillis) { this.unitInMillis = unitInMillis; } @Override - public AggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + throw new UnsupportedOperationException("non-grouping aggregator is not supported"); + } + + @Override + public List groupingIntermediateStateDesc() { + return RateDoubleGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { throw new UnsupportedOperationException("non-grouping aggregator is not supported"); } @Override - public RateDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public RateDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List channels) { return RateDoubleGroupingAggregatorFunction.create(channels, driverContext, unitInMillis); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java index 2fca5c1d19c5e..5a2f4203cf49f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link RateDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class RateDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java index 4b1546314a9cb..be8456b28b3fe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java @@ -12,25 +12,33 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link RateFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
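The rate suppliers differ from the rest of the family: rate is only defined per group, so the non-grouping path throws rather than building a function, and after this change that extends to nonGroupingIntermediateStateDesc(). Callers should treat these suppliers as grouping-only, as in this sketch (the unit 1000L for a per-second rate and the channel indices are illustrative assumptions):

import java.util.List;

static GroupingAggregatorFunction rateExample(DriverContext driverContext) {
    var rate = new RateDoubleAggregatorFunctionSupplier(1000L);
    // Both non-grouping paths throw UnsupportedOperationException:
    //   rate.aggregator(driverContext, List.of(0, 1));
    //   rate.nonGroupingIntermediateStateDesc();
    return rate.groupingAggregator(driverContext, List.of(0, 1));
}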
  */
 public final class RateFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final long unitInMillis;
 
-  public RateFloatAggregatorFunctionSupplier(List<Integer> channels, long unitInMillis) {
-    this.channels = channels;
+  public RateFloatAggregatorFunctionSupplier(long unitInMillis) {
     this.unitInMillis = unitInMillis;
   }
 
   @Override
-  public AggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    throw new UnsupportedOperationException("non-grouping aggregator is not supported");
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return RateFloatGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     throw new UnsupportedOperationException("non-grouping aggregator is not supported");
   }
 
   @Override
-  public RateFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public RateFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return RateFloatGroupingAggregatorFunction.create(channels, driverContext, unitInMillis);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java
index 628503f12900e..1048c93223b9e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java
@@ -24,7 +24,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link RateFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class RateFloatGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
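Aside: the rate suppliers are grouping-only, so the new nonGroupingIntermediateStateDesc() throws just as aggregator(...) always has. A sketch under the same stand-in assumptions as above:

    var rate = new RateFloatAggregatorFunctionSupplier(1000L); // unit in millis, illustrative
    RateFloatGroupingAggregatorFunction grouped =
        rate.groupingAggregator(driverContext, List.of(0, 1));
    // rate.aggregator(driverContext, List.of(0));  // throws UnsupportedOperationException
    // rate.nonGroupingIntermediateStateDesc();     // throws UnsupportedOperationException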
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java
index a98f0217ef90e..c9c6ce5a55bed 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java
@@ -12,25 +12,33 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link RateIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class RateIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final long unitInMillis;
 
-  public RateIntAggregatorFunctionSupplier(List<Integer> channels, long unitInMillis) {
-    this.channels = channels;
+  public RateIntAggregatorFunctionSupplier(long unitInMillis) {
     this.unitInMillis = unitInMillis;
   }
 
   @Override
-  public AggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    throw new UnsupportedOperationException("non-grouping aggregator is not supported");
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return RateIntGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     throw new UnsupportedOperationException("non-grouping aggregator is not supported");
   }
 
   @Override
-  public RateIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public RateIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return RateIntGroupingAggregatorFunction.create(channels, driverContext, unitInMillis);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java
index 2f030544da612..00ea9986de165 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link RateIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class RateIntGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java
index b8100dbbe4455..a1f503b726aa4 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java
@@ -12,25 +12,33 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link RateLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class RateLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final long unitInMillis;
 
-  public RateLongAggregatorFunctionSupplier(List<Integer> channels, long unitInMillis) {
-    this.channels = channels;
+  public RateLongAggregatorFunctionSupplier(long unitInMillis) {
     this.unitInMillis = unitInMillis;
   }
 
   @Override
-  public AggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    throw new UnsupportedOperationException("non-grouping aggregator is not supported");
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return RateLongGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     throw new UnsupportedOperationException("non-grouping aggregator is not supported");
   }
 
   @Override
-  public RateLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public RateLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return RateLongGroupingAggregatorFunction.create(channels, driverContext, unitInMillis);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java
index fd272e47fa6a3..30cc10bf67077 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link RateLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class RateLongGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java
index dd6cc89401a99..3b08c42b7acbf 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link StdDevDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class StdDevDoubleAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java
index 313eed4ae97ae..5310a11c1fddb 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java
@@ -12,22 +12,31 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link StdDevDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class StdDevDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public StdDevDoubleAggregatorFunctionSupplier() {
+  }
 
-  public StdDevDoubleAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return StdDevDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return StdDevDoubleGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public StdDevDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public StdDevDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevDoubleAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public StdDevDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public StdDevDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevDoubleGroupingAggregatorFunction.create(channels, driverContext);
   }
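Aside: with channels gone from construction, suppliers without other options (std_dev here) become zero-argument, and the new accessors expose the intermediate-state layout without creating an aggregator first. Illustrative only:

    var stdDev = new StdDevDoubleAggregatorFunctionSupplier();
    List<IntermediateStateDesc> nonGrouping = stdDev.nonGroupingIntermediateStateDesc();
    List<IntermediateStateDesc> grouping = stdDev.groupingIntermediateStateDesc();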
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java
index da49c254e353a..d15c35f9324a9 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link StdDevDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class StdDevDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java
index bf8c4854f6b93..4f2937c2db07d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java
@@ -23,7 +23,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link StdDevFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class StdDevFloatAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java
index 25dfa54895eda..52ffb0f5d580d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java
@@ -12,22 +12,31 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link StdDevFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class StdDevFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public StdDevFloatAggregatorFunctionSupplier() {
+  }
 
-  public StdDevFloatAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return StdDevFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return StdDevFloatGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public StdDevFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public StdDevFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevFloatAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public StdDevFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public StdDevFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevFloatGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java
index bf994aaf2840e..ec7a319cd0752 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java
@@ -24,7 +24,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link StdDevFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class StdDevFloatGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java
index 4a5585a7dd454..9fe0798f53d76 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java
@@ -23,7 +23,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link StdDevIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class StdDevIntAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java
index 5a762d6606a25..2f43a867bf83e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java
@@ -12,22 +12,31 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link StdDevIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class StdDevIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public StdDevIntAggregatorFunctionSupplier() {
+  }
 
-  public StdDevIntAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return StdDevIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return StdDevIntGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public StdDevIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public StdDevIntAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevIntAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public StdDevIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public StdDevIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevIntGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java
index 139cc24d3541f..747d0a53c139c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link StdDevIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class StdDevIntGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java
index b5ed31116a90c..60cee4bca2c14 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link StdDevLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class StdDevLongAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java
index 09b996201ef16..364fc4820c283 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java
@@ -12,22 +12,31 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link StdDevLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class StdDevLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public StdDevLongAggregatorFunctionSupplier() {
+  }
 
-  public StdDevLongAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return StdDevLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return StdDevLongGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public StdDevLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public StdDevLongAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevLongAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public StdDevLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public StdDevLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return StdDevLongGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java
index da7a5f4bdea0d..dff7e3a204732 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link StdDevLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class StdDevLongGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java
index 4d24579203df1..70c3191a2236b 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link SumDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SumDoubleAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java
index b68bed30013c6..6fbe13d696ec9 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java
@@ -12,22 +12,31 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SumDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SumDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SumDoubleAggregatorFunctionSupplier() {
+  }
 
-  public SumDoubleAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SumDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SumDoubleGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public SumDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public SumDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumDoubleAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public SumDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public SumDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumDoubleGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java
index 71b282c58aca2..ca7b452e121d0 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SumDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SumDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java
index 50f41b5edc05f..d2259391a2b43 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link SumFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SumFloatAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java
index 515122ec08ac0..9b9d863fc8171 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java
@@ -12,22 +12,31 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SumFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SumFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SumFloatAggregatorFunctionSupplier() {
+  }
 
-  public SumFloatAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SumFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SumFloatGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public SumFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public SumFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumFloatAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public SumFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public SumFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumFloatGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java
index 664f616acee9d..ea5a876b2432b 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java
@@ -24,7 +24,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SumFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SumFloatGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java
index 95bd95ac474ad..6d645c5e5c1ce 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link SumIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SumIntAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java
index dcb48944dc557..ef48162d214b6 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java
@@ -12,22 +12,30 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SumIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SumIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SumIntAggregatorFunctionSupplier() {
+  }
 
-  public SumIntAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SumIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SumIntGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public SumIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public SumIntAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return SumIntAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public SumIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public SumIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumIntGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java
index 2f369374d8cdb..d734f42df7038 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SumIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SumIntGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java
index fac21d99bf713..5852a590d265d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link SumLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SumLongAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java
index b4d36aa526075..fe666c535f63a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java
@@ -12,22 +12,30 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SumLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SumLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
+  public SumLongAggregatorFunctionSupplier() {
+  }
 
-  public SumLongAggregatorFunctionSupplier(List<Integer> channels) {
-    this.channels = channels;
+  @Override
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return SumLongAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return SumLongGroupingAggregatorFunction.intermediateStateDesc();
   }
 
   @Override
-  public SumLongAggregatorFunction aggregator(DriverContext driverContext) {
+  public SumLongAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return SumLongAggregatorFunction.create(driverContext, channels);
   }
 
   @Override
-  public SumLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public SumLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return SumLongGroupingAggregatorFunction.create(channels, driverContext);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java
index c8c0990de4e54..a2feb10cff580 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SumLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SumLongGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java
index b8d06787f7f68..c3424b6c982b5 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link TopBooleanAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopBooleanAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java
index 74beed084543f..5ee84e44bad68 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java
@@ -12,29 +12,37 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopBooleanAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;
 
   private final boolean ascending;
 
-  public TopBooleanAggregatorFunctionSupplier(List<Integer> channels, int limit,
-      boolean ascending) {
-    this.channels = channels;
+  public TopBooleanAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }
 
   @Override
-  public TopBooleanAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopBooleanAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopBooleanGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopBooleanAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopBooleanAggregatorFunction.create(driverContext, channels, limit, ascending);
   }
 
   @Override
-  public TopBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopBooleanGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }
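Aside: for parameterized aggregations such as TOP, only the aggregation's own options stay in the constructor. A hypothetical caller:

    var top3 = new TopBooleanAggregatorFunctionSupplier(3, true); // limit = 3, ascending
    TopBooleanAggregatorFunction fn = top3.aggregator(driverContext, List.of(0));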
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java
index cd35595eeadb0..108e0a0704ec8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopBooleanAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunction.java
index 9ef460be5796b..a0927f5835a8d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunction.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link TopBytesRefAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopBytesRefAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java
index 8c77d2116bf69..89417c4a98ad6 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java
@@ -12,29 +12,37 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopBytesRefAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;
 
   private final boolean ascending;
 
-  public TopBytesRefAggregatorFunctionSupplier(List<Integer> channels, int limit,
-      boolean ascending) {
-    this.channels = channels;
+  public TopBytesRefAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }
 
   @Override
-  public TopBytesRefAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopBytesRefAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopBytesRefGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopBytesRefAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopBytesRefAggregatorFunction.create(driverContext, channels, limit, ascending);
   }
 
   @Override
-  public TopBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopBytesRefGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java
index aa2d6094c8c3f..45514ee343668 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopBytesRefAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java
index 210bc76483a81..ca80c91c58cb0 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link TopDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopDoubleAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java
index 36a8763b4a870..0aa10d3cc48ed 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java
@@ -12,28 +12,37 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;
 
   private final boolean ascending;
 
-  public TopDoubleAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) {
-    this.channels = channels;
+  public TopDoubleAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }
 
   @Override
-  public TopDoubleAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopDoubleAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopDoubleGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopDoubleAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopDoubleAggregatorFunction.create(driverContext, channels, limit, ascending);
   }
 
   @Override
-  public TopDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopDoubleGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
index 6b76ff7772ad1..956c7b8ae5595 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java
index f7fdb406acadb..b569590b781bd 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link TopFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopFloatAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
index e01df8329a315..52e2dbc304955 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
@@ -12,28 +12,37 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;
 
   private final boolean ascending;
 
-  public TopFloatAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) {
-    this.channels = channels;
+  public TopFloatAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }
 
   @Override
-  public TopFloatAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopFloatAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopFloatGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopFloatAggregatorFunction aggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopFloatAggregatorFunction.create(driverContext, channels, limit, ascending);
   }
 
   @Override
-  public TopFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopFloatGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
index ffaf858645440..712277798e65c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopFloatGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java
index 1ea40134f7260..0d279e672dd9d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link AggregatorFunction} implementation for {@link TopIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopIntAggregatorFunction implements AggregatorFunction {
   private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
index 4481f2d5afaa8..88919a4b25ce4 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
@@ -12,28 +12,36 @@
 
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final List<Integer> channels;
-
   private final int limit;
 
   private final boolean ascending;
 
-  public TopIntAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) {
-    this.channels = channels;
+  public TopIntAggregatorFunctionSupplier(int limit, boolean ascending) {
     this.limit = limit;
     this.ascending = ascending;
   }
 
   @Override
-  public TopIntAggregatorFunction aggregator(DriverContext driverContext) {
+  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
+    return TopIntAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
+    return TopIntGroupingAggregatorFunction.intermediateStateDesc();
+  }
+
+  @Override
+  public TopIntAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
     return TopIntAggregatorFunction.create(driverContext, channels, limit, ascending);
   }
 
   @Override
-  public TopIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+  public TopIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
+      List<Integer> channels) {
     return TopIntGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
   }
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
index a3453126e055e..0581c0f244964 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
*/ public final class TopIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java index 8c216c90504c1..e723dc2451b5e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link TopIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class TopIpAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java index 8f630c0306170..461f9809b673e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java @@ -12,28 +12,36 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link TopIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
*/ public final class TopIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; - private final int limit; private final boolean ascending; - public TopIpAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) { - this.channels = channels; + public TopIpAggregatorFunctionSupplier(int limit, boolean ascending) { this.limit = limit; this.ascending = ascending; } @Override - public TopIpAggregatorFunction aggregator(DriverContext driverContext) { + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return TopIpAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return TopIpGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public TopIpAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) { return TopIpAggregatorFunction.create(driverContext, channels, limit, ascending); } @Override - public TopIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public TopIpGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List<Integer> channels) { return TopIpGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java index 74a6987962b78..bfcdadfa54814 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link TopIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class TopIpGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java index 85df0f7edc843..30f115c85ba1b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java @@ -19,7 +19,7 @@ /** * {@link AggregatorFunction} implementation for {@link TopLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead.
*/ public final class TopLongAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java index 1a39c7b5580ec..cefd6082c22ec 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java @@ -12,28 +12,36 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link TopLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class TopLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; - private final int limit; private final boolean ascending; - public TopLongAggregatorFunctionSupplier(List<Integer> channels, int limit, boolean ascending) { - this.channels = channels; + public TopLongAggregatorFunctionSupplier(int limit, boolean ascending) { this.limit = limit; this.ascending = ascending; } @Override - public TopLongAggregatorFunction aggregator(DriverContext driverContext) { + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return TopLongAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return TopLongGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public TopLongAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) { return TopLongAggregatorFunction.create(driverContext, channels, limit, ascending); } @Override - public TopLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public TopLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List<Integer> channels) { return TopLongGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java index b4a4b7154e626..d5ebeca1f174c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link TopLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
*/ public final class TopLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java index abf73c07d4ab6..d3dfad9786561 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link AggregatorFunction} implementation for {@link ValuesBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class ValuesBooleanAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java index 20d5a5fda7726..80279a8d6c731 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link ValuesBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
*/ public final class ValuesBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public ValuesBooleanAggregatorFunctionSupplier() { + } - public ValuesBooleanAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return ValuesBooleanAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return ValuesBooleanGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public ValuesBooleanAggregatorFunction aggregator(DriverContext driverContext) { + public ValuesBooleanAggregatorFunction aggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesBooleanAggregatorFunction.create(driverContext, channels); } @Override - public ValuesBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public ValuesBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesBooleanGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java index b51da118e0f8d..81da1f65e9bee 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link ValuesBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class ValuesBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java index ecc6424ba8501..6a4d7ad713f46 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link ValuesBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead.
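Suppliers with no constructor parameters, like the Values* family above, become entirely stateless after this change, hence the explicit no-arg constructor. The new nonGroupingIntermediateStateDesc() and groupingIntermediateStateDesc() overrides expose the intermediate state layout without instantiating an aggregator. A hedged sketch of what that enables (class and method names are taken from the generated code above; the variable names are hypothetical):

    // Inspect the intermediate (partial-aggregation) state layout up front,
    // e.g. for planning the exchange between data and coordinating nodes.
    ValuesBooleanAggregatorFunctionSupplier supplier = new ValuesBooleanAggregatorFunctionSupplier();
    List<IntermediateStateDesc> nonGrouping = supplier.nonGroupingIntermediateStateDesc();
    List<IntermediateStateDesc> grouping = supplier.groupingIntermediateStateDesc();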
*/ public final class ValuesBytesRefAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java index 31de817edf868..16fa41876122a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link ValuesBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class ValuesBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public ValuesBytesRefAggregatorFunctionSupplier() { + } - public ValuesBytesRefAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return ValuesBytesRefAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return ValuesBytesRefGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public ValuesBytesRefAggregatorFunction aggregator(DriverContext driverContext) { + public ValuesBytesRefAggregatorFunction aggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesBytesRefAggregatorFunction.create(driverContext, channels); } @Override - public ValuesBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public ValuesBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesBytesRefGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java index bdce606f92168..6db44ffce8faf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link ValuesBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
*/ public final class ValuesBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java index 2fa8ed31ec427..17252bd237f49 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java @@ -19,7 +19,7 @@ /** * {@link AggregatorFunction} implementation for {@link ValuesDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class ValuesDoubleAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java index 049deda37c460..0a70a3d71ef9c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link ValuesDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
*/ public final class ValuesDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public ValuesDoubleAggregatorFunctionSupplier() { + } - public ValuesDoubleAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return ValuesDoubleAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return ValuesDoubleGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public ValuesDoubleAggregatorFunction aggregator(DriverContext driverContext) { + public ValuesDoubleAggregatorFunction aggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesDoubleAggregatorFunction.create(driverContext, channels); } @Override - public ValuesDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public ValuesDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesDoubleGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java index 5b8c2ac802663..893d8fcd2ea5d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link ValuesDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class ValuesDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java index 8b61c6d07eed6..55c58fbc2af16 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java @@ -19,7 +19,7 @@ /** * {@link AggregatorFunction} implementation for {@link ValuesFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead.
*/ public final class ValuesFloatAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java index b4b0c2f1a0444..f8c395b01b5ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link ValuesFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class ValuesFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public ValuesFloatAggregatorFunctionSupplier() { + } - public ValuesFloatAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return ValuesFloatAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return ValuesFloatGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public ValuesFloatAggregatorFunction aggregator(DriverContext driverContext) { + public ValuesFloatAggregatorFunction aggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesFloatAggregatorFunction.create(driverContext, channels); } @Override - public ValuesFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public ValuesFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesFloatGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java index f50c5a67d15a5..8afd75384aa87 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link ValuesFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
*/ public final class ValuesFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java index 7f12bbc18b202..a65e8b1db8fe7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java @@ -19,7 +19,7 @@ /** * {@link AggregatorFunction} implementation for {@link ValuesIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class ValuesIntAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java index 21402b5913813..f4aa9722bff7a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link ValuesIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
*/ public final class ValuesIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public ValuesIntAggregatorFunctionSupplier() { + } - public ValuesIntAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return ValuesIntAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return ValuesIntGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public ValuesIntAggregatorFunction aggregator(DriverContext driverContext) { + public ValuesIntAggregatorFunction aggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesIntAggregatorFunction.create(driverContext, channels); } @Override - public ValuesIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public ValuesIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesIntGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java index c90fcedb291cf..468320a69fc98 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link ValuesIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class ValuesIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java index 7e8c256d90f93..7781392f8c29c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java @@ -19,7 +19,7 @@ /** * {@link AggregatorFunction} implementation for {@link ValuesLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead.
*/ public final class ValuesLongAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java index a025bd0ade17a..9f3bbf2b3122a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java @@ -12,22 +12,31 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link ValuesLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class ValuesLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public ValuesLongAggregatorFunctionSupplier() { + } - public ValuesLongAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return ValuesLongAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return ValuesLongGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public ValuesLongAggregatorFunction aggregator(DriverContext driverContext) { + public ValuesLongAggregatorFunction aggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesLongAggregatorFunction.create(driverContext, channels); } @Override - public ValuesLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public ValuesLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, + List<Integer> channels) { return ValuesLongGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java index 8a79cd7d942ee..cc6e7121c5afb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link ValuesLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
*/ public final class ValuesLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java index a205c728db5fc..5beffde65284e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialCentroidCartesianPointDocValuesAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java index 3228340beeb43..593e7c9d42916 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
*/ public final class SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier() { + } - public SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return SpatialCentroidCartesianPointDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialCentroidCartesianPointDocValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialCentroidCartesianPointDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java index cc2fb38bb925c..80c5643ea0a6c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java @@ -25,7 +25,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java index e20a3fb1cfa35..fd5b519bc3d53 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java @@ -26,7 +26,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialCentroidCartesianPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead.
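Note the argument-order asymmetry the generator preserves in the delegations above: aggregator(...) forwards to create(driverContext, channels) while groupingAggregator(...) forwards to create(channels, driverContext). Callers are insulated from this, since both supplier methods share the same (driverContext, channels) shape. A sketch under the same assumptions as the earlier examples (driverContext supplied by the enclosing operator):

    var supplier = new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier();
    var fn = supplier.aggregator(driverContext, List.of(0));               // delegates to create(driverContext, channels)
    var groupingFn = supplier.groupingAggregator(driverContext, List.of(0)); // delegates to create(channels, driverContext)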
*/ public final class SpatialCentroidCartesianPointSourceValuesAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java index 9b0d7c5f64cd7..8ae5fc6180d97 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java @@ -9,29 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidCartesianPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier() { + } - public SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier( - List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return SpatialCentroidCartesianPointSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialCentroidCartesianPointSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialCentroidCartesianPointSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java index 6ae2b444efe98..f767d5e39d1d1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -28,7 +28,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link
SpatialCentroidCartesianPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java index b2c237a904796..22e3a6fa69801 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialCentroidGeoPointDocValuesAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java index 46c2777e8c77a..ae38d6d91ab82 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
*/ public final class SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier() { + } - public SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return SpatialCentroidGeoPointDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialCentroidGeoPointDocValuesAggregatorFunction aggregator( - DriverContext driverContext) { + public SpatialCentroidGeoPointDocValuesAggregatorFunction aggregator(DriverContext driverContext, + List<Integer> channels) { return SpatialCentroidGeoPointDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java index 0cce9b7cf1cd5..ce99c9086cca3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java @@ -25,7 +25,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java index db61420fb8cbe..f583accfb9f71 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java @@ -26,7 +26,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead.
*/ public final class SpatialCentroidGeoPointSourceValuesAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java index 02b975f03890f..e10e2b50ef615 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier() { + } - public SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return SpatialCentroidGeoPointSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialCentroidGeoPointSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialCentroidGeoPointSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java index 6c4d6635846df..bb07444e913ae 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java @@ -28,7 +28,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated.
Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java index 21306036fbf50..18d466442f750 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialExtentCartesianPointDocValuesAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java index 751ea3b4c4a9d..0d41ea3bf7e80 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
*/ public final class SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier() { + } - public SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return SpatialExtentCartesianPointDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialExtentCartesianPointDocValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialExtentCartesianPointDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java index a5191e57959b8..c528cef4d3863 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java index 6610168e1df21..e7068b708a984 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead.
*/ public final class SpatialExtentCartesianPointSourceValuesAggregatorFunction implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java index 7f4d1d69ae928..1cccb66bfa0ea 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List<Integer> channels; + public SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier() { + } - public SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(List<Integer> channels) { - this.channels = channels; + @Override + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return SpatialExtentCartesianPointSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialExtentCartesianPointSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialExtentCartesianPointSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List<Integer> channels) { return SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java index 4e06158952fc3..7ee441fe88f16 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. - * This class is generated.
Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunction.java index 3471aafc3a53b..4d1bd972434b1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialExtentCartesianShapeDocValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java index b53d779912fc9..40432cfb548e1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier() { + } - public SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentCartesianShapeDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialExtentCartesianShapeDocValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianShapeDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java index aa3c1a7ba56ae..48161b3ea4bf3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunction.java index 014a2d454f576..62440eba29355 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianShapeSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class SpatialExtentCartesianShapeSourceValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java index c8b1372d44b68..dfb2aaee9aff9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianShapeSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier() { + } - public SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentCartesianShapeSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override public SpatialExtentCartesianShapeSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianShapeSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java index d932038a26ec7..77893dd350b86 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianShapeSourceValuesAggregator}. - * This class is generated. 
Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java index 4e76d3dbe0298..49b9ca1bad69d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialExtentGeoPointDocValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java index f72a4cc648ec8..1f2dfb378498f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java @@ -9,27 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier() { + } - public SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentGeoPointDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialExtentGeoPointDocValuesAggregatorFunction aggregator(DriverContext driverContext) { + public SpatialExtentGeoPointDocValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialExtentGeoPointDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentGeoPointDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java index 9a97a37b22ca1..235bd10c3e8e2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialExtentGeoPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java index 05bcc79db4f34..7d31bea612321 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class SpatialExtentGeoPointSourceValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java index 1af20d72d08b0..e97d858511c04 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier() { + } - public SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentGeoPointSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialExtentGeoPointSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + public SpatialExtentGeoPointSourceValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialExtentGeoPointSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java index 1231e24382887..d1c715d5b5f35 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. 
Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunction.java index fefef6edf6dc7..d181ae1305c7e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialExtentGeoShapeDocValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java index d104c74bc5072..9582411551572 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java @@ -9,27 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier() { + } - public SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentGeoShapeDocValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialExtentGeoShapeDocValuesAggregatorFunction aggregator(DriverContext driverContext) { + public SpatialExtentGeoShapeDocValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialExtentGeoShapeDocValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java index 7d286eba12ffc..00df4fe3282e6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunction.java index a16f8911d7816..b72a8f23eb5ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoShapeSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class SpatialExtentGeoShapeSourceValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java index 1eeb17367d852..be425646e90ed 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java @@ -9,28 +9,36 @@ import java.lang.String; import java.util.List; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoShapeSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final List channels; + public SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier() { + } - public SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier(List channels) { - this.channels = channels; + @Override + public List nonGroupingIntermediateStateDesc() { + return SpatialExtentGeoShapeSourceValuesAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.intermediateStateDesc(); } @Override - public SpatialExtentGeoShapeSourceValuesAggregatorFunction aggregator( - DriverContext driverContext) { + public SpatialExtentGeoShapeSourceValuesAggregatorFunction aggregator(DriverContext driverContext, + List channels) { return SpatialExtentGeoShapeSourceValuesAggregatorFunction.create(driverContext, channels); } @Override public SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { + DriverContext driverContext, List channels) { return SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.create(channels, driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java index 8c768496e5905..c9be8deaf649c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoShapeSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. 
Edit {@code GroupingAggregatorImplementer} instead. */ public final class SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/EsqlRefCountingListener.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/EsqlRefCountingListener.java index 69df0fb8ceff1..2dfc60744be2e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/EsqlRefCountingListener.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/EsqlRefCountingListener.java @@ -34,7 +34,8 @@ public EsqlRefCountingListener(ActionListener delegate) { } public ActionListener acquire() { - return refs.acquireListener().delegateResponse((l, e) -> { + var listener = ActionListener.assertAtLeastOnce(refs.acquireListener()); + return listener.delegateResponse((l, e) -> { failureCollector.unwrapAndCollect(e); l.onFailure(e); }); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java index 5fa1394e8cf96..9886e0c1af306 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java @@ -37,6 +37,7 @@ public boolean hasValue(int groupId) { * idempotent and fast if already tracking so it's safe to, say, call it once * for every block of values that arrives containing {@code null}. */ + @Override public final void enableGroupIdTracking(SeenGroupIds seenGroupIds) { if (seen == null) { seen = seenGroupIds.seenGroupIds(bigArrays); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunctionSupplier.java index 9f2395960477d..e192d1b2de7f8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunctionSupplier.java @@ -10,19 +10,25 @@ import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.operator.DriverContext; +import java.util.List; + /** * Builds aggregation implementations, closing over any state required to do so. 
*/ public interface AggregatorFunctionSupplier extends Describable { - AggregatorFunction aggregator(DriverContext driverContext); + List nonGroupingIntermediateStateDesc(); + + List groupingIntermediateStateDesc(); + + AggregatorFunction aggregator(DriverContext driverContext, List channels); - GroupingAggregatorFunction groupingAggregator(DriverContext driverContext); + GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels); - default Aggregator.Factory aggregatorFactory(AggregatorMode mode) { + default Aggregator.Factory aggregatorFactory(AggregatorMode mode, List channels) { return new Aggregator.Factory() { @Override public Aggregator apply(DriverContext driverContext) { - return new Aggregator(aggregator(driverContext), mode); + return new Aggregator(aggregator(driverContext, channels), mode); } @Override @@ -32,11 +38,11 @@ public String describe() { }; } - default GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode) { + default GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode, List channels) { return new GroupingAggregator.Factory() { @Override public GroupingAggregator apply(DriverContext driverContext) { - return new GroupingAggregator(groupingAggregator(driverContext), mode); + return new GroupingAggregator(groupingAggregator(driverContext, channels), mode); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/BytesRefArrayState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/BytesRefArrayState.java index eb0a992c8610f..18b92c5447076 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/BytesRefArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/BytesRefArrayState.java @@ -138,7 +138,8 @@ boolean hasValue(int groupId) { * stores a flag to know if optimizations can be made. *
*/ - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { this.groupIdTrackingEnabled = true; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java index c6416f6d075db..a9d21babfbd9c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java @@ -19,15 +19,25 @@ import java.util.List; public class CountAggregatorFunction implements AggregatorFunction { - public static AggregatorFunctionSupplier supplier(List channels) { + public static AggregatorFunctionSupplier supplier() { return new AggregatorFunctionSupplier() { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return CountAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return CountGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { return CountAggregatorFunction.create(channels); } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels) { return CountGroupingAggregatorFunction.create(driverContext, channels); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java index ed63a283b3568..eab897fe24fc2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java @@ -11,6 +11,8 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import java.util.List; + /** * A {@link AggregatorFunctionSupplier} that wraps another, filtering which positions * are supplied to the aggregator. 
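The count supplier above follows the same pattern, and the default factory methods on AggregatorFunctionSupplier now thread the channels through as well. A hedged sketch of driving an aggregation end to end (the mode and channel index are illustrative, and the try-with-resources assumes Aggregator is releasable, as elsewhere in this module):

    AggregatorFunctionSupplier count = CountAggregatorFunction.supplier();
    List<IntermediateStateDesc> layout = count.nonGroupingIntermediateStateDesc(); // new introspection hook
    Aggregator.Factory factory = count.aggregatorFactory(AggregatorMode.SINGLE, List.of(0));
    try (Aggregator aggregator = factory.apply(driverContext)) {
        // feed pages to the aggregator here
    }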
@@ -20,8 +22,18 @@ public record FilteredAggregatorFunctionSupplier(AggregatorFunctionSupplier next AggregatorFunctionSupplier { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { - AggregatorFunction next = this.next.aggregator(driverContext); + public List nonGroupingIntermediateStateDesc() { + return next.nonGroupingIntermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return next.groupingIntermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { + AggregatorFunction next = this.next.aggregator(driverContext, channels); EvalOperator.ExpressionEvaluator filter = null; try { filter = this.filter.get(driverContext); @@ -35,8 +47,8 @@ public AggregatorFunction aggregator(DriverContext driverContext) { } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - GroupingAggregatorFunction next = this.next.groupingAggregator(driverContext); + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels) { + GroupingAggregatorFunction next = this.next.groupingAggregator(driverContext, channels); EvalOperator.ExpressionEvaluator filter = null; try { filter = this.filter.get(driverContext); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorState.java index 7c644342598dc..0e65164665808 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorState.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/GroupingAggregatorState.java @@ -17,4 +17,5 @@ public interface GroupingAggregatorState extends Releasable { /** Extracts an intermediate view of the contents of this state. 
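With the changes above, composing the filtering wrapper stays orthogonal to channel wiring: both the channels and the intermediate state descriptions are simply forwarded to the delegate. A sketch, where filterFactory is a hypothetical stand-in for the EvalOperator.ExpressionEvaluator.Factory that yields the per-position boolean mask:

    AggregatorFunctionSupplier filtered = new FilteredAggregatorFunctionSupplier(
        CountAggregatorFunction.supplier(),
        filterFactory // hypothetical; resolved per driver via filter.get(driverContext) as shown above
    );
    AggregatorFunction fn = filtered.aggregator(driverContext, List.of(0));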
*/ void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext); + void enableGroupIdTracking(SeenGroupIds seenGroupIds); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java index 3d8d04d7dc7e3..64a970c2acc07 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/HllStates.java @@ -138,7 +138,8 @@ static class GroupingState implements GroupingAggregatorState { this.hll = new HyperLogLogPlusPlus(HyperLogLogPlusPlus.precisionFromThreshold(precision), bigArrays, 1); } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // Nothing to do } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregator.java index 144214f93571e..049642c350917 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -71,7 +70,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(selected, driverContext); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BytesRefArrayState internalState; private GroupingState(BigArrays bigArrays, CircuitBreaker breaker) { @@ -90,7 +89,8 @@ public void combine(int groupId, GroupingState otherState, int otherGroupId) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { internalState.toIntermediate(blocks, offset, selected, driverContext); } @@ -98,7 +98,8 @@ Block toBlock(IntVector selected, DriverContext driverContext) { return internalState.toValuesBlock(selected, driverContext); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { internalState.enableGroupIdTracking(seen); } @@ -108,7 +109,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BreakingBytesRefBuilder internalState; private boolean seen; @@ -128,7 +129,8 @@ public void add(BytesRef value) { } } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = driverContext.blockFactory().newConstantBytesRefBlockWith(internalState.bytesRefView(), 1); blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxIpAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxIpAggregator.java index 1ddce7674ae7b..43b4a4a2fe0a1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxIpAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxIpAggregator.java @@ -15,7 +15,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @Aggregator({ @IntermediateState(name = "max", type = "BYTES_REF"), @IntermediateState(name = "seen", type = "BOOLEAN") }) @@ -67,7 +66,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(selected, driverContext); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BytesRef scratch = new BytesRef(); private final IpArrayState internalState; @@ -87,7 +86,8 @@ public void combine(int groupId, GroupingState otherState, int otherGroupId) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { internalState.toIntermediate(blocks, offset, selected, driverContext); } @@ -95,7 +95,8 @@ Block toBlock(IntVector selected, DriverContext driverContext) { return internalState.toValuesBlock(selected, driverContext); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { internalState.enableGroupIdTracking(seen); } @@ -105,7 +106,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BytesRef internalState; private boolean seen; @@ -121,7 +122,8 @@ public void add(BytesRef value) { } } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = driverContext.blockFactory().newConstantBytesRefBlockWith(internalState, 1); blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregator.java index 830900702a371..677b38a9af3a7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -71,7 +70,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(selected, driverContext); } - public static class GroupingState implements Releasable { + public 
static class GroupingState implements GroupingAggregatorState { private final BytesRefArrayState internalState; private GroupingState(BigArrays bigArrays, CircuitBreaker breaker) { @@ -90,7 +89,8 @@ public void combine(int groupId, GroupingState otherState, int otherGroupId) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { internalState.toIntermediate(blocks, offset, selected, driverContext); } @@ -98,7 +98,8 @@ Block toBlock(IntVector selected, DriverContext driverContext) { return internalState.toValuesBlock(selected, driverContext); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { internalState.enableGroupIdTracking(seen); } @@ -108,7 +109,7 @@ public void close() { } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BreakingBytesRefBuilder internalState; private boolean seen; @@ -128,7 +129,8 @@ public void add(BytesRef value) { } } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = driverContext.blockFactory().newConstantBytesRefBlockWith(internalState.bytesRefView(), 1); blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinIpAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinIpAggregator.java index 8313756851c1f..c4ee93db89cf8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinIpAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MinIpAggregator.java @@ -15,7 +15,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @Aggregator({ @IntermediateState(name = "max", type = "BYTES_REF"), @IntermediateState(name = "seen", type = "BOOLEAN") }) @@ -67,7 +66,7 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(selected, driverContext); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final BytesRef scratch = new BytesRef(); private final IpArrayState internalState; @@ -87,7 +86,8 @@ public void combine(int groupId, GroupingState otherState, int otherGroupId) { } } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { internalState.toIntermediate(blocks, offset, selected, driverContext); } @@ -95,7 +95,8 @@ Block toBlock(IntVector selected, DriverContext driverContext) { return internalState.toValuesBlock(selected, driverContext); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { internalState.enableGroupIdTracking(seen); } @@ -105,7 +106,7 @@ public void close() { } } - public static 
class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final BytesRef internalState; private boolean seen; @@ -121,7 +122,8 @@ public void add(BytesRef value) { } } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = driverContext.blockFactory().newConstantBytesRefBlockWith(internalState, 1); blocks[offset + 1] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java index 329e798dcb3f0..d5ea72ed23e5e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java @@ -146,7 +146,8 @@ void add(int groupId, TDigestState other) { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // We always enable. } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java index bff8903fd3bec..5b48498d83294 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java @@ -204,7 +204,8 @@ public void close() { Releasables.close(states); } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java index 252436ad9634f..e19d3107172e3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java @@ -17,7 +17,6 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -84,11 +83,12 @@ public static Block evaluateFinal(GroupingState state, IntVector selected, Drive return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private boolean seenFalse; private boolean seenTrue; - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -113,14 +113,15 @@ Block toBlock(BlockFactory blockFactory) { public void close() {} } - public static class GroupingState implements Releasable { + public static class 
GroupingState implements GroupingAggregatorState { private final BitArray values; private GroupingState(BigArrays bigArrays) { values = new BitArray(1, bigArrays); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -155,7 +156,8 @@ Block toBlock(BlockFactory blockFactory, IntVector selected) { } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we don't need to track which values have been seen because we don't do anything special for groups without values } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st index ad0ffc1d7e993..58a0a35e79d5d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st @@ -39,7 +39,7 @@ import org.elasticsearch.core.Releasables; * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
*/ final class $Type$ArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleArrayState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleArrayState.java.st index 3c57ab948a79f..5235e308ddf47 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleArrayState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleArrayState.java.st @@ -40,7 +40,7 @@ import org.elasticsearch.core.Releasables; * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
*/ final class $Type$FallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleState.java.st index 27609383e4f61..9eab53b1edcf2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleState.java.st @@ -13,7 +13,7 @@ import org.elasticsearch.compute.operator.DriverContext; /** * Aggregator state for a single $type$. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class $Type$FallibleState implements AggregatorState { private $type$ value; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st index 2581d3ebbf80b..a0b4ed8bd6337 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st @@ -338,7 +338,8 @@ public class Rate$Type$Aggregator { } } - void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-State.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-State.java.st index 7e0949c86faaa..8894dc9c80cff 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-State.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-State.java.st @@ -12,7 +12,7 @@ import org.elasticsearch.compute.operator.DriverContext; /** * Aggregator state for a single $type$. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. 
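The template states above all pick up the widened contract: GroupingAggregatorState now obligates enableGroupIdTracking alongside toIntermediate, and the single-valued states implement AggregatorState. A minimal hand-written sketch of a conforming grouping state (the class and its behavior are hypothetical, not part of this change; newConstantBooleanBlockWith is used exactly as in the hunks above):

    final class SeenOnlyGroupingState implements GroupingAggregatorState { // hypothetical example
        private boolean trackingEnabled;

        @Override
        public void enableGroupIdTracking(SeenGroupIds seenGroupIds) {
            trackingEnabled = true; // implementations may also no-op here, as several above do
        }

        @Override
        public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
            blocks[offset] = driverContext.blockFactory().newConstantBooleanBlockWith(trackingEnabled, selected.getPositionCount());
        }

        @Override
        public void close() {} // GroupingAggregatorState extends Releasable
    }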
*/ final class $Type$State implements AggregatorState { private $type$ value; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st index 18d573eea4a4c..761b70791e946 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st @@ -28,7 +28,6 @@ import org.elasticsearch.compute.data.$Type$Block; $endif$ import org.elasticsearch.compute.data.sort.$Name$BucketedSort; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.sort.SortOrder; @@ -99,7 +98,7 @@ $endif$ return state.toBlock(driverContext.blockFactory(), selected); } - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { private final $Name$BucketedSort sort; private GroupingState(BigArrays bigArrays, int limit, boolean ascending) { @@ -120,7 +119,8 @@ $endif$ sort.merge(groupId, other.sort, otherGroupId); } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -128,7 +128,8 @@ $endif$ return sort.toBlock(blockFactory, selected); } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } @@ -138,7 +139,7 @@ $endif$ } } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { private final GroupingState internalState; private SingleState(BigArrays bigArrays, int limit, boolean ascending) { @@ -153,7 +154,8 @@ $endif$ internalState.merge(0, other, 0); } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st index 1cef234b2238f..3006af595be1f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st @@ -35,7 +35,6 @@ $if(long)$ import org.elasticsearch.compute.data.LongBlock; $endif$ import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasable; $if(BytesRef)$ import org.elasticsearch.core.Releasables; @@ -155,7 +154,7 @@ $endif$ return state.toBlock(driverContext.blockFactory(), selected); } - public static class SingleState implements Releasable { + public static class SingleState implements AggregatorState { $if(BytesRef)$ private final BytesRefHash values; @@ -171,7 +170,8 @@ $else$ $endif$ } - void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + @Override + 
public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory()); } @@ -228,7 +228,7 @@ $endif$ * an {@code O(n^2)} operation for collection to support a {@code O(1)} * collector operation. But at least it's fairly simple. */ - public static class GroupingState implements Releasable { + public static class GroupingState implements GroupingAggregatorState { $if(long||double)$ private final LongLongHash values; @@ -263,7 +263,8 @@ $elseif(int||float)$ $endif$ } - void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { blocks[offset] = toBlock(driverContext.blockFactory(), selected); } @@ -324,7 +325,8 @@ $endif$ } } - void enableGroupIdTracking(SeenGroupIds seen) { + @Override + public void enableGroupIdTracking(SeenGroupIds seen) { // we figure out seen values from nulls on the values block } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st index 2a3d1143236ac..d87ff9ba66442 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st @@ -59,7 +59,7 @@ import java.util.BitSet; $endif$ /** * Maps a {@link $Type$Block} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. */ final class $Type$BlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java index 47d927fda91b5..c3b07d069cf11 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java @@ -260,7 +260,8 @@ boolean hasValue(int index) { } /** Needed for generated code that does null tracking, which we do not need because we use count */ - final void enableGroupIdTracking(SeenGroupIds ignore) {} + @Override + public final void enableGroupIdTracking(SeenGroupIds ignore) {} private void ensureCapacity(int groupId) { if (groupId >= xValues.size()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java new file mode 100644 index 0000000000000..d5eecc3e6ed70 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.mapper.BlockLoader; + +public class AggregateMetricDoubleBlockBuilder extends AbstractBlockBuilder implements BlockLoader.AggregateMetricDoubleBuilder { + + private DoubleBlockBuilder minBuilder; + private DoubleBlockBuilder maxBuilder; + private DoubleBlockBuilder sumBuilder; + private IntBlockBuilder countBuilder; + + public AggregateMetricDoubleBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + minBuilder = null; + maxBuilder = null; + sumBuilder = null; + countBuilder = null; + try { + minBuilder = new DoubleBlockBuilder(estimatedSize, blockFactory); + maxBuilder = new DoubleBlockBuilder(estimatedSize, blockFactory); + sumBuilder = new DoubleBlockBuilder(estimatedSize, blockFactory); + countBuilder = new IntBlockBuilder(estimatedSize, blockFactory); + } finally { + if (countBuilder == null) { + Releasables.closeWhileHandlingException(minBuilder, maxBuilder, sumBuilder, countBuilder); + } + } + } + + @Override + protected int valuesLength() { + throw new UnsupportedOperationException("Not available on aggregate_metric_double"); + } + + @Override + protected void growValuesArray(int newSize) { + throw new UnsupportedOperationException("Not available on aggregate_metric_double"); + } + + @Override + protected int elementSize() { + throw new UnsupportedOperationException("Not available on aggregate_metric_double"); + } + + @Override + public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) { + Block minBlock; + Block maxBlock; + Block sumBlock; + Block countBlock; + if (block.areAllValuesNull()) { + minBlock = block; + maxBlock = block; + sumBlock = block; + countBlock = block; + } else { + CompositeBlock composite = (CompositeBlock) block; + minBlock = composite.getBlock(Metric.MIN.getIndex()); + maxBlock = composite.getBlock(Metric.MAX.getIndex()); + sumBlock = composite.getBlock(Metric.SUM.getIndex()); + countBlock = composite.getBlock(Metric.COUNT.getIndex()); + } + minBuilder.copyFrom(minBlock, beginInclusive, endExclusive); + maxBuilder.copyFrom(maxBlock, beginInclusive, endExclusive); + sumBuilder.copyFrom(sumBlock, beginInclusive, endExclusive); + countBuilder.copyFrom(countBlock, beginInclusive, endExclusive); + return this; + } + + @Override + public AbstractBlockBuilder appendNull() { + minBuilder.appendNull(); + maxBuilder.appendNull(); + sumBuilder.appendNull(); + countBuilder.appendNull(); + return this; + } + + @Override + public Block.Builder mvOrdering(Block.MvOrdering mvOrdering) { + minBuilder.mvOrdering(mvOrdering); + maxBuilder.mvOrdering(mvOrdering); + sumBuilder.mvOrdering(mvOrdering); + countBuilder.mvOrdering(mvOrdering); + return this; + } + + @Override + public Block build() { + Block[] blocks = new Block[4]; + boolean success = false; + try { + finish(); + blocks[Metric.MIN.getIndex()] = minBuilder.build(); + blocks[Metric.MAX.getIndex()] = maxBuilder.build(); + blocks[Metric.SUM.getIndex()] = sumBuilder.build(); + blocks[Metric.COUNT.getIndex()] = countBuilder.build(); + CompositeBlock block = new CompositeBlock(blocks); + success = true; + return block; + } finally { + if (success == false) { + Releasables.closeExpectNoException(blocks); + } + } + } + + @Override + protected void extraClose() { + Releasables.closeExpectNoException(minBuilder, maxBuilder, sumBuilder, countBuilder); + } + + @Override + public BlockLoader.DoubleBuilder min() { + return minBuilder; + } + + 
@Override + public BlockLoader.DoubleBuilder max() { + return maxBuilder; + } + + @Override + public BlockLoader.DoubleBuilder sum() { + return sumBuilder; + } + + @Override + public BlockLoader.IntBuilder count() { + return countBuilder; + } + + public enum Metric { + MIN(0), + MAX(1), + SUM(2), + COUNT(3); + + private final int index; + + Metric(int index) { + this.index = index; + } + + public int getIndex() { + return index; + } + } + + public record AggregateMetricDoubleLiteral(Double min, Double max, Double sum, Integer count) { + public AggregateMetricDoubleLiteral { + min = min.isNaN() ? null : min; + max = max.isNaN() ? null : max; + sum = sum.isNaN() ? null : sum; + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index f66ae42106ca2..55053f509591d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -432,6 +432,39 @@ public Block newConstantNullBlock(int positions) { return b; } + public AggregateMetricDoubleBlockBuilder newAggregateMetricDoubleBlockBuilder(int estimatedSize) { + return new AggregateMetricDoubleBlockBuilder(estimatedSize, this); + } + + public final Block newConstantAggregateMetricDoubleBlock( + AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral value, + int positions + ) { + try (AggregateMetricDoubleBlockBuilder builder = newAggregateMetricDoubleBlockBuilder(positions)) { + if (value.min() != null) { + builder.min().appendDouble(value.min()); + } else { + builder.min().appendNull(); + } + if (value.max() != null) { + builder.max().appendDouble(value.max()); + } else { + builder.max().appendNull(); + } + if (value.sum() != null) { + builder.sum().appendDouble(value.sum()); + } else { + builder.sum().appendNull(); + } + if (value.count() != null) { + builder.count().appendInt(value.count()); + } else { + builder.count().appendNull(); + } + return builder.build(); + } + } + /** * Returns the maximum number of bytes that a Block should be backed by a primitive array before switching to using BigArrays. 
*/ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 3df389135e9d3..8773a3b9785e0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -233,6 +234,14 @@ private static Block constantBlock(BlockFactory blockFactory, ElementType type, case BYTES_REF -> blockFactory.newConstantBytesRefBlockWith(toBytesRef(val), size); case DOUBLE -> blockFactory.newConstantDoubleBlockWith((double) val, size); case BOOLEAN -> blockFactory.newConstantBooleanBlockWith((boolean) val, size); + case COMPOSITE -> { + if (val instanceof AggregateMetricDoubleLiteral aggregateMetricDoubleLiteral) { + yield blockFactory.newConstantAggregateMetricDoubleBlock(aggregateMetricDoubleLiteral, size); + } + throw new UnsupportedOperationException( + "Composite block but received value that wasn't AggregateMetricDoubleLiteral [" + val + "]" + ); + } default -> throw new UnsupportedOperationException("unsupported element type [" + type + "]"); }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java index b83e2d1efc259..6dfe4c9229e76 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java @@ -91,17 +91,22 @@ public int getTotalValueCount() { @Override public int getFirstValueIndex(int position) { - throw new UnsupportedOperationException("Composite block"); + return blocks[0].getFirstValueIndex(position); } @Override public int getValueCount(int position) { - throw new UnsupportedOperationException("Composite block"); + return blocks[0].getValueCount(position); } @Override public boolean isNull(int position) { - throw new UnsupportedOperationException("Composite block"); + for (Block block : blocks) { + if (block.isNull(position) == false) { + return false; + } + } + return true; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index f38c6d70991f9..cdf6711e14058 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -33,7 +33,7 @@ public enum ElementType { /** * Composite blocks which contain array of sub-blocks. */ - COMPOSITE("Composite", (blockFactory, estimatedSize) -> { throw new UnsupportedOperationException("can't build composite blocks"); }), + COMPOSITE("Composite", BlockFactory::newAggregateMetricDoubleBlockBuilder), /** * Intermediate blocks which don't support retrieving elements. 
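For context on the aggregate_metric_double plumbing in the hunks above: the new COMPOSITE element type is backed by four parallel sub-blocks (min, max, sum, count) addressed through the Metric enum, and CompositeBlock.isNull(p) now reports null only when every sub-block is null at p. Below is a minimal sketch of the round trip, assuming only the builder and factory APIs added in this diff plus an in-scope blockFactory; it is illustrative and not part of the change itself:

// Sketch: build a one-position aggregate_metric_double block and read it back.
try (AggregateMetricDoubleBlockBuilder builder = blockFactory.newAggregateMetricDoubleBlockBuilder(1)) {
    builder.min().appendDouble(1.0);
    builder.max().appendDouble(9.0);
    builder.sum().appendDouble(15.0);
    builder.count().appendInt(3);
    CompositeBlock block = (CompositeBlock) builder.build();
    // Sub-blocks live at the fixed indexes defined by the Metric enum.
    Block min = block.getBlock(AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex());
    assert block.isNull(0) == false; // all four metrics are set at position 0
    block.close(); // hedged assumption: the caller releases the built block
}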
@@ -73,6 +73,8 @@ public static ElementType fromJava(Class type) { elementType = BYTES_REF; } else if (type == Boolean.class) { elementType = BOOLEAN; + } else if (type == AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral.class) { + elementType = COMPOSITE; } else if (type == null || type == Void.class) { elementType = NULL; } else { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 16e2a62b9d030..707b19165bb3b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -29,7 +29,7 @@ import java.util.BitSet; $if(BytesRef)$ * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. $endif$ - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class $Type$ArrayBlock extends AbstractArrayBlock implements $Type$Block { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 3bb13674ce477..521e09d909a1c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -36,7 +36,7 @@ $endif$ $if(BytesRef)$ * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. $endif$ - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st index 23632bf41349c..387730de84556 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st @@ -20,7 +20,7 @@ import java.util.BitSet; /** * Block implementation that stores values in a {@link $Type$BigArrayVector}. Does not take ownership of the given * {@link $Array$} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Type$Block { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 106d0769ebb07..a1145311fabc3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -20,7 +20,7 @@ import java.io.IOException; /** * Vector implementation that defers to an enclosed {@link $if(boolean)$Bit$else$$Type$$endif$Array}. 
* Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class $Type$BigArrayVector extends AbstractVector implements $Type$Vector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 6c1616c370721..670f9fa15842d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -22,7 +22,7 @@ import java.io.IOException; /** * Block that stores $type$ values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ $if(BytesRef)$ public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock, ConstantNullBlock, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index d60e1de179d20..6553011e5b413 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -25,7 +25,7 @@ $endif$ /** * Block build of $Type$Blocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class $Type$BlockBuilder extends AbstractBlockBuilder implements $Type$Block.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index 5d0d4c8a956f3..ebac760031678 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -22,7 +22,7 @@ import org.elasticsearch.core.ReleasableIterator; $endif$ /** * Vector implementation that stores a constant $type$ value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class Constant$Type$Vector extends AbstractVector implements $Type$Vector { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Lookup.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Lookup.java.st index 668752fe3f59f..ad3d93a76ad40 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Lookup.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Lookup.java.st @@ -17,7 +17,7 @@ import org.elasticsearch.core.Releasables; /** * Generic {@link Block#lookup} implementation {@link $Type$Block}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. 
*/ final class $Type$Lookup implements ReleasableIterator<$Type$Block> { $if(BytesRef)$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index c556cba7ef2e4..47a7dc5735fd2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -20,7 +20,7 @@ import java.io.IOException; /** * Vector that stores $type$ values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ $if(BytesRef)$ public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector, ConstantNullVector, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index d4c6859e64b2a..5ab410e843eca 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -16,7 +16,7 @@ import org.elasticsearch.core.Releasables; /** * Block view of a {@link $Type$Vector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class $Type$VectorBlock extends AbstractVectorBlock implements $Type$Block { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st index da074c75f7c4d..4169728a6a596 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st @@ -19,7 +19,7 @@ $endif$ /** * Builder for {@link $Type$Vector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class $Type$VectorBuilder extends AbstractVectorBuilder implements $Type$Vector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st index c08478829c818..8bfc48972f995 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st @@ -13,7 +13,7 @@ import org.apache.lucene.util.RamUsageEstimator; * Builder for {@link $Type$Vector}s that never grows. Prefer this to * {@link $Type$VectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. 
*/ public final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java index 6dca94b9bc79a..63d79a9198622 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java @@ -8,10 +8,12 @@ package org.elasticsearch.compute.data.sort; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -29,6 +31,11 @@ /** * Aggregates the top N variable length {@link BytesRef} values per bucket. * See {@link BucketedSort} for more information. + *
<p> + * This is substantially different from {@link IpBucketedSort} because + * this has to handle variable length byte strings. To do that it allocates + * a heap of {@link BreakingBytesRefBuilder}s. + * </p>
*/ public class BytesRefBucketedSort implements Releasable { private final BucketedSortCommon common; @@ -123,7 +130,7 @@ public void collect(BytesRef value, int bucket) { // Gathering mode long requiredSize = common.endIndex(rootIndex); if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); common.assertValidNextOffset(next); @@ -271,13 +278,23 @@ private void swap(long lhs, long rhs) { /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long requiredSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = common.bigArrays.grow(values, requiredSize); + assert oldMax % common.bucketSize == 0; + + long newSize = BigArrays.overSize( + ((long) bucket + 1) * common.bucketSize, + PageCacheRecycler.OBJECT_PAGE_SIZE, + RamUsageEstimator.NUM_BYTES_OBJECT_REF + ); + // Round up to the next full bucket. + newSize = (newSize + common.bucketSize - 1) / common.bucketSize; + values = common.bigArrays.resize(values, newSize * common.bucketSize); // Set the next gather offsets for all newly allocated buckets. - fillGatherOffsets(oldMax - (oldMax % common.bucketSize)); + fillGatherOffsets(oldMax); } /** @@ -296,6 +313,7 @@ private void fillGatherOffsets(long startingAt) { bytes.grow(Integer.BYTES); bytes.setLength(Integer.BYTES); ByteUtils.writeIntLE(nextOffset, bytes.bytes(), 0); + checkInvariant(Math.toIntExact(bucketRoot / common.bucketSize)); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java index 4eb31ea30db22..4392d3994886c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/IpBucketedSort.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.ByteUtils; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -26,6 +27,11 @@ /** * Aggregates the top N IP values per bucket. * See {@link BucketedSort} for more information. + *
<p> + * This is substantially different from {@link BytesRefBucketedSort} because + * this takes advantage of IPs having a fixed length and allocates a dense + * storage for them. + * </p>
*/ public class IpBucketedSort implements Releasable { private static final int IP_LENGTH = 16; // Bytes. It's ipv6. @@ -101,7 +107,7 @@ public void collect(BytesRef value, int bucket) { // Gathering mode long requiredSize = common.endIndex(rootIndex) * IP_LENGTH; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); common.assertValidNextOffset(next); @@ -268,17 +274,23 @@ private void swap(long lhs, long rhs) { * Allocate storage for more buckets and store the "next gather offset" * for those new buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size() / IP_LENGTH; - values = common.bigArrays.grow(values, minSize); + assert oldMax % common.bucketSize == 0; + + int bucketBytes = common.bucketSize * IP_LENGTH; + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketBytes, PageCacheRecycler.BYTE_PAGE_SIZE, 1); + // Round up to the next full bucket. + newSize = (newSize + bucketBytes - 1) / bucketBytes; + values = common.bigArrays.resize(values, newSize * bucketBytes); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % common.bucketSize)); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = common.bucketSize - 1; for (long bucketRoot = startingAt; bucketRoot < values.size() / IP_LENGTH; bucketRoot += common.bucketSize) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st index 6587743e34b6f..095d48021e9c1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/X-BucketedSort.java.st @@ -10,6 +10,7 @@ package org.elasticsearch.compute.data.sort; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.$Type$Array; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; @@ -101,7 +102,7 @@ public class $Type$BucketedSort implements Releasable { // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { - grow(requiredSize); + grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize @@ -261,19 +262,25 @@ $endif$ /** * Allocate storage for more buckets and store the "next gather offset" - * for those new buckets. + * for those new buckets. We always grow the storage by whole bucket's + * worth of slots at a time. We never allocate space for partial buckets. */ - private void grow(long minSize) { + private void grow(int bucket) { long oldMax = values.size(); - values = bigArrays.grow(values, minSize); + assert oldMax % bucketSize == 0; + + long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.$TYPE$_PAGE_SIZE, $BYTES$); + // Round up to the next full bucket. 
+ newSize = (newSize + bucketSize - 1) / bucketSize; + values = bigArrays.resize(values, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. - setNextGatherOffsets(oldMax - (oldMax % getBucketSize())); + fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ - private void setNextGatherOffsets(long startingAt) { + private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index 8fbb946587470..841789e8ada3c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -698,6 +698,11 @@ public BytesRefBlock constantBytes(BytesRef value) { public BlockLoader.SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count) { return new SingletonOrdinalsBuilder(factory, ordinals, count); } + + @Override + public BlockLoader.AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count) { + return factory.newAggregateMetricDoubleBlockBuilder(count); + } } // TODO tests that mix source loaded fields and doc values in the same block diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java index 09d04d36f8313..d0b4aaad22a3e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java @@ -188,7 +188,10 @@ protected final XContentBuilder innerToXContent(XContentBuilder builder) throws if (builder.humanReadable()) { builder.field("process_time", TimeValue.timeValueNanos(processNanos)); } - return builder.field("pages_processed", pagesProcessed).field("rows_received", rowsReceived).field("rows_emitted", rowsEmitted); + builder.field("pages_processed", pagesProcessed); + builder.field("rows_received", rowsReceived); + builder.field("rows_emitted", rowsEmitted); + return builder; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java index df522e931ca07..0d5d86fb186e1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.seqno.LocalCheckpointTracker; @@ -34,6 +35,11 @@ * to reduce communication overhead and 
fetches a {@code Fetched} at a time. * It's the responsibility of subclasses to transform that {@code Fetched} into * output. + *
<p> + * This operator will also take care of merging response headers from the thread context into the main thread, + * which must be the one that closes this. + * </p>
+ * * @see #performAsync(Page, ActionListener) */ public abstract class AsyncOperator implements Operator { @@ -45,7 +51,8 @@ public abstract class AsyncOperator implements Operator { private final DriverContext driverContext; private final int maxOutstandingRequests; - private final LongAdder totalTimeInNanos = new LongAdder(); + private final ResponseHeadersCollector responseHeadersCollector; + private final LongAdder processNanos = new LongAdder(); private boolean finished = false; private volatile boolean closed = false; @@ -66,9 +73,10 @@ public abstract class AsyncOperator implements Operator { * * @param maxOutstandingRequests the maximum number of outstanding requests */ - public AsyncOperator(DriverContext driverContext, int maxOutstandingRequests) { + public AsyncOperator(DriverContext driverContext, ThreadContext threadContext, int maxOutstandingRequests) { this.driverContext = driverContext; this.maxOutstandingRequests = maxOutstandingRequests; + this.responseHeadersCollector = new ResponseHeadersCollector(threadContext); } @Override @@ -97,8 +105,9 @@ public void addInput(Page input) { }); final long startNanos = System.nanoTime(); performAsync(input, ActionListener.runAfter(listener, () -> { + responseHeadersCollector.collect(); driverContext.removeAsyncAction(); - totalTimeInNanos.add(System.nanoTime() - startNanos); + processNanos.add(System.nanoTime() - startNanos); })); success = true; } finally { @@ -172,6 +181,7 @@ public final void close() { finish(); closed = true; discardResults(); + responseHeadersCollector.finish(); doClose(); } @@ -231,15 +241,11 @@ public IsBlockedResult isBlocked() { @Override public final Operator.Status status() { - return status( - Math.max(0L, checkpoint.getMaxSeqNo()), - Math.max(0L, checkpoint.getProcessedCheckpoint()), - TimeValue.timeValueNanos(totalTimeInNanos.sum()).millis() - ); + return status(Math.max(0L, checkpoint.getMaxSeqNo()), Math.max(0L, checkpoint.getProcessedCheckpoint()), processNanos.sum()); } - protected Operator.Status status(long receivedPages, long completedPages, long totalTimeInMillis) { - return new Status(receivedPages, completedPages, totalTimeInMillis); + protected Operator.Status status(long receivedPages, long completedPages, long processNanos) { + return new Status(receivedPages, completedPages, processNanos); } public static class Status implements Operator.Status { @@ -251,25 +257,31 @@ public static class Status implements Operator.Status { final long receivedPages; final long completedPages; - final long totalTimeInMillis; + final long processNanos; - protected Status(long receivedPages, long completedPages, long totalTimeInMillis) { + protected Status(long receivedPages, long completedPages, long processNanos) { this.receivedPages = receivedPages; this.completedPages = completedPages; - this.totalTimeInMillis = totalTimeInMillis; + this.processNanos = processNanos; } protected Status(StreamInput in) throws IOException { this.receivedPages = in.readVLong(); this.completedPages = in.readVLong(); - this.totalTimeInMillis = in.readVLong(); + this.processNanos = in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ASYNC_NANOS) + ? in.readVLong() + : TimeValue.timeValueMillis(in.readVLong()).nanos(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(receivedPages); out.writeVLong(completedPages); - out.writeVLong(totalTimeInMillis); + out.writeVLong( + out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_ASYNC_NANOS) + ? 
processNanos + : TimeValue.timeValueNanos(processNanos).millis() + ); } public long receivedPages() { @@ -280,8 +292,8 @@ public long completedPages() { return completedPages; } - public long totalTimeInMillis() { - return totalTimeInMillis; + public long procesNanos() { + return processNanos; } @Override @@ -297,12 +309,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } protected final XContentBuilder innerToXContent(XContentBuilder builder) throws IOException { + builder.field("process_nanos", processNanos); + if (builder.humanReadable()) { + builder.field("process_time", TimeValue.timeValueNanos(processNanos)); + } builder.field("received_pages", receivedPages); builder.field("completed_pages", completedPages); - builder.field("total_time_in_millis", totalTimeInMillis); - if (totalTimeInMillis >= 0) { - builder.field("total_time", TimeValue.timeValueMillis(totalTimeInMillis)); - } return builder; } @@ -311,14 +323,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; - return receivedPages == status.receivedPages - && completedPages == status.completedPages - && totalTimeInMillis == status.totalTimeInMillis; + return receivedPages == status.receivedPages && completedPages == status.completedPages && processNanos == status.processNanos; } @Override public int hashCode() { - return Objects.hash(receivedPages, completedPages, totalTimeInMillis); + return Objects.hash(receivedPages, completedPages, processNanos); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ChangePointOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ChangePointOperator.java new file mode 100644 index 0000000000000..2693c13a5383a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ChangePointOperator.java @@ -0,0 +1,236 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; +import org.elasticsearch.xpack.ml.aggs.changepoint.ChangePointDetector; +import org.elasticsearch.xpack.ml.aggs.changepoint.ChangeType; + +import java.util.ArrayList; +import java.util.Deque; +import java.util.LinkedList; +import java.util.List; + +/** + * Find spikes, dips and change point in a list of values. + *
<p>
+ * Warning: this operator cannot handle large amounts of data! It buffers all + * data that is passed to it, runs the change point detector on the data (which + * is a compute-heavy process), and then outputs all data with the change points. + */ +public class ChangePointOperator implements Operator { + + public static final int INPUT_VALUE_COUNT_LIMIT = 1000; + + public record Factory(int channel, String sourceText, int sourceLine, int sourceColumn) implements OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + return new ChangePointOperator(driverContext, channel, sourceText, sourceLine, sourceColumn); + } + + @Override + public String describe() { + return "ChangePointOperator[channel=" + channel + "]"; + } + } + + private final DriverContext driverContext; + private final int channel; + private final String sourceText; + private final int sourceLine; + private final int sourceColumn; + + private final Deque inputPages; + private final Deque outputPages; + private boolean finished; + private Warnings warnings; + + // TODO: make org.elasticsearch.xpack.esql.core.tree.Source available here + // (by modularizing esql-core) and use that instead of the individual fields. + public ChangePointOperator(DriverContext driverContext, int channel, String sourceText, int sourceLine, int sourceColumn) { + this.driverContext = driverContext; + this.channel = channel; + this.sourceText = sourceText; + this.sourceLine = sourceLine; + this.sourceColumn = sourceColumn; + + finished = false; + inputPages = new LinkedList<>(); + outputPages = new LinkedList<>(); + warnings = null; + } + + @Override + public boolean needsInput() { + return finished == false; + } + + @Override + public void addInput(Page page) { + inputPages.add(page); + } + + @Override + public void finish() { + if (finished == false) { + finished = true; + createOutputPages(); + } + } + + @Override + public boolean isFinished() { + return finished && outputPages.isEmpty(); + } + + @Override + public Page getOutput() { + if (finished == false || outputPages.isEmpty()) { + return null; + } + return outputPages.removeFirst(); + } + + private void createOutputPages() { + int valuesCount = 0; + for (Page page : inputPages) { + valuesCount += page.getPositionCount(); + } + boolean tooManyValues = valuesCount > INPUT_VALUE_COUNT_LIMIT; + if (tooManyValues) { + valuesCount = INPUT_VALUE_COUNT_LIMIT; + } + + List values = new ArrayList<>(valuesCount); + List bucketIndexes = new ArrayList<>(valuesCount); + int valuesIndex = 0; + boolean hasNulls = false; + boolean hasMultivalued = false; + for (Page inputPage : inputPages) { + Block inputBlock = inputPage.getBlock(channel); + for (int i = 0; i < inputBlock.getPositionCount() && valuesIndex < valuesCount; i++) { + Object value = BlockUtils.toJavaObject(inputBlock, i); + if (value == null) { + hasNulls = true; + valuesIndex++; + } else if (value instanceof List) { + hasMultivalued = true; + valuesIndex++; + } else { + values.add(((Number) value).doubleValue()); + bucketIndexes.add(valuesIndex++); + } + } + } + + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues( + null, + values.stream().mapToDouble(Double::doubleValue).toArray(), + bucketIndexes.stream().mapToInt(Integer::intValue).toArray() + ); + ChangeType changeType = ChangePointDetector.getChangeType(bucketValues); + int changePointIndex = changeType.changePoint(); + + BlockFactory blockFactory = driverContext.blockFactory(); + int pageStartIndex = 0; + while (inputPages.isEmpty() 
== false) { + Page inputPage = inputPages.peek(); + Page outputPage; + Block changeTypeBlock = null; + Block changePvalueBlock = null; + boolean success = false; + try { + if (pageStartIndex <= changePointIndex && changePointIndex < pageStartIndex + inputPage.getPositionCount()) { + try ( + BytesRefBlock.Builder changeTypeBlockBuilder = blockFactory.newBytesRefBlockBuilder(inputPage.getPositionCount()); + DoubleBlock.Builder pvalueBlockBuilder = blockFactory.newDoubleBlockBuilder(inputPage.getPositionCount()) + ) { + for (int i = 0; i < inputPage.getPositionCount(); i++) { + if (pageStartIndex + i == changePointIndex) { + changeTypeBlockBuilder.appendBytesRef(new BytesRef(changeType.getWriteableName())); + pvalueBlockBuilder.appendDouble(changeType.pValue()); + } else { + changeTypeBlockBuilder.appendNull(); + pvalueBlockBuilder.appendNull(); + } + } + changeTypeBlock = changeTypeBlockBuilder.build(); + changePvalueBlock = pvalueBlockBuilder.build(); + } + } else { + changeTypeBlock = blockFactory.newConstantNullBlock(inputPage.getPositionCount()); + changePvalueBlock = blockFactory.newConstantNullBlock(inputPage.getPositionCount()); + } + + outputPage = inputPage.appendBlocks(new Block[] { changeTypeBlock, changePvalueBlock }); + success = true; + } finally { + if (success == false) { + Releasables.closeExpectNoException(changeTypeBlock, changePvalueBlock); + } + } + + inputPages.removeFirst(); + outputPages.add(outputPage); + pageStartIndex += inputPage.getPositionCount(); + } + + if (changeType instanceof ChangeType.Indeterminable indeterminable) { + warnings(false).registerException(new IllegalArgumentException(indeterminable.getReason())); + } + if (tooManyValues) { + warnings(true).registerException( + new IllegalArgumentException("too many values; keeping only first " + INPUT_VALUE_COUNT_LIMIT + " values") + ); + } + if (hasNulls) { + warnings(true).registerException(new IllegalArgumentException("values contain nulls; skipping them")); + } + if (hasMultivalued) { + warnings(true).registerException( + new IllegalArgumentException( + "values contains multivalued entries; skipping them (please consider reducing them with e.g. MV_AVG or MV_SUM)" + ) + ); + } + } + + @Override + public void close() { + for (Page page : inputPages) { + page.releaseBlocks(); + } + for (Page page : outputPages) { + page.releaseBlocks(); + } + } + + @Override + public String toString() { + return "ChangePointOperator[channel=" + channel + "]"; + } + + private Warnings warnings(boolean onlyWarnings) { + if (warnings == null) { + if (onlyWarnings) { + this.warnings = Warnings.createOnlyWarnings(driverContext.warningsMode(), sourceLine, sourceColumn, sourceText); + } else { + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), sourceLine, sourceColumn, sourceText); + } + } + return warnings; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 78572f55cd5eb..c0d220fda5d4e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -52,6 +52,13 @@ public class Driver implements Releasable, Describable { private final String sessionId; + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. 
We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + private final String taskDescription; + /** * The wall clock time when this driver was created in milliseconds since epoch. * Compared to {@link #startNanos} this is less accurate and is measured by a @@ -96,6 +103,10 @@ public class Driver implements Releasable, Describable { /** * Creates a new driver with a chain of operators. * @param sessionId session Id + * @param taskDescription Description of the task this driver is running. This + * description should be short and meaningful as a grouping + * identifier. We use the phase of the query right now: + * "data", "node_reduce", "final". * @param driverContext the driver context * @param source source operator * @param intermediateOperators the chain of operators to execute @@ -105,6 +116,7 @@ public class Driver implements Releasable, Describable { */ public Driver( String sessionId, + String taskDescription, long startTime, long startNanos, DriverContext driverContext, @@ -116,6 +128,7 @@ public Driver( Releasable releasable ) { this.sessionId = sessionId; + this.taskDescription = taskDescription; this.startTime = startTime; this.startNanos = startNanos; this.driverContext = driverContext; @@ -129,6 +142,7 @@ public Driver( this.status = new AtomicReference<>( new DriverStatus( sessionId, + taskDescription, startTime, System.currentTimeMillis(), 0, @@ -150,6 +164,7 @@ public Driver( * @param releasable a {@link Releasable} to invoked once the chain of operators has run to completion */ public Driver( + String taskDescription, DriverContext driverContext, SourceOperator source, List intermediateOperators, @@ -158,6 +173,7 @@ public Driver( ) { this( "unset", + taskDescription, System.currentTimeMillis(), System.nanoTime(), driverContext, @@ -485,6 +501,7 @@ public DriverProfile profile() { throw new IllegalStateException("can only get profile from finished driver"); } return new DriverProfile( + status.taskDescription(), status.started(), status.lastUpdated(), finishNanos - startNanos, @@ -531,6 +548,7 @@ private void updateStatus(long extraCpuNanos, int extraIterations, DriverStatus. return new DriverStatus( sessionId, + taskDescription, startTime, now, prev.cpuNanos() + extraCpuNanos, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index 59ecdde230413..9cc9f2335ffdd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -27,6 +27,13 @@ * Profile results from a single {@link Driver}. */ public class DriverProfile implements Writeable, ChunkedToXContentObject { + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + private final String taskDescription; + /** * Millis since epoch when the driver started. 
*/ @@ -62,6 +69,7 @@ public class DriverProfile implements Writeable, ChunkedToXContentObject { private final DriverSleeps sleeps; public DriverProfile( + String taskDescription, long startMillis, long stopMillis, long tookNanos, @@ -70,6 +78,7 @@ public DriverProfile( List operators, DriverSleeps sleeps ) { + this.taskDescription = taskDescription; this.startMillis = startMillis; this.stopMillis = stopMillis; this.tookNanos = tookNanos; @@ -80,6 +89,12 @@ public DriverProfile( } public DriverProfile(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) + || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { + this.taskDescription = in.readString(); + } else { + this.taskDescription = ""; + } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.startMillis = in.readVLong(); this.stopMillis = in.readVLong(); @@ -102,6 +117,10 @@ public DriverProfile(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) + || out.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { + out.writeString(taskDescription); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVLong(startMillis); out.writeVLong(stopMillis); @@ -115,6 +134,13 @@ public void writeTo(StreamOutput out) throws IOException { sleeps.writeTo(out); } + /** + * Description of the task this driver is running. + */ + public String taskDescription() { + return taskDescription; + } + /** * Millis since epoch when the driver started. */ @@ -169,6 +195,7 @@ public DriverSleeps sleeps() { @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat(ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> { + b.field("task_description", taskDescription); b.timestampFieldsFromUnixEpochMillis("start_millis", "start", startMillis); b.timestampFieldsFromUnixEpochMillis("stop_millis", "stop", stopMillis); b.field("took_nanos", tookNanos); @@ -197,7 +224,8 @@ public boolean equals(Object o) { return false; } DriverProfile that = (DriverProfile) o; - return startMillis == that.startMillis + return taskDescription.equals(that.taskDescription) + && startMillis == that.startMillis && stopMillis == that.stopMillis && tookNanos == that.tookNanos && cpuNanos == that.cpuNanos @@ -208,7 +236,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); + return Objects.hash(taskDescription, startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java index 05fe38007a929..9d82f73f3105f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java @@ -7,7 +7,9 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; +import 
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import java.util.List; import java.util.concurrent.Executor; @@ -21,7 +23,7 @@ */ final class DriverScheduler { private final AtomicReference delayedTask = new AtomicReference<>(); - private final AtomicReference scheduledTask = new AtomicReference<>(); + private final AtomicReference scheduledTask = new AtomicReference<>(); private final AtomicBoolean completing = new AtomicBoolean(); void addOrRunDelayedTask(Runnable task) { @@ -35,22 +37,32 @@ void addOrRunDelayedTask(Runnable task) { } } - void scheduleOrRunTask(Executor executor, Runnable task) { - final Runnable existing = scheduledTask.getAndSet(task); + void scheduleOrRunTask(Executor executor, AbstractRunnable task) { + final AbstractRunnable existing = scheduledTask.getAndSet(task); assert existing == null : existing; final Executor executorToUse = completing.get() ? EsExecutors.DIRECT_EXECUTOR_SERVICE : executor; - executorToUse.execute(() -> { - final Runnable next = scheduledTask.getAndSet(null); - if (next != null) { - assert next == task; - next.run(); + executorToUse.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + assert e instanceof EsRejectedExecutionException : new AssertionError(e); + if (scheduledTask.getAndUpdate(t -> t == task ? null : t) == task) { + task.onFailure(e); + } + } + + @Override + protected void doRun() { + AbstractRunnable toRun = scheduledTask.getAndSet(null); + if (toRun == task) { + task.run(); + } } }); } void runPendingTasks() { completing.set(true); - for (var taskHolder : List.of(delayedTask, scheduledTask)) { + for (var taskHolder : List.of(scheduledTask, delayedTask)) { final Runnable task = taskHolder.getAndSet(null); if (task != null) { task.run(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java index 42e3908231206..41d0aee14fe60 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java @@ -42,6 +42,11 @@ public class DriverStatus implements Task.Status { */ private final String sessionId; + /** + * Description of the task this driver is running. + */ + private final String taskDescription; + /** * Milliseconds since epoch when this driver started. */ @@ -83,6 +88,7 @@ public class DriverStatus implements Task.Status { DriverStatus( String sessionId, + String taskDescription, long started, long lastUpdated, long cpuTime, @@ -93,6 +99,7 @@ public class DriverStatus implements Task.Status { DriverSleeps sleeps ) { this.sessionId = sessionId; + this.taskDescription = taskDescription; this.started = started; this.lastUpdated = lastUpdated; this.cpuNanos = cpuTime; @@ -105,6 +112,12 @@ public class DriverStatus implements Task.Status { public DriverStatus(StreamInput in) throws IOException { this.sessionId = in.readString(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) + || in.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { + this.taskDescription = in.readString(); + } else { + this.taskDescription = ""; + } this.started = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? 
in.readLong() : 0; this.lastUpdated = in.readLong(); this.cpuNanos = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readVLong() : 0; @@ -122,6 +135,10 @@ public DriverStatus(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(sessionId); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION) + || out.getTransportVersion().isPatchFrom(TransportVersions.ESQL_DRIVER_TASK_DESCRIPTION_90)) { + out.writeString(taskDescription); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { out.writeLong(started); } @@ -150,6 +167,15 @@ public String sessionId() { return sessionId; } + /** + * Description of the task this driver is running. This description should be + * short and meaningful as a grouping identifier. We use the phase of the + * query right now: "data", "node_reduce", "final". + */ + public String taskDescription() { + return taskDescription; + } + /** * When this {@link Driver} was started. */ @@ -211,7 +237,8 @@ public List activeOperators() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("sessionId", sessionId); + builder.field("session_id", sessionId); + builder.field("task_description", taskDescription); builder.field("started", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(started)); builder.field("last_updated", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(lastUpdated)); builder.field("cpu_nanos", cpuNanos); @@ -240,6 +267,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DriverStatus that = (DriverStatus) o; return sessionId.equals(that.sessionId) + && taskDescription.equals(that.taskDescription) && started == that.started && lastUpdated == that.lastUpdated && cpuNanos == that.cpuNanos @@ -252,7 +280,18 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(sessionId, started, lastUpdated, cpuNanos, iterations, status, completedOperators, activeOperators, sleeps); + return Objects.hash( + sessionId, + taskDescription, + started, + lastUpdated, + cpuNanos, + iterations, + status, + completedOperators, + activeOperators, + sleeps + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FailureCollector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FailureCollector.java index 337075edbdcf6..7040f8712e616 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FailureCollector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FailureCollector.java @@ -9,26 +9,35 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.transport.TransportException; +import java.util.EnumMap; +import java.util.List; +import java.util.Map; import java.util.Queue; -import java.util.concurrent.Semaphore; +import java.util.concurrent.ArrayBlockingQueue; /** * {@code FailureCollector} is responsible for collecting exceptions that occur in the compute engine. - * The collected exceptions are categorized into task-cancelled and non-task-cancelled exceptions. 
- * To limit memory usage, this class collects only the first 10 exceptions in each category by default. - * When returning the accumulated failure to the caller, this class prefers non-task-cancelled exceptions - * over task-cancelled ones as they are more useful for diagnosing issues. + * The collected exceptions are categorized into client (4xx), server (5xx), shard-unavailable errors, + * and cancellation errors. To limit memory usage, this class collects only the first 10 exceptions in + * each category by default. When returning the accumulated failures to the caller, this class prefers + * client (4xx) errors over server (5xx) errors, shard-unavailable errors, and cancellation errors, + * as they are more useful for diagnosing issues. */ public final class FailureCollector { - private final Queue cancelledExceptions = ConcurrentCollections.newQueue(); - private final Semaphore cancelledExceptionsPermits; - private final Queue nonCancelledExceptions = ConcurrentCollections.newQueue(); - private final Semaphore nonCancelledExceptionsPermits; + private enum Category { + CLIENT, + SERVER, + SHARD_UNAVAILABLE, + CANCELLATION + } + + private final Map> categories; + private final int maxExceptions; private volatile boolean hasFailure = false; private Exception finalFailure = null; @@ -41,11 +50,14 @@ public FailureCollector(int maxExceptions) { if (maxExceptions <= 0) { throw new IllegalArgumentException("maxExceptions must be at least one"); } - this.cancelledExceptionsPermits = new Semaphore(maxExceptions); - this.nonCancelledExceptionsPermits = new Semaphore(maxExceptions); + this.maxExceptions = maxExceptions; + this.categories = new EnumMap<>(Category.class); + for (Category c : Category.values()) { + this.categories.put(c, new ArrayBlockingQueue<>(maxExceptions)); + } } - private static Exception unwrapTransportException(TransportException te) { + public static Exception unwrapTransportException(TransportException te) { final Throwable cause = te.getCause(); if (cause == null) { return te; @@ -56,16 +68,24 @@ private static Exception unwrapTransportException(TransportException te) { } } - public void unwrapAndCollect(Exception e) { - e = e instanceof TransportException te ? unwrapTransportException(te) : e; + private static Category getErrorCategory(Exception e) { if (ExceptionsHelper.unwrap(e, TaskCancelledException.class) != null) { - if (nonCancelledExceptions.isEmpty() && cancelledExceptionsPermits.tryAcquire()) { - cancelledExceptions.add(e); + return Category.CANCELLATION; + } else if (TransportActions.isShardNotAvailableException(e)) { + return Category.SHARD_UNAVAILABLE; + } else { + final int status = ExceptionsHelper.status(e).getStatus(); + if (400 <= status && status < 500) { + return Category.CLIENT; + } else { + return Category.SERVER; } - } else if (nonCancelledExceptionsPermits.tryAcquire()) { - nonCancelledExceptions.add(e); - cancelledExceptions.clear(); } + } + + public void unwrapAndCollect(Exception e) { + e = e instanceof TransportException te ? unwrapTransportException(te) : e; + categories.get(getErrorCategory(e)).offer(e); hasFailure = true; } @@ -77,8 +97,8 @@ public boolean hasFailure() { } /** - * Returns the accumulated failure, preferring non-task-cancelled exceptions over task-cancelled ones. - * Once this method builds the failure, incoming failures are discarded. + * Returns the accumulated failure, preferring client (4xx) errors over server (5xx) errors and cancellation errors, + * as they are more useful for diagnosing issues. 
Once this method builds the failure, incoming failures are discarded. * * @return the accumulated failure, or {@code null} if no failure has been collected */ @@ -98,21 +118,19 @@ private Exception buildFailure() { assert hasFailure; assert Thread.holdsLock(this); Exception first = null; - for (Exception e : nonCancelledExceptions) { - if (first == null) { - first = e; - } else if (first != e) { - first.addSuppressed(e); + int collected = 0; + for (Category category : List.of(Category.CLIENT, Category.SERVER, Category.SHARD_UNAVAILABLE, Category.CANCELLATION)) { + if (first != null && category == Category.CANCELLATION) { + continue; // do not add cancellation errors if other errors present } - } - if (first != null) { - return first; - } - for (Exception e : cancelledExceptions) { - if (first == null) { - first = e; - } else if (first != e) { - first.addSuppressed(e); + for (Exception e : categories.get(category)) { + if (++collected <= maxExceptions) { + if (first == null) { + first = e; + } else if (first != e) { + first.addSuppressed(e); + } + } } } assert first != null; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java index 1e9ea88b2f1d7..3b011d4a682ff 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java @@ -41,22 +41,24 @@ */ public final class TimeSeriesAggregationOperatorFactories { + public record SupplierWithChannels(AggregatorFunctionSupplier supplier, List channels) {} + public record Initial( int tsHashChannel, int timeBucketChannel, List groupings, - List rates, - List nonRates, + List rates, + List nonRates, int maxPageSize ) implements Operator.OperatorFactory { @Override public Operator get(DriverContext driverContext) { List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); - for (AggregatorFunctionSupplier f : rates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + for (SupplierWithChannels f : rates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.INITIAL, f.channels)); } - for (AggregatorFunctionSupplier f : nonRates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + for (SupplierWithChannels f : nonRates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.INITIAL, f.channels)); } aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); return new HashAggregationOperator( @@ -76,18 +78,18 @@ public record Intermediate( int tsHashChannel, int timeBucketChannel, List groupings, - List rates, - List nonRates, + List rates, + List nonRates, int maxPageSize ) implements Operator.OperatorFactory { @Override public Operator get(DriverContext driverContext) { List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); - for (AggregatorFunctionSupplier f : rates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + for (SupplierWithChannels f : rates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.FINAL, f.channels)); } - for (AggregatorFunctionSupplier f : nonRates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE)); + for 
(SupplierWithChannels f : nonRates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE, f.channels)); } aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); List hashGroups = List.of( @@ -109,18 +111,18 @@ public String describe() { public record Final( List groupings, - List outerRates, - List nonRates, + List outerRates, + List nonRates, int maxPageSize ) implements Operator.OperatorFactory { @Override public Operator get(DriverContext driverContext) { List aggregators = new ArrayList<>(outerRates.size() + nonRates.size()); - for (AggregatorFunctionSupplier f : outerRates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.SINGLE)); + for (SupplierWithChannels f : outerRates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.SINGLE, f.channels)); } - for (AggregatorFunctionSupplier f : nonRates) { - aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + for (SupplierWithChannels f : nonRates) { + aggregators.add(f.supplier.groupingAggregatorFactory(AggregatorMode.FINAL, f.channels)); } return new HashAggregationOperator( aggregators, @@ -139,17 +141,17 @@ static List valuesAggregatorForGroupings(List aggregators = new ArrayList<>(); for (BlockHash.GroupSpec g : groupings) { if (g.channel() != timeBucketChannel) { - final List channels = List.of(g.channel()); // TODO: perhaps introduce a specialized aggregator for this? var aggregatorSupplier = (switch (g.elementType()) { - case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(channels); - case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(channels); - case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(channels); - case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(channels); - case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(channels); + case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(); + case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(); + case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(); + case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(); + case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(); case FLOAT, NULL, DOC, COMPOSITE, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); }); - aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE)); + final List channels = List.of(g.channel()); + aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE, channels)); } } return aggregators; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Warnings.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Warnings.java index ec697219563a4..9ecb83853ec29 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Warnings.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Warnings.java @@ -31,8 +31,9 @@ public void registerException(Exception exception) { * @param sourceText The source text that caused the warning. 
Same as `source.text()` * @return A warnings collector object */ + // TODO: rename to createWarningsTreatedAsNull public static Warnings createWarnings(DriverContext.WarningsMode warningsMode, int lineNumber, int columnNumber, String sourceText) { - return createWarnings(warningsMode, lineNumber, columnNumber, sourceText, "treating result as null"); + return createWarnings(warningsMode, lineNumber, columnNumber, sourceText, "evaluation of [{}] failed, treating result as null"); } /** @@ -50,7 +51,26 @@ public static Warnings createWarningsTreatedAsFalse( int columnNumber, String sourceText ) { - return createWarnings(warningsMode, lineNumber, columnNumber, sourceText, "treating result as false"); + return createWarnings(warningsMode, lineNumber, columnNumber, sourceText, "evaluation of [{}] failed, treating result as false"); + } + + /** + * Create a new warnings object based on the given mode which warns that + * evaluation resulted in warnings. + * @param warningsMode The warnings collection strategy to use + * @param lineNumber The line number of the source text. Same as `source.getLineNumber()` + * @param columnNumber The column number of the source text. Same as `source.getColumnNumber()` + * @param sourceText The source text that caused the warning. Same as `source.text()` + * @return A warnings collector object + */ + // TODO: rename to createWarnings + public static Warnings createOnlyWarnings( + DriverContext.WarningsMode warningsMode, + int lineNumber, + int columnNumber, + String sourceText + ) { + return createWarnings(warningsMode, lineNumber, columnNumber, sourceText, "warnings during evaluation of [{}]"); } private static Warnings createWarnings( @@ -78,23 +98,27 @@ private static Warnings createWarnings( private Warnings(int lineNumber, int columnNumber, String sourceText, String first) { this.location = format("Line {}:{}: ", lineNumber, columnNumber); - this.first = format( - null, - "{}evaluation of [{}] failed, {}. Only first {} failures recorded.", - location, - sourceText, - first, - MAX_ADDED_WARNINGS - ); + this.first = format(null, "{}" + first + ". Only first {} failures recorded.", location, sourceText, MAX_ADDED_WARNINGS); } public void registerException(Exception exception) { + registerException(exception.getClass(), exception.getMessage()); + } + + /** + * Register an exception to be included in the warnings. + *
<p>
+ * This overload avoids the need to instantiate the exception, which can be expensive. + * Instead, it asks only the required pieces to build the warning. + *
</p>
+ */ + public void registerException(Class exceptionClass, String message) { if (addedWarnings < MAX_ADDED_WARNINGS) { if (addedWarnings == 0) { addWarning(first); } // location needs to be added to the exception too, since the headers are deduplicated - addWarning(location + exception.getClass().getName() + ": " + exception.getMessage()); + addWarning(location + exceptionClass.getName() + ": " + message); addedWarnings++; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeBuffer.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeBuffer.java index ce400ddbdd6f9..23c98a1df193d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeBuffer.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeBuffer.java @@ -47,7 +47,17 @@ void addPage(Page page) { notifyNotEmpty(); } if (noMoreInputs) { - discardPages(); + // O(N) but acceptable because it only occurs with the stop API, and the queue size should be very small. + if (queue.removeIf(p -> p == page)) { + page.releaseBlocks(); + final int size = queueSize.decrementAndGet(); + if (size == maxSize - 1) { + notifyNotFull(); + } + if (size == 0) { + completionFuture.onResponse(null); + } + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index ac02273a48ee4..dd36a6f455e8b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -366,7 +367,13 @@ private void doFetchPageAsync(boolean allSourcesFinished, ActionListener 0) { // This doesn't fully protect ESQL from OOM, but reduces the likelihood. 
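/*
 * A minimal sketch of the reserve/guard/release pattern introduced in the hunk just below,
 * reusing names from the surrounding code; only the catch shape differs from the actual
 * change, which catches Exception and asserts it is a CircuitBreakingException:
 *
 *   try {
 *       blockFactory.breaker().addEstimateBytesAndMaybeBreak(reservedBytes, "fetch page");
 *   } catch (CircuitBreakingException e) {
 *       listener.onFailure(e); // fail the fetch instead of letting the breaker trip escape
 *       return;
 *   }
 *   // pair every successful reservation with a release once the listener completes
 *   listener = ActionListener.runAfter(listener, () -> blockFactory.breaker().addWithoutBreaking(-reservedBytes));
 */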
- blockFactory.breaker().addEstimateBytesAndMaybeBreak(reservedBytes, "fetch page"); + try { + blockFactory.breaker().addEstimateBytesAndMaybeBreak(reservedBytes, "fetch page"); + } catch (Exception e) { + assert e instanceof CircuitBreakingException : new AssertionError(e); + listener.onFailure(e); + return; + } listener = ActionListener.runAfter(listener, () -> blockFactory.breaker().addWithoutBreaking(-reservedBytes)); } transportService.sendChildRequest( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index db9a62da5d9ea..68f684cdf9dcd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -7,19 +7,16 @@ package org.elasticsearch.compute.operator.exchange; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.FailureCollector; import org.elasticsearch.compute.operator.IsBlockedResult; import org.elasticsearch.core.Releasable; +import org.elasticsearch.tasks.TaskCancelledException; -import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; @@ -38,10 +35,9 @@ public final class ExchangeSourceHandler { private final PendingInstances outstandingSinks; private final PendingInstances outstandingSources; - // Collect failures that occur while fetching pages from the remote sink with `failFast=true`. - // The exchange source will stop fetching and abort as soon as any failure is added to this failure collector. - // The final failure collected will be notified to callers via the {@code completionListener}. - private final FailureCollector failure = new FailureCollector(); + // Track if this exchange source should abort. There is no need to track the actual failure since the actual failure + // should be notified via #addRemoteSink(RemoteSink, boolean, Runnable, int, ActionListener). + private volatile boolean aborted = false; private final AtomicInteger nextSinkId = new AtomicInteger(); private final Map remoteSinks = ConcurrentCollections.newConcurrentMap(); @@ -52,35 +48,18 @@ public final class ExchangeSourceHandler { * @param maxBufferSize the maximum size of the exchange buffer. A larger buffer reduces ``pauses`` but uses more memory, * which could otherwise be allocated for other purposes. * @param fetchExecutor the executor used to fetch pages. 
- * @param completionListener a listener that will be notified when the exchange source handler fails or completes */ - public ExchangeSourceHandler(int maxBufferSize, Executor fetchExecutor, ActionListener completionListener) { + public ExchangeSourceHandler(int maxBufferSize, Executor fetchExecutor) { this.buffer = new ExchangeBuffer(maxBufferSize); this.fetchExecutor = fetchExecutor; this.outstandingSinks = new PendingInstances(() -> buffer.finish(false)); - final PendingInstances closingSinks = new PendingInstances(() -> {}); - closingSinks.trackNewInstance(); - this.outstandingSources = new PendingInstances(() -> finishEarly(true, ActionListener.running(closingSinks::finishInstance))); - buffer.addCompletionListener(ActionListener.running(() -> { - final ActionListener listener = ActionListener.assertAtLeastOnce(completionListener); - try (RefCountingRunnable refs = new RefCountingRunnable(() -> { - final Exception e = failure.getFailure(); - if (e != null) { - listener.onFailure(e); - } else { - listener.onResponse(null); - } - })) { - closingSinks.completion.addListener(refs.acquireListener()); - for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { - // Create an outstanding instance and then finish to complete the completionListener - // if we haven't registered any instances of exchange sinks or exchange sources before. - pending.trackNewInstance(); - pending.completion.addListener(refs.acquireListener()); - pending.finishInstance(); - } - } - })); + this.outstandingSources = new PendingInstances(() -> finishEarly(true, ActionListener.noop())); + } + + private void checkFailure() { + if (aborted) { + throw new TaskCancelledException("remote sinks failed"); + } } private class ExchangeSourceImpl implements ExchangeSource { @@ -90,13 +69,6 @@ private class ExchangeSourceImpl implements ExchangeSource { outstandingSources.trackNewInstance(); } - private void checkFailure() { - Exception e = failure.getFailure(); - if (e != null) { - throw ExceptionsHelper.convertToRuntime(e); - } - } - @Override public Page pollPage() { checkFailure(); @@ -201,7 +173,7 @@ void fetchPage() { while (loopControl.isRunning()) { loopControl.exiting(); // finish other sinks if one of them failed or source no longer need pages. - boolean toFinishSinks = buffer.noMoreInputs() || failure.hasFailure(); + boolean toFinishSinks = buffer.noMoreInputs() || aborted; remoteSink.fetchPageAsync(toFinishSinks, ActionListener.wrap(resp -> { Page page = resp.takePage(); if (page != null) { @@ -231,7 +203,7 @@ void fetchPage() { void onSinkFailed(Exception e) { if (failFast) { - failure.unwrapAndCollect(e); + aborted = true; } buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading if (finished == false) { @@ -260,12 +232,12 @@ void onSinkComplete() { * - If {@code false}, failures from this remote sink will not cause the exchange source to abort. * Callers must handle these failures notified via {@code listener}. * - If {@code true}, failures from this remote sink will cause the exchange source to abort. - * Callers can safely ignore failures notified via this listener, as they are collected and - * reported by the exchange source. + * * @param onPageFetched a callback that will be called when a page is fetched from the remote sink * @param instances the number of concurrent ``clients`` that this handler should use to fetch pages. * More clients reduce latency, but add overhead. 
- * @param listener a listener that will be notified when the sink fails or completes + * @param listener a listener that will be notified when the sink fails or completes. Callers must handle failures notified via + * this listener. * @see ExchangeSinkHandler#fetchPageAsync(boolean, ActionListener) */ public void addRemoteSink( @@ -280,11 +252,17 @@ public void addRemoteSink( final ActionListener sinkListener = ActionListener.assertAtLeastOnce( ActionListener.notifyOnce(ActionListener.runBefore(listener, () -> remoteSinks.remove(sinkId))) ); + final Releasable emptySink = addEmptySink(); fetchExecutor.execute(new AbstractRunnable() { + @Override + public void onAfter() { + emptySink.close(); + } + @Override public void onFailure(Exception e) { if (failFast) { - failure.unwrapAndCollect(e); + aborted = true; } buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading remoteSink.close(ActionListener.running(() -> sinkListener.onFailure(e))); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java index 5d359e2fb612f..f05d552c3e628 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; import java.util.function.IntFunction; @@ -47,13 +48,19 @@ public abstract class QueryList { protected final SearchExecutionContext searchExecutionContext; protected final MappedFieldType field; protected final Block block; - protected final boolean onlySingleValues; + @Nullable + protected final OnlySingleValueParams onlySingleValueParams; - protected QueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block, boolean onlySingleValues) { + protected QueryList( + MappedFieldType field, + SearchExecutionContext searchExecutionContext, + Block block, + OnlySingleValueParams onlySingleValueParams + ) { this.searchExecutionContext = searchExecutionContext; this.field = field; this.block = block; - this.onlySingleValues = onlySingleValues; + this.onlySingleValueParams = onlySingleValueParams; } /** @@ -66,19 +73,27 @@ int getPositionCount() { /** * Returns a copy of this query list that only returns queries for single-valued positions. * That is, it returns `null` queries for either multivalued or null positions. + *
<p>
+ * Whenever a multi-value position is encountered, whether in the input block or in the queried index, a warning is emitted. + *
</p>
*/ - public abstract QueryList onlySingleValues(); + public abstract QueryList onlySingleValues(Warnings warnings, String multiValueWarningMessage); final Query getQuery(int position) { final int valueCount = block.getValueCount(position); - if (onlySingleValues && valueCount != 1) { + if (onlySingleValueParams != null && valueCount != 1) { + if (valueCount > 1) { + onlySingleValueParams.warnings.registerException( + new IllegalArgumentException(onlySingleValueParams.multiValueWarningMessage) + ); + } return null; } final int firstValueIndex = block.getFirstValueIndex(position); Query query = doGetQuery(position, firstValueIndex, valueCount); - if (onlySingleValues) { + if (onlySingleValueParams != null) { query = wrapSingleValueQuery(query); } @@ -92,13 +107,16 @@ final Query getQuery(int position) { abstract Query doGetQuery(int position, int firstValueIndex, int valueCount); private Query wrapSingleValueQuery(Query query) { + assert onlySingleValueParams != null : "Requested to wrap single value query without single value params"; + SingleValueMatchQuery singleValueQuery = new SingleValueMatchQuery( searchExecutionContext.getForField(field, MappedFieldType.FielddataOperation.SEARCH), // Not emitting warnings for multivalued fields not matching - Warnings.NOOP_WARNINGS + onlySingleValueParams.warnings, + onlySingleValueParams.multiValueWarningMessage ); - Query rewrite = singleValueQuery; + Query rewrite; try { rewrite = singleValueQuery.rewrite(searchExecutionContext.searcher()); if (rewrite instanceof MatchAllDocsQuery) { @@ -106,8 +124,7 @@ private Query wrapSingleValueQuery(Query query) { return query; } } catch (IOException e) { - // ignore - // TODO: Should we do something with the exception? + throw new UncheckedIOException("Error while rewriting SingleValueQuery", e); } BooleanQuery.Builder builder = new BooleanQuery.Builder(); @@ -152,7 +169,7 @@ public static QueryList rawTermQueryList(MappedFieldType field, SearchExecutionC case COMPOSITE -> throw new IllegalArgumentException("can't read values from [composite] block"); case UNKNOWN -> throw new IllegalArgumentException("can't read values from [" + block + "]"); }; - return new TermQueryList(field, searchExecutionContext, block, false, blockToJavaObject); + return new TermQueryList(field, searchExecutionContext, block, null, blockToJavaObject); } /** @@ -162,7 +179,7 @@ public static QueryList rawTermQueryList(MappedFieldType field, SearchExecutionC public static QueryList ipTermQueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, BytesRefBlock block) { BytesRef scratch = new BytesRef(); byte[] ipBytes = new byte[InetAddressPoint.BYTES]; - return new TermQueryList(field, searchExecutionContext, block, false, offset -> { + return new TermQueryList(field, searchExecutionContext, block, null, offset -> { final var bytes = block.getBytesRef(offset, scratch); if (ipBytes.length != bytes.length) { // Lucene only support 16-byte IP addresses, even IPv4 is encoded in 16 bytes @@ -182,7 +199,7 @@ public static QueryList dateTermQueryList(MappedFieldType field, SearchExecution field, searchExecutionContext, block, - false, + null, field instanceof RangeFieldMapper.RangeFieldType rangeFieldType ? offset -> rangeFieldType.dateTimeFormatter().formatMillis(block.getLong(offset)) : block::getLong @@ -193,7 +210,7 @@ public static QueryList dateTermQueryList(MappedFieldType field, SearchExecution * Returns a list of geo_shape queries for the given field and the input block. 
*/ public static QueryList geoShapeQueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block) { - return new GeoShapeQueryList(field, searchExecutionContext, block, false); + return new GeoShapeQueryList(field, searchExecutionContext, block, null); } private static class TermQueryList extends QueryList { @@ -203,16 +220,22 @@ private TermQueryList( MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block, - boolean onlySingleValues, + OnlySingleValueParams onlySingleValueParams, IntFunction blockValueReader ) { - super(field, searchExecutionContext, block, onlySingleValues); + super(field, searchExecutionContext, block, onlySingleValueParams); this.blockValueReader = blockValueReader; } @Override - public TermQueryList onlySingleValues() { - return new TermQueryList(field, searchExecutionContext, block, true, blockValueReader); + public TermQueryList onlySingleValues(Warnings warnings, String multiValueWarningMessage) { + return new TermQueryList( + field, + searchExecutionContext, + block, + new OnlySingleValueParams(warnings, multiValueWarningMessage), + blockValueReader + ); } @Override @@ -241,17 +264,22 @@ private GeoShapeQueryList( MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block, - boolean onlySingleValues + OnlySingleValueParams onlySingleValueParams ) { - super(field, searchExecutionContext, block, onlySingleValues); + super(field, searchExecutionContext, block, onlySingleValueParams); this.blockValueReader = blockToGeometry(block); this.shapeQuery = shapeQuery(); } @Override - public GeoShapeQueryList onlySingleValues() { - return new GeoShapeQueryList(field, searchExecutionContext, block, true); + public GeoShapeQueryList onlySingleValues(Warnings warnings, String multiValueWarningMessage) { + return new GeoShapeQueryList( + field, + searchExecutionContext, + block, + new OnlySingleValueParams(warnings, multiValueWarningMessage) + ); } @Override @@ -295,4 +323,6 @@ private IntFunction shapeQuery() { throw new IllegalArgumentException("Unsupported field type for geo_match ENRICH: " + field.typeName()); } } + + protected record OnlySingleValueParams(Warnings warnings, String multiValueWarningMessage) {} } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/querydsl/query/SingleValueMatchQuery.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/querydsl/query/SingleValueMatchQuery.java index b948d0f409dbb..65ec5765e8731 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/querydsl/query/SingleValueMatchQuery.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/querydsl/query/SingleValueMatchQuery.java @@ -46,15 +46,14 @@ public final class SingleValueMatchQuery extends Query { * This avoids reporting warnings when queries are not matching multi-values */ private static final int MULTI_VALUE_MATCH_COST = 1000; - private static final IllegalArgumentException MULTI_VALUE_EXCEPTION = new IllegalArgumentException( - "single-value function encountered multi-value" - ); private final IndexFieldData fieldData; private final Warnings warnings; + private final String multiValueExceptionMessage; - public SingleValueMatchQuery(IndexFieldData fieldData, Warnings warnings) { + public SingleValueMatchQuery(IndexFieldData fieldData, Warnings warnings, String multiValueExceptionMessage) { this.fieldData = fieldData; this.warnings = warnings; + this.multiValueExceptionMessage = multiValueExceptionMessage; } 
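/*
 * A hedged usage sketch of the single-value plumbing above: callers now hand a Warnings
 * sink and a warning message to onlySingleValues, and multi-values in both the input
 * block and the queried index (via SingleValueMatchQuery) are reported through it.
 * Parameter names follow the signatures in this diff; the wiring itself is illustrative:
 *
 *   Warnings warnings = Warnings.createWarnings(warningsMode, lineNumber, columnNumber, sourceText);
 *   QueryList queries = QueryList.rawTermQueryList(field, searchExecutionContext, block)
 *       .onlySingleValues(warnings, "single-value function encountered multi-value");
 *
 * A multi-value position then yields a null query plus one registered warning, instead
 * of the silent Warnings.NOOP_WARNINGS behavior that was removed.
 */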
@Override @@ -123,7 +122,7 @@ private ScorerSupplier scorerSupplier( return false; } if (sortedNumerics.docValueCount() != 1) { - warnings.registerException(MULTI_VALUE_EXCEPTION); + registerMultiValueException(); return false; } return true; @@ -158,7 +157,7 @@ private ScorerSupplier scorerSupplier( return false; } if (sortedSetDocValues.docValueCount() != 1) { - warnings.registerException(MULTI_VALUE_EXCEPTION); + registerMultiValueException(); return false; } return true; @@ -187,7 +186,7 @@ private ScorerSupplier scorerSupplier( return false; } if (sortedBinaryDocValues.docValueCount() != 1) { - warnings.registerException(MULTI_VALUE_EXCEPTION); + registerMultiValueException(); return false; } return true; @@ -267,6 +266,10 @@ public long cost() { } } + private void registerMultiValueException() { + warnings.registerException(IllegalArgumentException.class, multiValueExceptionMessage); + } + private static class PredicateScorerSupplier extends ScorerSupplier { private final float score; private final ScoreMode scoreMode; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index a5061b8cf6d32..401fa0d14cd9f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -123,7 +123,7 @@ public void testQueryOperator() throws IOException { } }); DriverContext driverContext = driverContext(); - drivers.add(new Driver(driverContext, factory.get(driverContext), List.of(), docCollector, () -> {})); + drivers.add(new Driver("test", driverContext, factory.get(driverContext), List.of(), docCollector, () -> {})); } OperatorTestCase.runDriver(drivers); Set expectedDocIds = searchForDocIds(reader, query); @@ -197,14 +197,14 @@ public String toString() { ElementType.BYTES_REF, 0, gField, - List.of(CountAggregatorFunction.supplier(List.of(1)).groupingAggregatorFactory(INITIAL)), + List.of(CountAggregatorFunction.supplier().groupingAggregatorFactory(INITIAL, List.of(1))), randomPageSize(), driverContext ) ); operators.add( new HashAggregationOperator( - List.of(CountAggregatorFunction.supplier(List.of(1, 2)).groupingAggregatorFactory(FINAL)), + List.of(CountAggregatorFunction.supplier().groupingAggregatorFactory(FINAL, List.of(1, 2))), () -> BlockHash.build( List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF)), driverContext.blockFactory(), @@ -215,6 +215,7 @@ public String toString() { ) ); Driver driver = new Driver( + "test", driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT).get(driverContext), operators, @@ -248,6 +249,7 @@ public void testLimitOperator() { DriverContext driverContext = driverContext(); try ( var driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of((new LimitOperator.Factory(limit)).get(driverContext)), @@ -335,6 +337,7 @@ public void testHashLookup() { var actualPrimeOrds = new ArrayList<>(); try ( var driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index 
3eaf85c27e596..abac7a4cd47e3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -47,10 +47,10 @@ import static org.hamcrest.Matchers.hasSize; public abstract class AggregatorFunctionTestCase extends ForkingOperatorTestCase { - protected abstract AggregatorFunctionSupplier aggregatorFunction(List inputChannels); + protected abstract AggregatorFunctionSupplier aggregatorFunction(); protected final int aggregatorIntermediateBlockCount() { - try (var agg = aggregatorFunction(List.of()).aggregator(driverContext())) { + try (var agg = aggregatorFunction().aggregator(driverContext(), List.of())) { return agg.intermediateBlockCount(); } } @@ -69,8 +69,8 @@ private Operator.OperatorFactory simpleWithMode( Function wrap ) { List channels = mode.isInputPartial() ? range(0, aggregatorIntermediateBlockCount()).boxed().toList() : List.of(0); - AggregatorFunctionSupplier supplier = aggregatorFunction(channels); - Aggregator.Factory factory = wrap.apply(supplier).aggregatorFactory(mode); + AggregatorFunctionSupplier supplier = aggregatorFunction(); + Aggregator.Factory factory = wrap.apply(supplier).aggregatorFactory(mode, channels); return new AggregationOperator.AggregationOperatorFactory(List.of(factory), mode); } @@ -111,6 +111,7 @@ public final void testIgnoresNulls() { try ( Driver d = new Driver( + "test", driverContext, new NullInsertingSourceOperator(new CannedSourceOperator(input.iterator()), blockFactory), List.of(simple().get(driverContext)), @@ -223,7 +224,7 @@ public void testSomeFiltered() { // Returns an intermediate state that is equivalent to what the local execution planner will emit // if it determines that certain shards have no relevant data. 
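/*
 * The churn in these test hunks follows a single API move: input channels are no longer
 * bound at supplier construction but passed when an aggregator is created. A before/after
 * sketch drawn from the suppliers used in these tests:
 *
 *   // before: channels fixed inside the supplier
 *   new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, 40000).aggregator(driverContext());
 *   // after: channel-free supplier; channels chosen per aggregator
 *   new CountDistinctBytesRefAggregatorFunctionSupplier(40000).aggregator(driverContext(), inputChannels);
 *
 * groupingAggregator(...) and the aggregatorFactory/groupingAggregatorFactory variants
 * gain the same trailing channels argument, paired with the aggregation mode.
 */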
List nullIntermediateState(BlockFactory blockFactory) { - try (var agg = aggregatorFunction(List.of()).aggregator(driverContext())) { + try (var agg = aggregatorFunction().aggregator(driverContext(), List.of())) { var method = agg.getClass().getMethod("intermediateStateDesc"); @SuppressWarnings("unchecked") List intermediateStateDescs = (List) method.invoke(null); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java index 452fa206a5590..e30082c843b19 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return CountAggregatorFunction.supplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return CountAggregatorFunction.supplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java index 1c0f3c4f64cb5..d91fdce409835 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctBooleanAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java index c39fe32620ff9..f86c296878772 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java @@ -25,8 +25,8 @@ public class CountDistinctBooleanGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctBooleanAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java index 
e8e51c2adf291..d01cbb39bf470 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctBytesRefAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java index dd739d2189ba8..c430249ffceb4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java @@ -27,8 +27,8 @@ public class CountDistinctBytesRefGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctBytesRefAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java index a3e7a6a6d70f5..d1e845fba40ca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java @@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctDoubleAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctDoubleAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java index 7b6f928d57ddb..9b45c8dd6e50b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java @@ -26,8 +26,8 @@ public class CountDistinctDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier 
aggregatorFunction(List inputChannels) { - return new CountDistinctDoubleAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctDoubleAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java index bbd61455a3053..7c0d7c1e3d2fd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java @@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctFloatAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctFloatAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunctionTests.java index 6b4a8f2900aaa..d536affb34a0d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunctionTests.java @@ -26,8 +26,8 @@ public class CountDistinctFloatGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctFloatAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctFloatAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 5bd9ecc931cf2..8657caafef409 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -34,8 +34,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctIntAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctIntAggregatorFunctionSupplier(40000); } @Override @@ -65,6 +65,7 @@ public void testRejectsDouble() { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), 
List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java index cfd3357a14c03..88f594b5a6d6d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java @@ -26,8 +26,8 @@ public class CountDistinctIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctIntAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctIntAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 70662efae688f..55f522f31b28a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -35,8 +35,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctLongAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctLongAggregatorFunctionSupplier(40000); } @Override @@ -66,6 +66,7 @@ public void testRejectsDouble() { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java index 55be7fe9a8ed3..db08fd0428e7b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java @@ -25,8 +25,8 @@ public class CountDistinctLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new CountDistinctLongAggregatorFunctionSupplier(inputChannels, 40000); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new CountDistinctLongAggregatorFunctionSupplier(40000); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java index 06c267ff2d6ab..06a0666586290 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java @@ -25,8 +25,8 @@ public class CountGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) { - return CountAggregatorFunction.supplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return CountAggregatorFunction.supplier(); } @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java index 35ecced470e01..a4411d92c6c29 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java @@ -28,10 +28,10 @@ public class FilteredAggregatorFunctionTests extends AggregatorFunctionTestCase private final List unclosed = Collections.synchronizedList(new ArrayList<>()); @Override - protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) { + protected AggregatorFunctionSupplier aggregatorFunction() { return new FilteredAggregatorFunctionSupplier( - new SumIntAggregatorFunctionSupplier(inputChannels), - new FilteredGroupingAggregatorFunctionTests.AnyGreaterThanFactory(unclosed, inputChannels) + new SumIntAggregatorFunctionSupplier(), + new FilteredGroupingAggregatorFunctionTests.AnyGreaterThanFactory(unclosed, List.of(0)) ); }
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java index 26971dc927cd1..efe7fccd4f06a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java @@ -34,10 +34,10 @@ public class FilteredGroupingAggregatorFunctionTests extends GroupingAggregatorF private final List unclosed = Collections.synchronizedList(new ArrayList<>()); @Override - protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) { + protected AggregatorFunctionSupplier aggregatorFunction() { return new FilteredAggregatorFunctionSupplier( - new SumIntAggregatorFunctionSupplier(inputChannels), - new AnyGreaterThanFactory(unclosed, inputChannels) + new SumIntAggregatorFunctionSupplier(), + new AnyGreaterThanFactory(unclosed, List.of(1)) ); } @@ -112,11 +112,12 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { */ public void testAddIntermediateRowInput() { DriverContext ctx = driverContext(); - AggregatorFunctionSupplier supplier = aggregatorFunction(channels(AggregatorMode.SINGLE)); + AggregatorFunctionSupplier supplier = aggregatorFunction(); + List<Integer> channels = channels(AggregatorMode.SINGLE); Block[] results = new Block[2]; try ( - GroupingAggregatorFunction main = supplier.groupingAggregator(ctx); - GroupingAggregatorFunction leaf = supplier.groupingAggregator(ctx); + GroupingAggregatorFunction main = supplier.groupingAggregator(ctx, channels); + GroupingAggregatorFunction leaf = supplier.groupingAggregator(ctx, channels); SourceOperator source = simpleInput(ctx.blockFactory(), 10); ) { Page p;
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index ff96336dc0bb4..d82a8487b5390 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -59,10 +59,10 @@ * Shared tests for testing grouped aggregations. */ public abstract class GroupingAggregatorFunctionTestCase extends ForkingOperatorTestCase { - protected abstract AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels); + protected abstract AggregatorFunctionSupplier aggregatorFunction(); protected final int aggregatorIntermediateBlockCount() { - try (var agg = aggregatorFunction(List.of()).groupingAggregator(driverContext())) { + try (var agg = aggregatorFunction().groupingAggregator(driverContext(), List.of())) { return agg.intermediateBlockCount(); } } @@ -98,14 +98,14 @@ private Operator.OperatorFactory simpleWithMode( ) { int emitChunkSize = between(100, 200); - AggregatorFunctionSupplier supplier = wrap.apply(aggregatorFunction(channels(mode))); + AggregatorFunctionSupplier supplier = wrap.apply(aggregatorFunction()); if (randomBoolean()) { supplier = chunkGroups(emitChunkSize, supplier); } return new HashAggregationOperator.HashAggregationOperatorFactory( List.of(new BlockHash.GroupSpec(0, ElementType.LONG)), mode, - List.of(supplier.groupingAggregatorFactory(mode)), + List.of(supplier.groupingAggregatorFactory(mode, channels(mode))), randomPageSize(), null ); @@ -619,14 +619,24 @@ protected static LongStream allLongs(Page page, Long group) { private AggregatorFunctionSupplier chunkGroups(int emitChunkSize, AggregatorFunctionSupplier supplier) { return new AggregatorFunctionSupplier() { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { - return supplier.aggregator(driverContext); + public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { + return supplier.nonGroupingIntermediateStateDesc(); } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public List<IntermediateStateDesc> groupingIntermediateStateDesc() { + return supplier.groupingIntermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) { + return supplier.aggregator(driverContext, channels); + } + + @Override + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List<Integer> channels) { return new GroupingAggregatorFunction() { - GroupingAggregatorFunction delegate = supplier.groupingAggregator(driverContext); + GroupingAggregatorFunction delegate = supplier.groupingAggregator(driverContext, channels); BitArray seenGroupIds = new BitArray(0, nonBreakingBigArrays()); @Override
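
The GroupingAggregatorFunctionTestCase hunk above is the core of this refactor: AggregatorFunctionSupplier no longer receives its input channels at construction, so callers bind them when each aggregator is created. A minimal Java sketch of the new calling convention, grounded only in the signatures visible in these hunks; the supplier class and channel 0 are illustrative choices:

import java.util.List;

import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier;
import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction;
import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier;
import org.elasticsearch.compute.operator.DriverContext;

class ChannelsAtCreationSketch {
    // Before: the channel list was constructor state, e.g.
    //   new MaxLongAggregatorFunctionSupplier(List.of(0)).groupingAggregator(ctx);
    // After: the supplier is channel-free and reusable; channels bind per aggregator.
    static GroupingAggregatorFunction create(DriverContext ctx) {
        AggregatorFunctionSupplier supplier = new MaxLongAggregatorFunctionSupplier();
        List<Integer> channels = List.of(0); // read input values from channel 0 (illustrative)
        return supplier.groupingAggregator(ctx, channels);
    }
}

diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionTests.java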
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionTests.java index 11119aade12ff..a7164740af009 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxBooleanAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxBooleanAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionTests.java index adc891a6a977d..54b82dcbc5008 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunctionTests.java index 75a6a839ea62d..97d3126fa7673 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunctionTests.java @@ -40,8 +40,8 @@ protected DataType acceptedDataType() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java index 9d638fae4e822..fee5950c08257 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxDoubleAggregatorFunctionSupplier(); } @Override 
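
The chunkGroups wrapper in the test-case hunk earlier also shows the widened supplier surface: the intermediate-state descriptors move onto the supplier, and both factory methods now accept the channel list. A pass-through wrapper against that surface would look roughly like the sketch below; the describe() method is an assumption about the rest of the interface, which this diff does not show, so treat this as illustrative rather than guaranteed to compile:

import java.util.List;

import org.elasticsearch.compute.aggregation.AggregatorFunction;
import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier;
import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction;
import org.elasticsearch.compute.aggregation.IntermediateStateDesc;
import org.elasticsearch.compute.operator.DriverContext;

class ForwardingSupplierSketch {
    static AggregatorFunctionSupplier forwarding(AggregatorFunctionSupplier inner) {
        return new AggregatorFunctionSupplier() {
            @Override
            public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
                return inner.nonGroupingIntermediateStateDesc();
            }

            @Override
            public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
                return inner.groupingIntermediateStateDesc();
            }

            @Override
            public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
                return inner.aggregator(driverContext, channels);
            }

            @Override
            public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List<Integer> channels) {
                return inner.groupingAggregator(driverContext, channels);
            }

            @Override
            public String describe() {
                // describe() is not shown in this diff; assumed from the wider interface.
                return inner.describe();
            }
        };
    }
}
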
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java index 18aec87a9d07b..0e7d716d2c0cd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionTests.java index 5e14a99fd0fa2..a1f13566a069a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java index e4da581a59136..62fe712beb4e8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java index af198e3aec9d5..5507e2c261e97 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java @@ -25,8 +25,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new 
MaxIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java index 372015ebd767c..da59a0f91ccdd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java @@ -23,8 +23,8 @@ public class MaxIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionTests.java index 84488b5115e5d..b39b5fe384961 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxIpAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxIpAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunctionTests.java index 12e34fcf9a50e..2e6210c701367 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunctionTests.java @@ -42,8 +42,8 @@ protected DataType acceptedDataType() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxIpAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxIpAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java index 27a6fb0660461..081ef44f37047 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected 
AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java index 1bf7cd9eea27d..6d6c37fb306a0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java @@ -23,8 +23,8 @@ public class MaxLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MaxLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MaxLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java index 1d105430ce1db..db9ab2c998103 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java @@ -29,8 +29,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java index a6ca769036e54..75305708bd933 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java @@ -46,8 +46,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionTests.java index 786603e12f9c8..fb70fa6385d74 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionTests.java @@ -29,8 +29,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunctionTests.java index 14416b3aec1ee..a0b5495d53bdd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunctionTests.java @@ -46,8 +46,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java index fa396d7dcf7a6..fca7ec47b05a5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java @@ -29,8 +29,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java index 8a8b051528195..d5e5f0869988b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java @@ -46,8 +46,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java index 0f570adfc6fd8..f700d4270f4d7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java @@ -29,8 +29,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java index 818150d3234aa..55895ceadd52c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java @@ -46,8 +46,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionTests.java index 74cdca31da34b..186d9edf2a9e0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinBooleanAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinBooleanAggregatorFunctionSupplier(); } @Override diff 
--git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionTests.java index b4383d6b0f56e..678f7259f7843 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunctionTests.java index d4cfca819f3b7..7d099e7606843 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunctionTests.java @@ -40,8 +40,8 @@ protected DataType acceptedDataType() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java index e92b98ebf91d0..7f7095d13aa46 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java index 62cf954a1909e..756d19345aa9c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new 
MinDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionTests.java index 59a09569c65a2..ef98a2dd7b954 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunctionTests.java index be41e058f60da..9044732c1b8cc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java index ffa2189f96b66..e7296a5b08f4d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java @@ -25,8 +25,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java index a7644c8bb26a9..d77b63bbb54c5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java @@ -23,8 +23,8 @@ public class MinIntGroupingAggregatorFunctionTests extends 
GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionTests.java index 17e9812d2e4e8..9072702178316 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinIpAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinIpAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunctionTests.java index f51662ffee352..86d7d0e961a1d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunctionTests.java @@ -42,8 +42,8 @@ protected DataType acceptedDataType() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinIpAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinIpAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java index 2ce7aab455c53..8bb82a149f45e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java @@ -26,8 +26,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java index 5591fb57a8f2d..da8a63a429200 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java @@ 
-23,8 +23,8 @@ public class MinLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new MinLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new MinLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java index b9ee31fb481f5..aa18c47733ff5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java @@ -32,8 +32,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileDoubleAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileDoubleAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java index d000fba1ee299..8a44fba3bfa18 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileDoubleAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileDoubleAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionTests.java index da69e11734b36..d23436310cff7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionTests.java @@ -32,8 +32,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileFloatAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileFloatAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunctionTests.java index 917f6b6a0b643..c338dc38395f1 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileFloatAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileFloatAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java index 4b8ef49e09d97..278c37c02be35 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java @@ -31,8 +31,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileIntAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileIntAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java index 15cf0c9202527..ef8fa6eab85fe 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileIntAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileIntAggregatorFunctionSupplier(percentile); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java index 664fe1edc6ad9..b1896025c363e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java @@ -31,8 +31,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new PercentileLongAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileLongAggregatorFunctionSupplier(percentile); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java index f09d395c877c6..55065129df0ce 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java @@ -33,8 +33,8 @@ public void initParameters() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) { - return new PercentileLongAggregatorFunctionSupplier(inputChannels, percentile); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new PercentileLongAggregatorFunctionSupplier(percentile); } @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index 003dc415c6194..a64ec4e155ad0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -33,8 +33,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) { - return new SumDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumDoubleAggregatorFunctionSupplier(); } @Override @@ -53,6 +53,7 @@ public void testOverflowSucceeds() { List<Page> results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(Double.MAX_VALUE - 1, 2)), List.of(simple().get(driverContext)), @@ -71,6 +72,7 @@ public void testSummationAccuracy() { List<Page> results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator( driverContext.blockFactory(), @@ -100,6 +102,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(values)), List.of(simple().get(driverContext)), @@ -122,6 +125,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple().get(driverContext)), @@ -141,6 +145,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple().get(driverContext)),
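
Unrelated to the supplier change, the Sum* function tests in this stretch also thread a leading "test" string through every Driver constructor. A before/after fragment, kept as comments because the hunks elide the trailing arguments; the diff shows only that a label string now comes first:

// Before this diff the test drivers were built as:
//   Driver d = new Driver(
//       driverContext,
//       new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(Double.MAX_VALUE - 1, 2)),
//       List.of(simple().get(driverContext)),
//       ...);          // remaining arguments not shown in the hunks
// After it, every construction gains a leading label string; these tests pass "test":
//   Driver d = new Driver(
//       "test",
//       driverContext,
//       ...);

diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java index f982ee6cd58d6..8e6970ebdd109 100644 ---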
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) { - return new SumDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumDoubleAggregatorFunctionSupplier(); } @Override
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java index 521c1e261cc62..11205907acb2d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java @@ -33,8 +33,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) { - return new SumFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumFloatAggregatorFunctionSupplier(); } @Override @@ -53,6 +53,7 @@ public void testOverflowSucceeds() { List<Page> results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(Float.MAX_VALUE - 1, 2f)), List.of(simple().get(driverContext)), @@ -71,6 +72,7 @@ public void testSummationAccuracy() { List<Page> results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator( driverContext.blockFactory(), @@ -100,6 +102,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(values)), List.of(simple().get(driverContext)), @@ -122,6 +125,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(largeValues)), List.of(simple().get(driverContext)), @@ -141,6 +145,7 @@ public void testSummationAccuracy() { driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceFloatBlockSourceOperator(driverContext.blockFactory(), Stream.of(largeValues)), List.of(simple().get(driverContext)),
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunctionTests.java index 54bd92cbfff21..008b8a18a6b0e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunctionTests.java @@ -31,8 +31,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { } @Override - protected 
AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index 8c5e4430128b7..6484382d5ff50 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumIntAggregatorFunctionSupplier(); } @Override @@ -52,6 +52,7 @@ public void testRejectsDouble() { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java index 3dfa4e9332a08..d83357940d99f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java @@ -22,8 +22,8 @@ public class SumIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index 00cdbedef54d6..c2b805291f4f6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumLongAggregatorFunctionSupplier(); } @Override @@ -51,6 +51,7 @@ public void testOverflowFails() { DriverContext driverContext = driverContext(); try ( Driver d = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), 
LongStream.of(Long.MAX_VALUE - 1, 2)), List.of(simple().get(driverContext)), @@ -68,6 +69,7 @@ public void testRejectsDouble() { BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple().get(driverContext)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java index f41a5cbef94fb..f289686f8e844 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java @@ -22,8 +22,8 @@ public class SumLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new SumLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new SumLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionTests.java index 662b963d32473..cfb91acb1cf20 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionTests.java @@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopBooleanAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopBooleanAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionTests.java index 732229c98f9c7..e1f38692877a2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionTests.java @@ -9,8 +9,6 @@ import org.apache.lucene.util.BytesRef; -import java.util.List; - public class TopBytesRefAggregatorFunctionTests extends AbstractTopBytesRefAggregatorFunctionTests { @Override protected BytesRef randomValue() { @@ -18,8 +16,8 @@ protected BytesRef randomValue() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopBytesRefAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopBytesRefAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunctionTests.java index 4932e1abef46d..0c27a5f386811 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunctionTests.java @@ -10,8 +10,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.type.DataType; -import java.util.List; - public class TopBytesRefGroupingAggregatorFunctionTests extends AbstractTopBytesRefGroupingAggregatorFunctionTests { @Override protected BytesRef randomValue() { @@ -19,8 +17,8 @@ protected BytesRef randomValue() { } @Override - protected final AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopBytesRefAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected final AggregatorFunctionSupplier aggregatorFunction() { + return new TopBytesRefAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java index 817df4ba47130..04c2000d2e2d7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java @@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopDoubleAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopDoubleAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java index c565a13fb73d4..8dd2d5d82f815 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java @@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopFloatAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopFloatAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java index a0ac1a685413e..b52439dc98263 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java @@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopIntAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopIntAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionTests.java index 840e4cf9af961..c9ec81cb5981d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionTests.java @@ -10,8 +10,6 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; -import java.util.List; - public class TopIpAggregatorFunctionTests extends AbstractTopBytesRefAggregatorFunctionTests { @Override protected BytesRef randomValue() { @@ -19,8 +17,8 @@ protected BytesRef randomValue() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopIpAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopIpAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunctionTests.java index 02bf6b667192b..3c0577f7a1a99 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunctionTests.java @@ -11,8 +11,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.type.DataType; -import java.util.List; - public class TopIpGroupingAggregatorFunctionTests extends AbstractTopBytesRefGroupingAggregatorFunctionTests { @Override protected BytesRef randomValue() { @@ -20,8 +18,8 @@ protected BytesRef randomValue() { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopIpAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopIpAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java index cb42be67844dc..4ff27b092a183 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java @@ -27,8 +27,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier 
aggregatorFunction(List inputChannels) { - return new TopLongAggregatorFunctionSupplier(inputChannels, LIMIT, true); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new TopLongAggregatorFunctionSupplier(LIMIT, true); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java index c0a91fe22b87b..7c5e3f3861161 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java @@ -32,8 +32,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java index fc9bc90828df3..a1367bee53340 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java @@ -28,8 +28,8 @@ public class ValuesBytesRefGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesBytesRefAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java index e5bb8e3138e25..497813e058e67 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java @@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java index a4b1a3c028e43..b89612a52c682 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java @@ -27,8 +27,8 @@ public class ValuesDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionTests.java index 67068ce10c997..0cf536d3e0eca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionTests.java @@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunctionTests.java index e25d7567a1933..7dc550abd4e49 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunctionTests.java @@ -27,8 +27,8 @@ public class ValuesFloatGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesFloatAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesFloatAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java index c60707046a0b1..9e4d56a962b2a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java @@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesIntAggregatorFunctionSupplier(); } @Override
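Every supplier change in these aggregation test files is the same mechanical migration: the input-channel list moves out of the AggregatorFunctionSupplier constructor and into the call that builds the aggregator or factory, so a supplier no longer fixes its channels at construction time and can be reused across channel lists. A before/after sketch using only signatures that appear in this diff; mode, driverContext, and the channel list stand in for whatever the call site has:

    // Before: channels were bound when the supplier was created.
    AggregatorFunctionSupplier before = new SumLongAggregatorFunctionSupplier(List.of(0));
    before.aggregator(driverContext);
    before.aggregatorFactory(mode);
    before.groupingAggregatorFactory(mode);

    // After: the supplier is channel-free; each aggregator or factory binds its own channels.
    AggregatorFunctionSupplier after = new SumLongAggregatorFunctionSupplier();
    after.aggregator(driverContext, List.of(0));
    after.aggregatorFactory(mode, List.of(0));
    after.groupingAggregatorFactory(mode, List.of(0));

diff --git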
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java index 154b076d6a246..7368ed285ddb6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java @@ -27,8 +27,8 @@ public class ValuesIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesIntAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java index 933058d8d8e13..32609edd2b8fe 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java @@ -28,8 +28,8 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { } @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java index 8259d84d955ef..3180ac53f6efc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java @@ -27,8 +27,8 @@ public class ValuesLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new ValuesLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier aggregatorFunction() { + return new ValuesLongAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index 914d29bb8ba25..42e9fc8deafc1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -416,6 +416,7 @@ public void testCategorize_withDriver() { List intermediateOutput = new ArrayList<>(); Driver driver = new Driver( + "test", driverContext, 
new LocalSourceOperator(input1), List.of( @@ -423,8 +424,8 @@ public void testCategorize_withDriver() { List.of(makeGroupSpec()), AggregatorMode.INITIAL, List.of( - new SumLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL), - new MaxLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL) + new SumLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1)), + new MaxLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1)) ), 16 * 1024, analysisRegistry @@ -436,6 +437,7 @@ public void testCategorize_withDriver() { runDriver(driver); driver = new Driver( + "test", driverContext, new LocalSourceOperator(input2), List.of( @@ -443,8 +445,8 @@ public void testCategorize_withDriver() { List.of(makeGroupSpec()), AggregatorMode.INITIAL, List.of( - new SumLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL), - new MaxLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL) + new SumLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1)), + new MaxLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(1)) ), 16 * 1024, analysisRegistry @@ -458,6 +460,7 @@ public void testCategorize_withDriver() { List finalOutput = new ArrayList<>(); driver = new Driver( + "test", driverContext, new CannedSourceOperator(intermediateOutput.iterator()), List.of( @@ -465,8 +468,8 @@ public void testCategorize_withDriver() { List.of(makeGroupSpec()), AggregatorMode.FINAL, List.of( - new SumLongAggregatorFunctionSupplier(List.of(1, 2)).groupingAggregatorFactory(AggregatorMode.FINAL), - new MaxLongAggregatorFunctionSupplier(List.of(3, 4)).groupingAggregatorFactory(AggregatorMode.FINAL) + new SumLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.FINAL, List.of(1, 2)), + new MaxLongAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.FINAL, List.of(3, 4)) ), 16 * 1024, analysisRegistry diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java index 5f868f51f06e2..9c89317e4c359 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java @@ -137,13 +137,14 @@ public void testCategorize_withDriver() { List intermediateOutput = new ArrayList<>(); Driver driver = new Driver( + "test", driverContext, new LocalSourceOperator(input1), List.of( new HashAggregationOperator.HashAggregationOperatorFactory( groupSpecs, AggregatorMode.INITIAL, - List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(0)).groupingAggregatorFactory(AggregatorMode.INITIAL)), + List.of(new ValuesBytesRefAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(0))), 16 * 1024, analysisRegistry ).get(driverContext) @@ -154,13 +155,14 @@ public void testCategorize_withDriver() { runDriver(driver); driver = new Driver( + "test", driverContext, new LocalSourceOperator(input2), List.of( new HashAggregationOperator.HashAggregationOperatorFactory( groupSpecs, 
AggregatorMode.INITIAL, - List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(0)).groupingAggregatorFactory(AggregatorMode.INITIAL)), + List.of(new ValuesBytesRefAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.INITIAL, List.of(0))), 16 * 1024, analysisRegistry ).get(driverContext) @@ -173,13 +175,14 @@ public void testCategorize_withDriver() { List finalOutput = new ArrayList<>(); driver = new Driver( + "test", driverContext, new CannedSourceOperator(intermediateOutput.iterator()), List.of( new HashAggregationOperator.HashAggregationOperatorFactory( groupSpecs, AggregatorMode.FINAL, - List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(2)).groupingAggregatorFactory(AggregatorMode.FINAL)), + List.of(new ValuesBytesRefAggregatorFunctionSupplier().groupingAggregatorFactory(AggregatorMode.FINAL, List.of(2))), 16 * 1024, analysisRegistry ).get(driverContext) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java index 78ed096c10b3f..2358643dc089e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java @@ -409,6 +409,42 @@ public final void testMergeThisBigger() { } } + public final void testMergePastEnd() { + int buckets = 10000; + int bucketSize = between(1, 1000); + int target = between(0, buckets); + List<V> values = randomList(buckets, buckets, this::randomValue); + Collections.sort(values); + try (T sort = build(SortOrder.ASC, bucketSize)) { + // Add a single value to each bucket of the main sort. + for (int b = 0; b < buckets; b++) { + collect(sort, values.get(b), b); + } + + try (T other = build(SortOrder.ASC, bucketSize)) { + // Add every value except the target's own (the main sort already has it) to the target bucket of the secondary sort. + for (int i = 0; i < values.size(); i++) { + if (i != target) { + collect(other, values.get(i), target); + } + } + + // Merge all buckets pairwise. Every secondary bucket except the target is empty.
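+ // Buckets past the target exist only in the main sort, so these merges must tolerate reading beyond the end of the secondary sort's allocated storage.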
+ for (int b = 0; b < buckets; b++) { + merge(sort, b, other, b); + } + } + + for (int b = 0; b < buckets; b++) { + if (b == target) { + assertBlock(sort, b, values.subList(0, bucketSize)); + } else { + assertBlock(sort, b, List.of(values.get(b))); + } + } + } + } + protected void assertBlock(T sort, int groupId, List values) { var blockFactory = TestBlockFactory.getNonBreakingInstance(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java index 1f5b5bf9b9337..61c7582c74245 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -151,7 +151,7 @@ private void testCount(Supplier contexts, int size, int limit) { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxDoubleOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxDoubleOperatorTests.java index 4cb113457b23f..49d7e42e49df7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxDoubleOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxDoubleOperatorTests.java @@ -70,7 +70,7 @@ public void assertPage(Page page) { @Override public AggregatorFunction newAggregatorFunction(DriverContext context) { - return new MaxDoubleAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context); + return new MaxDoubleAggregatorFunctionSupplier().aggregator(context, List.of(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxFloatOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxFloatOperatorTests.java index 4a009a2d84c66..7651cf5c0b876 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxFloatOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxFloatOperatorTests.java @@ -70,7 +70,7 @@ public void assertPage(Page page) { @Override public AggregatorFunction newAggregatorFunction(DriverContext context) { - return new MaxFloatAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context); + return new MaxFloatAggregatorFunctionSupplier().aggregator(context, List.of(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxIntOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxIntOperatorTests.java index a6118481ca43d..f26274be6f810 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxIntOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxIntOperatorTests.java @@ 
-69,7 +69,7 @@ public void assertPage(Page page) { @Override public AggregatorFunction newAggregatorFunction(DriverContext context) { - return new MaxIntAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context); + return new MaxIntAggregatorFunctionSupplier().aggregator(context, List.of(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java index 894c8e862123e..ae096b5e3630c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxLongOperatorTests.java @@ -69,7 +69,7 @@ public void assertPage(Page page) { @Override public AggregatorFunction newAggregatorFunction(DriverContext context) { - return new MaxLongAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context); + return new MaxLongAggregatorFunctionSupplier().aggregator(context, List.of(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java index b65da5aba7588..f6fba20a28889 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java @@ -166,7 +166,7 @@ private void testMax(Supplier contexts, int size, int limit) { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinDoubleOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinDoubleOperatorTests.java index 5fef2d4897030..ce212392ef888 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinDoubleOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinDoubleOperatorTests.java @@ -70,7 +70,7 @@ public void assertPage(Page page) { @Override public AggregatorFunction newAggregatorFunction(DriverContext context) { - return new MinDoubleAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context); + return new MinDoubleAggregatorFunctionSupplier().aggregator(context, List.of(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinFloatOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinFloatOperatorTests.java index 41c8751c08a96..9500879f450b3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinFloatOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinFloatOperatorTests.java @@ -71,7 +71,7 @@ public void assertPage(Page page) { @Override public 
AggregatorFunction newAggregatorFunction(DriverContext context) { - return new MinFloatAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context); + return new MinFloatAggregatorFunctionSupplier().aggregator(context, List.of(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinIntegerOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinIntegerOperatorTests.java index 5d2c867f4f660..e800619ef747d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinIntegerOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinIntegerOperatorTests.java @@ -69,7 +69,7 @@ public void assertPage(Page page) { @Override public AggregatorFunction newAggregatorFunction(DriverContext context) { - return new MinIntAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context); + return new MinIntAggregatorFunctionSupplier().aggregator(context, List.of(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinLongOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinLongOperatorTests.java index 15c34f5853ae2..a20d90f1fcb5b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinLongOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinLongOperatorTests.java @@ -69,7 +69,7 @@ public void assertPage(Page page) { @Override public AggregatorFunction newAggregatorFunction(DriverContext context) { - return new MinLongAggregatorFunctionSupplier(List.of(0, 1)).aggregator(context); + return new MinLongAggregatorFunctionSupplier().aggregator(context, List.of(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java index f57bbd8c5ddb5..3033efa50f373 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java @@ -166,7 +166,7 @@ private void testMin(Supplier contexts, int size, int limit) { int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { DriverContext ctx = contexts.get(); - drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); + drivers.add(new Driver("test", ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java index 54b33732aa425..4a628d596f142 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java @@ -210,6 +210,7 @@ private List runQuery(Set values, Query query, boolean shuffleDocs operators.add(new 
EvalOperator(blockFactory, luceneQueryEvaluator)); List results = new ArrayList<>(); Driver driver = new Driver( + "test", driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT, scoring).get(driverContext), operators, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index b7114bb4e9b54..574f9b25ff146 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -160,7 +160,7 @@ private void testSimple(DriverContext ctx, int size, int limit) { List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java index 20af40bcc6840..3af21ba37d088 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java @@ -127,7 +127,7 @@ protected void testSimple(DriverContext ctx, int size, int limit) { List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index a6d652d499d84..92eaa78eedcd7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -187,7 +187,7 @@ protected void testSimple(DriverContext ctx, int size, int limit) { List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + new Driver("test", ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index feba401d445e7..934fbcc0b897e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -174,6 +174,7 @@ record Doc(int host, long timestamp, long metric) {} var metricField = new NumberFieldMapper.NumberFieldType("metric", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( + "test", driverContext, timeSeriesFactory.get(driverContext), List.of(ValuesSourceReaderOperatorTests.factory(reader, metricField, ElementType.LONG).get(driverContext)), @@ -248,6 +249,7 @@ public void testMatchNone() throws Exception { List results = new ArrayList<>(); OperatorTestCase.runDriver( new Driver( + "test", driverContext, timeSeriesFactory.get(driverContext), List.of(), @@ -306,6 +308,7 @@ List runDriver(int limit, int maxPageSize, boolean forceMerge, int numTime var hostnameField = new KeywordFieldMapper.KeywordFieldType("hostname"); OperatorTestCase.runDriver( new Driver( + "test", ctx, timeSeriesFactory.get(ctx), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java index 910541607d83f..32164c7954dda 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java @@ -1299,6 +1299,7 @@ public void testWithNulls() throws IOException { var vsShardContext = new ValuesSourceReaderOperator.ShardContext(reader(indexKey), () -> SourceLoader.FROM_STORED_SOURCE); try ( Driver driver = new Driver( + "test", driverContext, luceneFactory.get(driverContext), List.of( @@ -1376,6 +1377,7 @@ public void testNullsShared() { int[] pages = new int[] { 0 }; try ( Driver d = new Driver( + "test", driverContext, simpleInput(driverContext, 10), List.of( @@ -1497,6 +1499,7 @@ protected final List drive(List operators, Iterator input, boolean success = false; try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input), operators, @@ -1524,6 +1527,7 @@ public static void runDriver(List drivers) { for (int i = 0; i < dummyDrivers; i++) { drivers.add( new Driver( + "test", "dummy-session", 0, 0, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 2661ff665831f..07a66a473f3b1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -1307,6 +1307,7 @@ public void testWithNulls() throws IOException { ); try ( Driver driver = new Driver( + "test", driverContext, luceneFactory.get(driverContext), List.of( @@ -1409,6 +1410,7 @@ public void testNullsShared() { int[] pages = new int[] { 0 }; try ( Driver d = new Driver( + "test", driverContext, simpleInput(driverContext.blockFactory(), 10), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java index 5e16fce2af00b..6fbef583cbefa 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java @@ -48,8 +48,8 @@ protected Operator.OperatorFactory simpleWithMode(AggregatorMode mode) { return new AggregationOperator.AggregationOperatorFactory( List.of( - new SumLongAggregatorFunctionSupplier(sumChannels).aggregatorFactory(mode), - new MaxLongAggregatorFunctionSupplier(maxChannels).aggregatorFactory(mode) + new SumLongAggregatorFunctionSupplier().aggregatorFactory(mode, sumChannels), + new MaxLongAggregatorFunctionSupplier().aggregatorFactory(mode, maxChannels) ), mode ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorStatusTests.java index ab2dcc5e6c443..1990728e128f0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorStatusTests.java @@ -39,31 +39,31 @@ protected AsyncOperator.Status mutateInstance(AsyncOperator.Status in) throws IO case 0 -> new AsyncOperator.Status( randomValueOtherThan(in.receivedPages(), ESTestCase::randomNonNegativeLong), in.completedPages(), - in.totalTimeInMillis() + in.procesNanos() ); case 1 -> new AsyncOperator.Status( in.receivedPages(), randomValueOtherThan(in.completedPages(), ESTestCase::randomNonNegativeLong), - in.totalTimeInMillis() + in.procesNanos() ); case 2 -> new AsyncOperator.Status( in.receivedPages(), in.completedPages(), - randomValueOtherThan(in.totalTimeInMillis(), ESTestCase::randomNonNegativeLong) + randomValueOtherThan(in.procesNanos(), ESTestCase::randomNonNegativeLong) ); default -> throw new AssertionError("unknown "); }; } public void testToXContent() { - var status = new AsyncOperator.Status(100, 50, TimeValue.timeValueSeconds(10).millis()); + var status = new AsyncOperator.Status(100, 50, TimeValue.timeValueNanos(10).nanos()); String json = Strings.toString(status, true, true); assertThat(json, equalTo(""" { + "process_nanos" : 10, + "process_time" : "10nanos", "received_pages" : 100, - "completed_pages" : 50, - "total_time_in_millis" : 10000, - "total_time" : "10s" + "completed_pages" : 50 }""")); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index f017fed16cc96..acc62de0884c2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -112,7 +112,7 @@ protected Page createPage(int positionOffset, int length) { } }; int maxConcurrentRequests = randomIntBetween(1, 10); - AsyncOperator asyncOperator = new AsyncOperator(driverContext, maxConcurrentRequests) { + AsyncOperator asyncOperator = new AsyncOperator(driverContext, threadPool.getThreadContext(), maxConcurrentRequests) { final LookupService lookupService = new LookupService(threadPool, globalBlockFactory, dict, maxConcurrentRequests); @Override @@ -165,7 +165,14 @@ public void doClose() { } }); PlainActionFuture future = new PlainActionFuture<>(); - Driver driver = new Driver(driverContext, sourceOperator, 
intermediateOperators, outputOperator, () -> assertFalse(it.hasNext())); + Driver driver = new Driver( + "test", + driverContext, + sourceOperator, + intermediateOperators, + outputOperator, + () -> assertFalse(it.hasNext()) + ); Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 10000), future); future.actionGet(); Releasables.close(localBreaker); @@ -175,7 +182,7 @@ class TestOp extends AsyncOperator { Map> handlers = new HashMap<>(); TestOp(DriverContext driverContext, int maxOutstandingRequests) { - super(driverContext, maxOutstandingRequests); + super(driverContext, threadPool.getThreadContext(), maxOutstandingRequests); } @Override @@ -255,7 +262,7 @@ public void testFailure() throws Exception { ); int maxConcurrentRequests = randomIntBetween(1, 10); AtomicBoolean failed = new AtomicBoolean(); - AsyncOperator asyncOperator = new AsyncOperator(driverContext, maxConcurrentRequests) { + AsyncOperator asyncOperator = new AsyncOperator(driverContext, threadPool.getThreadContext(), maxConcurrentRequests) { @Override protected void performAsync(Page inputPage, ActionListener listener) { ActionRunnable command = new ActionRunnable<>(listener) { @@ -295,7 +302,7 @@ protected void doClose() { }; SinkOperator outputOperator = new PageConsumerOperator(Page::releaseBlocks); PlainActionFuture future = new PlainActionFuture<>(); - Driver driver = new Driver(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker); + Driver driver = new Driver("test", driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker); Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 1000), future); assertBusy(() -> assertTrue(future.isDone())); if (failed.get()) { @@ -317,7 +324,7 @@ public void testIsFinished() { for (int i = 0; i < iters; i++) { DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory); CyclicBarrier barrier = new CyclicBarrier(2); - AsyncOperator asyncOperator = new AsyncOperator(driverContext, between(1, 10)) { + AsyncOperator asyncOperator = new AsyncOperator(driverContext, threadPool.getThreadContext(), between(1, 10)) { @Override protected void performAsync(Page inputPage, ActionListener listener) { ActionRunnable command = new ActionRunnable<>(listener) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ChangePointOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ChangePointOperatorTests.java new file mode 100644 index 0000000000000..3175bb06c5627 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ChangePointOperatorTests.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.hamcrest.Matcher; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class ChangePointOperatorTests extends OperatorTestCase { + + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + // size must be in [25, 1000] for ChangePoint to function correctly + // and detect the step change. + size = Math.clamp(size, 25, 1000); + List<Long> data = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + if (i <= size / 2) { + data.add(0L); + } else { + data.add(1L); + } + } + return new SequenceLongBlockSourceOperator(blockFactory, data); + } + + @Override + protected void assertSimpleOutput(List<Page> input, List<Page> results) { + boolean seenOne = false; + assertThat(results, hasSize(input.size())); + for (int i = 0; i < results.size(); i++) { + Page inputPage = input.get(i); + Page resultPage = results.get(i); + assertThat(resultPage.getPositionCount(), equalTo(inputPage.getPositionCount())); + assertThat(resultPage.getBlockCount(), equalTo(3)); + for (int j = 0; j < resultPage.getPositionCount(); j++) { + long inputValue = ((LongBlock) inputPage.getBlock(0)).getLong(j); + long resultValue = ((LongBlock) resultPage.getBlock(0)).getLong(j); + assertThat(resultValue, equalTo(inputValue)); + if (seenOne == false && resultValue == 1L) { + BytesRef type = ((BytesRefBlock) resultPage.getBlock(1)).getBytesRef(j, new BytesRef()); + double pvalue = ((DoubleBlock) resultPage.getBlock(2)).getDouble(j); + assertThat(type.utf8ToString(), equalTo("step_change")); + assertThat(pvalue, equalTo(0.0)); + seenOne = true; + } else { + assertThat(resultPage.getBlock(1).isNull(j), equalTo(true)); + assertThat(resultPage.getBlock(2).isNull(j), equalTo(true)); + } + } + } + assertThat(seenOne, equalTo(true)); + } + + @Override + protected Operator.OperatorFactory simple() { + return new ChangePointOperator.Factory(0, null, 0, 0); + } + + @Override + protected Matcher<String> expectedDescriptionOfSimple() { + return equalTo("ChangePointOperator[channel=0]"); + } + + @Override + protected Matcher<String> expectedToStringOfSimple() { + return equalTo("ChangePointOperator[channel=0]"); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java index 27083ea0fcd13..a39aa10af5f31 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java @@ -27,6 +27,7 @@ public class DriverProfileTests extends AbstractWireSerializingTestCase { public void testToXContent() { DriverProfile status = new DriverProfile( + "test", 123413220000L, 123413243214L, 10012, @@ -44,6 +45,7 @@ public void testToXContent() { ); assertThat(Strings.toString(status, true, true), equalTo(""" { + "task_description" : "test", "start" :
"1973-11-29T09:27:00.000Z", "start_millis" : 123413220000, "stop" : "1973-11-29T09:27:23.214Z", @@ -101,6 +103,7 @@ protected Writeable.Reader instanceReader() { @Override protected DriverProfile createTestInstance() { return new DriverProfile( + DriverStatusTests.randomTaskDescription(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), @@ -113,6 +116,7 @@ protected DriverProfile createTestInstance() { @Override protected DriverProfile mutateInstance(DriverProfile instance) throws IOException { + String taskDescription = instance.taskDescription(); long startMillis = instance.startMillis(); long stopMillis = instance.stopMillis(); long tookNanos = instance.tookNanos(); @@ -120,17 +124,18 @@ protected DriverProfile mutateInstance(DriverProfile instance) throws IOExceptio long iterations = instance.iterations(); var operators = instance.operators(); var sleeps = instance.sleeps(); - switch (between(0, 6)) { - case 0 -> startMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); - case 1 -> stopMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); - case 2 -> tookNanos = randomValueOtherThan(tookNanos, ESTestCase::randomNonNegativeLong); - case 3 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); - case 4 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); - case 5 -> operators = randomValueOtherThan(operators, DriverStatusTests::randomOperatorStatuses); - case 6 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); + switch (between(0, 7)) { + case 0 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomTaskDescription); + case 1 -> startMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); + case 2 -> stopMillis = randomValueOtherThan(startMillis, ESTestCase::randomNonNegativeLong); + case 3 -> tookNanos = randomValueOtherThan(tookNanos, ESTestCase::randomNonNegativeLong); + case 4 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); + case 5 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); + case 6 -> operators = randomValueOtherThan(operators, DriverStatusTests::randomOperatorStatuses); + case 7 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); default -> throw new UnsupportedOperationException(); } - return new DriverProfile(startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); + return new DriverProfile(taskDescription, startMillis, stopMillis, tookNanos, cpuNanos, iterations, operators, sleeps); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverSchedulerTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverSchedulerTests.java new file mode 100644 index 0000000000000..ec6bf38e557a9 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverSchedulerTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.FixedExecutorBuilder; +import org.elasticsearch.threadpool.TestThreadPool; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; + +public class DriverSchedulerTests extends ESTestCase { + + public void testClearPendingTaskOnRejection() { + DriverScheduler scheduler = new DriverScheduler(); + AtomicInteger counter = new AtomicInteger(); + var threadPool = new TestThreadPool( + "test", + new FixedExecutorBuilder(Settings.EMPTY, "test", 1, 2, "test", EsExecutors.TaskTrackingConfig.DEFAULT) + ); + CountDownLatch latch = new CountDownLatch(1); + Executor executor = threadPool.executor("test"); + try { + for (int i = 0; i < 10; i++) { + try { + executor.execute(() -> safeAwait(latch)); + } catch (EsRejectedExecutionException e) { + break; + } + } + scheduler.scheduleOrRunTask(executor, new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + counter.incrementAndGet(); + } + + @Override + protected void doRun() { + counter.incrementAndGet(); + } + }); + scheduler.runPendingTasks(); + assertThat(counter.get(), equalTo(1)); + } finally { + latch.countDown(); + terminate(threadPool); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java index b46d9f3f4add7..83deb57a3ba7c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java @@ -32,6 +32,7 @@ public class DriverStatusTests extends AbstractWireSerializingTestCase instanceReader() { protected DriverStatus createTestInstance() { return new DriverStatus( randomSessionId(), + randomTaskDescription(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), @@ -129,6 +132,10 @@ private String randomSessionId() { return RandomStrings.randomAsciiLettersOfLengthBetween(random(), 1, 15); } + public static String randomTaskDescription() { + return RandomStrings.randomAsciiLettersOfLength(random(), 5); + } + private DriverStatus.Status randomStatus() { return randomFrom(DriverStatus.Status.values()); } @@ -150,6 +157,7 @@ private static DriverStatus.OperatorStatus randomOperatorStatus() { @Override protected DriverStatus mutateInstance(DriverStatus instance) throws IOException { var sessionId = instance.sessionId(); + var taskDescription = instance.taskDescription(); long started = instance.started(); long lastUpdated = instance.lastUpdated(); long cpuNanos = instance.cpuNanos(); @@ -158,19 +166,31 @@ protected DriverStatus mutateInstance(DriverStatus instance) throws IOException var completedOperators = instance.completedOperators(); var activeOperators = instance.activeOperators(); var sleeps = instance.sleeps(); - switch (between(0, 8)) { + switch (between(0, 9)) { case 0 -> sessionId = randomValueOtherThan(sessionId, this::randomSessionId); - case 1 -> started = 
randomValueOtherThan(started, ESTestCase::randomNonNegativeLong); - case 2 -> lastUpdated = randomValueOtherThan(lastUpdated, ESTestCase::randomNonNegativeLong); - case 3 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); - case 4 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); - case 5 -> status = randomValueOtherThan(status, this::randomStatus); - case 6 -> completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses); - case 7 -> activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses); - case 8 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); + case 1 -> taskDescription = randomValueOtherThan(taskDescription, DriverStatusTests::randomTaskDescription); + case 2 -> started = randomValueOtherThan(started, ESTestCase::randomNonNegativeLong); + case 3 -> lastUpdated = randomValueOtherThan(lastUpdated, ESTestCase::randomNonNegativeLong); + case 4 -> cpuNanos = randomValueOtherThan(cpuNanos, ESTestCase::randomNonNegativeLong); + case 5 -> iterations = randomValueOtherThan(iterations, ESTestCase::randomNonNegativeLong); + case 6 -> status = randomValueOtherThan(status, this::randomStatus); + case 7 -> completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses); + case 8 -> activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses); + case 9 -> sleeps = randomValueOtherThan(sleeps, DriverSleepsTests::randomDriverSleeps); default -> throw new UnsupportedOperationException(); } - return new DriverStatus(sessionId, started, lastUpdated, cpuNanos, iterations, status, completedOperators, activeOperators, sleeps); + return new DriverStatus( + sessionId, + taskDescription, + started, + lastUpdated, + cpuNanos, + iterations, + status, + completedOperators, + activeOperators, + sleeps + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java index e715b94bc55e5..35ccf0da42963 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java @@ -67,6 +67,7 @@ public void testProfileAndStatusFinishInOneRound() { Driver driver = new Driver( "unset", + "test", startEpoch, startNanos, driverContext, @@ -116,6 +117,7 @@ public void testProfileAndStatusOneIterationAtATime() { Driver driver = new Driver( "unset", + "test", startEpoch, startNanos, driverContext, @@ -166,6 +168,7 @@ public void testProfileAndStatusTimeout() { Driver driver = new Driver( "unset", + "test", startEpoch, startNanos, driverContext, @@ -231,7 +234,7 @@ public void testThreadContext() throws Exception { WarningsOperator warning1 = new WarningsOperator(threadPool); WarningsOperator warning2 = new WarningsOperator(threadPool); CyclicBarrier allPagesProcessed = new CyclicBarrier(2); - Driver driver = new Driver(driverContext, new CannedSourceOperator(inPages.iterator()) { + Driver driver = new Driver("test", driverContext, new CannedSourceOperator(inPages.iterator()) { @Override public Page getOutput() { assertRunningWithRegularUser(threadPool); @@ -315,7 +318,7 @@ public void close() { } }); - Driver driver = new Driver(driverContext, sourceOperator, List.of(delayOperator), 
sinkOperator, () -> {}); + Driver driver = new Driver("test", driverContext, sourceOperator, List.of(delayOperator), sinkOperator, () -> {}); ThreadContext threadContext = threadPool.getThreadContext(); PlainActionFuture future = new PlainActionFuture<>(); @@ -331,12 +334,11 @@ public void testResumeOnEarlyFinish() throws Exception { DriverContext driverContext = driverContext(); ThreadPool threadPool = threadPool(); try { - PlainActionFuture sourceFuture = new PlainActionFuture<>(); - var sourceHandler = new ExchangeSourceHandler(between(1, 5), threadPool.executor("esql"), sourceFuture); + var sourceHandler = new ExchangeSourceHandler(between(1, 5), threadPool.executor("esql")); var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), between(1, 5), System::currentTimeMillis); var sourceOperator = new ExchangeSourceOperator(sourceHandler.createExchangeSource()); var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(() -> {}), Function.identity()); - Driver driver = new Driver(driverContext, sourceOperator, List.of(), sinkOperator, () -> {}); + Driver driver = new Driver("test", driverContext, sourceOperator, List.of(), sinkOperator, () -> {}); PlainActionFuture future = new PlainActionFuture<>(); Driver.start(threadPool.getThreadContext(), threadPool.executor("esql"), driver, between(1, 1000), future); assertBusy( @@ -348,7 +350,6 @@ public void testResumeOnEarlyFinish() throws Exception { sinkHandler.fetchPageAsync(true, ActionListener.noop()); future.actionGet(5, TimeUnit.SECONDS); assertThat(driver.status().status(), equalTo(DriverStatus.Status.DONE)); - sourceFuture.actionGet(5, TimeUnit.SECONDS); } finally { terminate(threadPool); } @@ -376,7 +377,7 @@ static class SwitchContextOperator extends AsyncOperator { private final ThreadPool threadPool; SwitchContextOperator(DriverContext driverContext, ThreadPool threadPool) { - super(driverContext, between(1, 3)); + super(driverContext, threadPool.getThreadContext(), between(1, 3)); this.threadPool = threadPool; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FailureCollectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FailureCollectorTests.java index 5fec82b32ddac..4007d4d433f5e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FailureCollectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FailureCollectorTests.java @@ -8,10 +8,12 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.NodeDisconnectedException; @@ -106,13 +108,28 @@ public void testEmpty() { public void testTransportExceptions() { FailureCollector collector = new FailureCollector(5); collector.unwrapAndCollect(new NodeDisconnectedException(DiscoveryNodeUtils.builder("node-1").build(), "/field_caps")); - collector.unwrapAndCollect(new TransportException(new CircuitBreakingException("too large", CircuitBreaker.Durability.TRANSIENT))); + collector.unwrapAndCollect(new 
TransportException(new IOException("disk issue"))); Exception failure = collector.getFailure(); assertNotNull(failure); assertThat(failure, instanceOf(NodeDisconnectedException.class)); assertThat(failure.getMessage(), equalTo("[][0.0.0.0:1][/field_caps] disconnected")); Throwable[] suppressed = failure.getSuppressed(); assertThat(suppressed, arrayWithSize(1)); - assertThat(suppressed[0], instanceOf(CircuitBreakingException.class)); + assertThat(suppressed[0], instanceOf(IOException.class)); + } + + public void testErrorCategory() { + FailureCollector collector = new FailureCollector(5); + collector.unwrapAndCollect(new NoShardAvailableActionException(new ShardId("test", "n/a", 1), "not ready")); + collector.unwrapAndCollect( + new TransportException(new CircuitBreakingException("request is too large", CircuitBreaker.Durability.TRANSIENT)) + ); + Exception failure = collector.getFailure(); + assertNotNull(failure); + assertThat(failure, instanceOf(CircuitBreakingException.class)); + assertThat(failure.getMessage(), equalTo("request is too large")); + assertThat(failure.getSuppressed(), arrayWithSize(1)); + assertThat(failure.getSuppressed()[0], instanceOf(NoShardAvailableActionException.class)); + assertThat(failure.getSuppressed()[0].getMessage(), equalTo("not ready")); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 744121a3807c3..f08552913963d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -68,6 +68,7 @@ public final void testInitialFinal() { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input.iterator()), List.of(simpleWithMode(AggregatorMode.INITIAL).get(driverContext), simpleWithMode(AggregatorMode.FINAL).get(driverContext)), @@ -89,6 +90,7 @@ public final void testManyInitialFinal() { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(partials.iterator()), List.of(simpleWithMode(AggregatorMode.FINAL).get(driverContext)), @@ -110,6 +112,7 @@ public final void testInitialIntermediateFinal() { try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input.iterator()), List.of( @@ -142,6 +145,7 @@ public final void testManyInitialManyPartialFinal() { List results = new ArrayList<>(); try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(intermediates.iterator()), List.of(simpleWithMode(AggregatorMode.FINAL).get(driverContext)), @@ -212,11 +216,7 @@ List createDriversForInput(List input, List results, boolean randomIntBetween(2, 10), threadPool.relativeTimeInMillisSupplier() ); - ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler( - randomIntBetween(1, 4), - threadPool.executor(ESQL_TEST_EXECUTOR), - ActionListener.noop() - ); + ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(randomIntBetween(1, 4), threadPool.executor(ESQL_TEST_EXECUTOR)); sourceExchanger.addRemoteSink( sinkExchanger::fetchPageAsync, randomBoolean(), @@ -240,6 +240,7 @@ List createDriversForInput(List input, List results, boolean DriverContext driver1Context = driverContext(); drivers.add( new Driver( + "test", driver1Context, new 
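The new testErrorCategory encodes a precedence rule in FailureCollector: a request-level failure such as CircuitBreakingException becomes the primary exception even when an availability failure (NoShardAvailableActionException) was collected first, and the lower-priority failure is retained under getSuppressed(). A sketch of that selection, assuming the caller supplies the category ordering:

    import java.util.Comparator;
    import java.util.List;

    final class FailurePicker {
        // The highest-category failure wins; all others become suppressed on it.
        static Exception pick(List<Exception> failures, Comparator<Exception> byCategory) {
            Exception primary = failures.stream().max(byCategory).orElseThrow();
            failures.stream().filter(f -> f != primary).forEach(primary::addSuppressed);
            return primary;
        }
    }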
CannedSourceOperator(pages.iterator()), List.of( @@ -257,6 +258,7 @@ List createDriversForInput(List input, List results, boolean DriverContext driver2Context = driverContext(); drivers.add( new Driver( + "test", driver2Context, new ExchangeSourceOperator(sourceExchanger.createExchangeSource()), List.of( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index 953c7d1c313f1..30579f864abcb 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -56,8 +56,8 @@ protected Operator.OperatorFactory simpleWithMode(AggregatorMode mode) { List.of(new BlockHash.GroupSpec(0, ElementType.LONG)), mode, List.of( - new SumLongAggregatorFunctionSupplier(sumChannels).groupingAggregatorFactory(mode), - new MaxLongAggregatorFunctionSupplier(maxChannels).groupingAggregatorFactory(mode) + new SumLongAggregatorFunctionSupplier().groupingAggregatorFactory(mode, sumChannels), + new MaxLongAggregatorFunctionSupplier().groupingAggregatorFactory(mode, maxChannels) ), randomPageSize(), null diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index afd4695db932f..103a6a35651c7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -41,6 +41,7 @@ import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.createTimeSeriesSourceOperator; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.writeTS; +import static org.elasticsearch.compute.operator.TimeSeriesAggregationOperatorFactories.SupplierWithChannels; import static org.hamcrest.Matchers.equalTo; public class TimeSeriesAggregationOperatorTests extends ComputeTestCase { @@ -269,7 +270,7 @@ public void close() { 1, 3, IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), - List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis)), + List.of(new SupplierWithChannels(new RateLongAggregatorFunctionSupplier(unitInMillis), List.of(4, 2))), List.of(), between(1, 100) ).get(ctx); @@ -279,7 +280,7 @@ public void close() { 0, 1, IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), - List.of(new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis)), + List.of(new SupplierWithChannels(new RateLongAggregatorFunctionSupplier(unitInMillis), List.of(2, 3, 4))), List.of(), between(1, 100) ).get(ctx); @@ -295,7 +296,7 @@ public void close() { } Operator finalAgg = new TimeSeriesAggregationOperatorFactories.Final( finalGroups, - List.of(new SumDoubleAggregatorFunctionSupplier(List.of(2))), + List.of(new SupplierWithChannels(new SumDoubleAggregatorFunctionSupplier(), List.of(2))), List.of(), between(1, 100) ).get(ctx); @@ -303,6 +304,7 @@ public void close() { List results = new ArrayList<>(); 
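The aggregator changes visible here (for example in HashAggregationOperatorTests) move the input channels out of the supplier constructor and into groupingAggregatorFactory(mode, channels), so a supplier no longer hard-codes one channel layout. A toy sketch of the new shape, using hypothetical mini types rather than the real compute API:

    import java.util.List;

    // Old: new MiniSumSupplier(channels).groupingAggregatorFactory(mode)
    // New: the channel-free supplier binds channels at factory-creation time.
    record MiniSumSupplier() {
        MiniFactory groupingAggregatorFactory(String mode, List<Integer> channels) {
            return new MiniFactory(mode, List.copyOf(channels));
        }

        record MiniFactory(String mode, List<Integer> channels) {}
    }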
OperatorTestCase.runDriver( new Driver( + "test", ctx, sourceOperatorFactory.get(ctx), CollectionUtils.concatLists(intermediateOperators, List.of(intialAgg, intermediateAgg, finalAgg)), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java index bd5b53fb25c8b..7213e0b27aea0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java @@ -66,6 +66,25 @@ public void testDrainPages() throws Exception { blockFactory.ensureAllBlocksAreReleased(); } + public void testOutstandingPages() throws Exception { + ExchangeBuffer buffer = new ExchangeBuffer(randomIntBetween(1000, 10000)); + var blockFactory = blockFactory(); + Page p1 = randomPage(blockFactory); + Page p2 = randomPage(blockFactory); + buffer.addPage(p1); + buffer.addPage(p2); + buffer.finish(false); + buffer.addPage(randomPage(blockFactory)); + assertThat(buffer.size(), equalTo(2)); + assertSame(buffer.pollPage(), p1); + p1.releaseBlocks(); + assertSame(buffer.pollPage(), p2); + p2.releaseBlocks(); + assertNull(buffer.pollPage()); + assertTrue(buffer.isFinished()); + blockFactory.ensureAllBlocksAreReleased(); + } + private static MockBlockFactory blockFactory() { BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index fffeeac4e4cc2..57dfe65ca485f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -7,15 +7,18 @@ package org.elasticsearch.compute.operator.exchange; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.node.VersionInformation; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -23,6 +26,7 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; @@ -37,6 +41,7 @@ 
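testOutstandingPages asserts three ExchangeBuffer behaviors: a page added after finish(false) is discarded, pages already buffered stay pollable, and isFinished() reports true only once the queue drains. A single-threaded sketch of those semantics; the real buffer is concurrent and releases the blocks of any discarded page:

    import java.util.ArrayDeque;
    import java.util.Queue;

    final class MiniExchangeBuffer<P> {
        private final Queue<P> queue = new ArrayDeque<>();
        private boolean noMoreInputs;

        void addPage(P page) {
            if (noMoreInputs == false) {
                queue.add(page); // pages arriving after finish(false) are dropped
            }
        }

        void finish(boolean drainRemaining) {
            noMoreInputs = true;
            if (drainRemaining) {
                queue.clear();
            }
        }

        P pollPage() { return queue.poll(); }

        int size() { return queue.size(); }

        boolean isFinished() { return noMoreInputs && queue.isEmpty(); }
    }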
import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancellationService; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.StubbableTransport; @@ -69,6 +74,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; public class ExchangeServiceTests extends ESTestCase { @@ -100,16 +106,16 @@ public void testBasic() throws Exception { AtomicInteger pagesAddedToSink = new AtomicInteger(); ExchangeSink sink1 = sinkExchanger.createExchangeSink(pagesAddedToSink::incrementAndGet); ExchangeSink sink2 = sinkExchanger.createExchangeSink(pagesAddedToSink::incrementAndGet); - PlainActionFuture sourceCompletion = new PlainActionFuture<>(); - ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR), sourceCompletion); + ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR)); ExchangeSource source = sourceExchanger.createExchangeSource(); AtomicInteger pagesAddedToSource = new AtomicInteger(); + PlainActionFuture remoteSinkFuture = new PlainActionFuture<>(); sourceExchanger.addRemoteSink( sinkExchanger::fetchPageAsync, randomBoolean(), pagesAddedToSource::incrementAndGet, 1, - ActionListener.noop() + remoteSinkFuture ); SubscribableListener waitForReading = source.waitForReading().listener(); assertFalse(waitForReading.isDone()); @@ -156,13 +162,12 @@ public void testBasic() throws Exception { sink2.finish(); assertTrue(sink2.isFinished()); assertTrue(source.isFinished()); - assertFalse(sourceCompletion.isDone()); source.finish(); - sourceCompletion.actionGet(10, TimeUnit.SECONDS); ESTestCase.terminate(threadPool); for (Page page : pages) { page.releaseBlocks(); } + safeGet(remoteSinkFuture); } /** @@ -300,6 +305,7 @@ Set runConcurrentTest( DriverContext dc = driverContext(); Driver d = new Driver( "test-session:1", + "test", 0, 0, dc, @@ -318,6 +324,7 @@ Set runConcurrentTest( DriverContext dc = driverContext(); Driver d = new Driver( "test-session:2", + "test", 0, 0, dc, @@ -343,47 +350,45 @@ protected void start(Driver driver, ActionListener listener) { public void testConcurrentWithHandlers() { BlockFactory blockFactory = blockFactory(); - PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); - var sourceExchanger = new ExchangeSourceHandler( - randomExchangeBuffer(), - threadPool.executor(ESQL_TEST_EXECUTOR), - sourceCompletionFuture - ); - List sinkHandlers = new ArrayList<>(); - Supplier exchangeSink = () -> { - final ExchangeSinkHandler sinkHandler; - if (sinkHandlers.isEmpty() == false && randomBoolean()) { - sinkHandler = randomFrom(sinkHandlers); - } else { - sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier()); - sourceExchanger.addRemoteSink( - sinkHandler::fetchPageAsync, - randomBoolean(), - () -> {}, - randomIntBetween(1, 3), - ActionListener.noop() - ); - sinkHandlers.add(sinkHandler); - } - return sinkHandler.createExchangeSink(() -> {}); - }; - final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); - final int maxOutputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000); - Set actualSeqNos = runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink); - var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet()); - assertThat(actualSeqNos, hasSize(expectedSeqNos.size())); - assertThat(actualSeqNos, equalTo(expectedSeqNos)); - sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); + var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture remoteSinksFuture = new PlainActionFuture<>(); + try (RefCountingListener refs = new RefCountingListener(remoteSinksFuture)) { + List sinkHandlers = new ArrayList<>(); + Supplier exchangeSink = () -> { + final ExchangeSinkHandler sinkHandler; + if (sinkHandlers.isEmpty() == false && randomBoolean()) { + sinkHandler = randomFrom(sinkHandlers); + } else { + sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier()); + sourceExchanger.addRemoteSink( + sinkHandler::fetchPageAsync, + randomBoolean(), + () -> {}, + randomIntBetween(1, 3), + refs.acquire() + ); + sinkHandlers.add(sinkHandler); + } + return sinkHandler.createExchangeSink(() -> {}); + }; + final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); + final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); + Set actualSeqNos = runConcurrentTest( + maxInputSeqNo, + maxOutputSeqNo, + sourceExchanger::createExchangeSource, + exchangeSink + ); + var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet()); + assertThat(actualSeqNos, hasSize(expectedSeqNos.size())); + assertThat(actualSeqNos, equalTo(expectedSeqNos)); + } + safeGet(remoteSinksFuture); } public void testExchangeSourceContinueOnFailure() { BlockFactory blockFactory = blockFactory(); - PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); - var exchangeSourceHandler = new ExchangeSourceHandler( - randomExchangeBuffer(), - threadPool.executor(ESQL_TEST_EXECUTOR), - sourceCompletionFuture - ); + var exchangeSourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); Set expectedSeqNos = ConcurrentCollections.newConcurrentSet(); @@ -391,57 +396,65 @@ public void testExchangeSourceContinueOnFailure() { AtomicInteger totalSinks = new AtomicInteger(); AtomicInteger failedSinks = new AtomicInteger(); AtomicInteger completedSinks = new AtomicInteger(); - Supplier exchangeSink = () -> { - var sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier()); - int failAfter = randomBoolean() ? 
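This rewrite swaps the handler-wide completion future for a RefCountingListener: each addRemoteSink call acquires one child listener, and remoteSinksFuture completes only after the try-with-resources block closes and every child has reported, surfacing the first failure if any. A JDK-only approximation of that reference-counting pattern:

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.function.Consumer;

    final class MiniRefCounting implements AutoCloseable {
        final CompletableFuture<Void> parent = new CompletableFuture<>();
        private final AtomicInteger refs = new AtomicInteger(1); // initial ref held until close()
        private final AtomicReference<Exception> failure = new AtomicReference<>();

        // One child per remote sink; invoke with null on success, the error otherwise.
        Consumer<Exception> acquire() {
            refs.incrementAndGet();
            return e -> {
                if (e != null) {
                    failure.compareAndSet(null, e); // first failure wins
                }
                release();
            };
        }

        @Override
        public void close() {
            release();
        }

        private void release() {
            if (refs.decrementAndGet() == 0) {
                Exception e = failure.get();
                if (e == null) {
                    parent.complete(null);
                } else {
                    parent.completeExceptionally(e);
                }
            }
        }
    }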
Integer.MAX_VALUE : randomIntBetween(0, 100); - AtomicInteger fetched = new AtomicInteger(); - int instance = randomIntBetween(1, 3); - totalSinks.incrementAndGet(); - AtomicBoolean sinkFailed = new AtomicBoolean(); - exchangeSourceHandler.addRemoteSink((allSourcesFinished, listener) -> { - if (fetched.incrementAndGet() > failAfter) { - sinkHandler.fetchPageAsync(true, listener.delegateFailure((l, r) -> { - failedRequests.incrementAndGet(); - sinkFailed.set(true); - listener.onFailure(new CircuitBreakingException("simulated", CircuitBreaker.Durability.PERMANENT)); - })); - } else { - sinkHandler.fetchPageAsync(allSourcesFinished, listener.delegateFailure((l, r) -> { - Page page = r.takePage(); - if (page != null) { - IntBlock block = page.getBlock(0); - for (int i = 0; i < block.getPositionCount(); i++) { - int v = block.getInt(i); - if (v < maxOutputSeqNo) { - expectedSeqNos.add(v); + PlainActionFuture remoteSinksFuture = new PlainActionFuture<>(); + try (RefCountingListener refs = new RefCountingListener(remoteSinksFuture)) { + Supplier exchangeSink = () -> { + var sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier()); + int failAfter = randomBoolean() ? Integer.MAX_VALUE : randomIntBetween(0, 100); + AtomicInteger fetched = new AtomicInteger(); + int instance = randomIntBetween(1, 3); + totalSinks.incrementAndGet(); + AtomicBoolean sinkFailed = new AtomicBoolean(); + ActionListener oneSinkListener = refs.acquire(); + exchangeSourceHandler.addRemoteSink((allSourcesFinished, listener) -> { + if (fetched.incrementAndGet() > failAfter) { + sinkHandler.fetchPageAsync(true, listener.delegateFailure((l, r) -> { + failedRequests.incrementAndGet(); + sinkFailed.set(true); + listener.onFailure(new CircuitBreakingException("simulated", CircuitBreaker.Durability.PERMANENT)); + })); + } else { + sinkHandler.fetchPageAsync(allSourcesFinished, listener.delegateFailure((l, r) -> { + Page page = r.takePage(); + if (page != null) { + IntBlock block = page.getBlock(0); + for (int i = 0; i < block.getPositionCount(); i++) { + int v = block.getInt(i); + if (v < maxOutputSeqNo) { + expectedSeqNos.add(v); + } } } - } - l.onResponse(new ExchangeResponse(blockFactory, page, r.finished())); - })); - } - }, false, () -> {}, instance, ActionListener.wrap(r -> { - assertFalse(sinkFailed.get()); - completedSinks.incrementAndGet(); - }, e -> { - assertTrue(sinkFailed.get()); - failedSinks.incrementAndGet(); - })); - return sinkHandler.createExchangeSink(() -> {}); - }; - Set actualSeqNos = runConcurrentTest( - maxInputSeqNo, - maxOutputSeqNo, - exchangeSourceHandler::createExchangeSource, - exchangeSink - ); - assertThat(actualSeqNos, equalTo(expectedSeqNos)); - assertThat(completedSinks.get() + failedSinks.get(), equalTo(totalSinks.get())); - sourceCompletionFuture.actionGet(); + l.onResponse(new ExchangeResponse(blockFactory, page, r.finished())); + })); + } + }, false, () -> {}, instance, ActionListener.wrap(r -> { + assertFalse(sinkFailed.get()); + completedSinks.incrementAndGet(); + oneSinkListener.onResponse(null); + }, e -> { + assertTrue(sinkFailed.get()); + failedSinks.incrementAndGet(); + oneSinkListener.onFailure(e); + })); + return sinkHandler.createExchangeSink(() -> {}); + }; + Set actualSeqNos = runConcurrentTest( + maxInputSeqNo, + maxOutputSeqNo, + exchangeSourceHandler::createExchangeSource, + exchangeSink + ); + assertThat(actualSeqNos, equalTo(expectedSeqNos)); + } if (failedRequests.get() > 0) { + 
expectThrows(CircuitBreakingException.class, () -> remoteSinksFuture.actionGet(1, TimeUnit.MINUTES)); assertThat(failedSinks.get(), greaterThan(0)); + assertThat(completedSinks.get() + failedSinks.get(), equalTo(totalSinks.get())); } else { + safeGet(remoteSinksFuture); assertThat(failedSinks.get(), equalTo(0)); + assertThat(completedSinks.get(), equalTo(totalSinks.get())); } } @@ -458,7 +471,7 @@ public void testClosingSinks() { assertFalse(sink.waitForWriting().listener().isDone()); PlainActionFuture future = new PlainActionFuture<>(); sinkExchanger.fetchPageAsync(true, future); - ExchangeResponse resp = future.actionGet(); + ExchangeResponse resp = safeGet(future); assertTrue(resp.finished()); assertNull(resp.takePage()); assertTrue(sink.waitForWriting().listener().isDone()); @@ -466,7 +479,7 @@ public void testClosingSinks() { } public void testFinishEarly() throws Exception { - ExchangeSourceHandler sourceHandler = new ExchangeSourceHandler(20, threadPool.generic(), ActionListener.noop()); + ExchangeSourceHandler sourceHandler = new ExchangeSourceHandler(20, threadPool.generic()); Semaphore permits = new Semaphore(between(1, 5)); BlockFactory blockFactory = blockFactory(); Queue pages = ConcurrentCollections.newQueue(); @@ -537,12 +550,7 @@ public void testConcurrentWithTransportActions() { try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); - var sourceHandler = new ExchangeSourceHandler( - randomExchangeBuffer(), - threadPool.executor(ESQL_TEST_EXECUTOR), - sourceCompletionFuture - ); + var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); sourceHandler.addRemoteSink( @@ -563,7 +571,6 @@ public void testConcurrentWithTransportActions() { var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet()); assertThat(actualSeqNos, hasSize(expectedSeqNos.size())); assertThat(actualSeqNos, equalTo(expectedSeqNos)); - sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } @@ -613,22 +620,18 @@ public void sendResponse(TransportResponse transportResponse) { try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); - var sourceHandler = new ExchangeSourceHandler( - randomIntBetween(1, 128), - threadPool.executor(ESQL_TEST_EXECUTOR), - sourceCompletionFuture - ); + var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); + PlainActionFuture remoteSinkFuture = new PlainActionFuture<>(); sourceHandler.addRemoteSink( exchange0.newRemoteSink(task, exchangeId, node0, connection), true, () -> {}, randomIntBetween(1, 5), - ActionListener.noop() + remoteSinkFuture ); - Exception err = expectThrows( + Exception driverException = expectThrows( Exception.class, () -> runConcurrentTest( maxSeqNo, @@ -637,13 +640,36 @@ public void sendResponse(TransportResponse 
transportResponse) { () -> sinkHandler.createExchangeSink(() -> {}) ) ); - Throwable cause = ExceptionsHelper.unwrap(err, IOException.class); + assertThat(driverException, instanceOf(TaskCancelledException.class)); + var sinkException = expectThrows(Exception.class, remoteSinkFuture::actionGet); + Throwable cause = ExceptionsHelper.unwrap(sinkException, IOException.class); assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); PlainActionFuture sinkCompletionFuture = new PlainActionFuture<>(); sinkHandler.addCompletionListener(sinkCompletionFuture); - assertBusy(() -> assertTrue(sinkCompletionFuture.isDone())); - expectThrows(Exception.class, () -> sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS)); + safeGet(sinkCompletionFuture); + } + } + + public void testNoCyclicException() throws Exception { + PlainActionFuture future = new PlainActionFuture<>(); + try (EsqlRefCountingListener refs = new EsqlRefCountingListener(future)) { + var exchangeSourceHandler = new ExchangeSourceHandler(between(10, 100), threadPool.generic()); + int numSinks = between(5, 10); + for (int i = 0; i < numSinks; i++) { + RemoteSink remoteSink = (allSourcesFinished, listener) -> threadPool.schedule( + () -> listener.onFailure(new IOException("simulated")), + TimeValue.timeValueMillis(1), + threadPool.generic() + ); + exchangeSourceHandler.addRemoteSink(remoteSink, randomBoolean(), () -> {}, between(1, 3), refs.acquire()); + } + } + Exception err = expectThrows(Exception.class, () -> future.actionGet(10, TimeUnit.SECONDS)); + assertThat(ExceptionsHelper.unwrap(err, IOException.class).getMessage(), equalTo("simulated")); + try (BytesStreamOutput output = new BytesStreamOutput()) { + // ensure no cyclic exception + ElasticsearchException.writeException(err, output); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java index 454088c1751e8..df0a31965055e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java @@ -99,13 +99,12 @@ public void testQueries() throws Exception { // 3 -> [] -> [] // 4 -> [a3] -> [3] // 5 -> [] -> [] - var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator( blockFactory, 128, queryList, directoryData.reader, - warnings + warnings() ); Page page = queryOperator.getOutput(); assertNotNull(page); @@ -156,13 +155,12 @@ public void testRandomMatchQueries() throws Exception { try (var directoryData = makeDirectoryWith(directoryTermsList); var inputTerms = makeTermsBlock(inputTermsList)) { var queryList = QueryList.rawTermQueryList(directoryData.field, directoryData.searchExecutionContext, inputTerms); int maxPageSize = between(1, 256); - var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator( blockFactory, maxPageSize, queryList, directoryData.reader, - warnings + warnings() ); Map> actualPositions = new HashMap<>(); while (queryOperator.isFinished() == false) { @@ -193,7 +191,7 @@ public void testQueries_OnlySingleValues() throws Exception { ) ) { 
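testNoCyclicException above round-trips the aggregated failure through ElasticsearchException.writeException because serialization walks the cause and suppressed edges, so a cycle (a suppresses b while b suppresses a) must never be built. A sketch of the invariant being protected, conservatively treating any revisited node as a cycle:

    import java.util.Collections;
    import java.util.IdentityHashMap;
    import java.util.Set;

    final class ExceptionGraphs {
        static boolean isAcyclic(Throwable root) {
            return walk(root, Collections.newSetFromMap(new IdentityHashMap<>()));
        }

        private static boolean walk(Throwable t, Set<Throwable> seen) {
            if (t == null) {
                return true;
            }
            if (seen.add(t) == false) {
                return false; // revisited: cause/suppressed graph is not a tree
            }
            for (Throwable s : t.getSuppressed()) {
                if (walk(s, seen) == false) {
                    return false;
                }
            }
            return walk(t.getCause(), seen);
        }
    }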
QueryList queryList = QueryList.rawTermQueryList(directoryData.field, directoryData.searchExecutionContext, inputTerms) - .onlySingleValues(); + .onlySingleValues(warnings(), "multi-value found"); // pos -> terms -> docs // ----------------------------- // 0 -> [b2] -> [] @@ -202,13 +200,12 @@ public void testQueries_OnlySingleValues() throws Exception { // 3 -> [] -> [] // 4 -> [a3] -> [3] // 5 -> [a3, a2, z2, xx] -> [] - var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test lookup"); EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator( blockFactory, 128, queryList, directoryData.reader, - warnings + warnings() ); Page page = queryOperator.getOutput(); assertNotNull(page); @@ -220,6 +217,10 @@ public void testQueries_OnlySingleValues() throws Exception { assertThat(BlockUtils.toJavaObject(positions, 0), equalTo(4)); page.releaseBlocks(); assertTrue(queryOperator.isFinished()); + assertWarnings( + "Line -1:-1: evaluation of [test] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: multi-value found" + ); } } @@ -228,6 +229,10 @@ private static IntVector getDocVector(Page page, int blockIndex) { return doc.asVector().docs(); } + private static Warnings warnings() { + return Warnings.createWarnings(DriverContext.WarningsMode.COLLECT, -1, -1, "test"); + } + private record DirectoryData( DirectoryReader reader, MockDirectoryWrapper dir, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index e63e8b63d6ee9..49d91df556d14 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -543,6 +543,7 @@ public void testCollectAllValues() { List> actualTop = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -633,6 +634,7 @@ public void testCollectAllValues_RandomMultiValues() { List> actualTop = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -668,6 +670,7 @@ private List> topNTwoColumns( List> outputValues = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new TupleBlockSourceOperator(driverContext.blockFactory(), inputValues, randomIntBetween(1, 1000)), List.of( @@ -938,6 +941,7 @@ private void assertSortingOnMV( int topCount = randomIntBetween(1, values.size()); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(page).iterator()), List.of( @@ -1112,6 +1116,7 @@ public void testIPSortingSingleValue() throws UnknownHostException { List> actual = new ArrayList<>(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( @@ -1239,6 +1244,7 @@ private void assertIPSortingOnMultiValues( DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( @@ -1327,6 +1333,7 @@ public void testZeroByte() { DriverContext driverContext = 
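The enrich tests now collect warnings (WarningsMode.COLLECT) and route multi-value handling through onlySingleValues(warnings, message): a multi-valued position yields a null result plus a recorded warning instead of failing the query. Roughly this behavior, sketched with plain collections and a hypothetical helper rather than the real QueryList (the real Warnings also prefixes source-line coordinates):

    import java.util.ArrayList;
    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Set;

    final class SingleValueFilter {
        final Set<String> warnings = new LinkedHashSet<>(); // deduplicated, in order

        <T> List<T> filter(List<List<T>> positions, String message) {
            List<T> out = new ArrayList<>();
            for (List<T> values : positions) {
                if (values.size() == 1) {
                    out.add(values.get(0));
                } else {
                    warnings.add("java.lang.IllegalArgumentException: " + message);
                    out.add(null); // multi-valued position treated as null
                }
            }
            return out;
        }
    }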
driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( @@ -1367,6 +1374,7 @@ public void testErrorBeforeFullyDraining() { DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( + "test", driverContext, new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.range(0, docCount)), List.of( diff --git a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java index a46dca4ae38cf..876461cafcf84 100644 --- a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java @@ -46,7 +46,6 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.in; /** * Base tests for {@link Operator}s that are not {@link SourceOperator} or {@link SinkOperator}. @@ -190,6 +189,7 @@ protected final List oneDriverPerPageList(Iterator> source, Sup List in = source.next(); try ( Driver d = new Driver( + "test", driverContext(), new CannedSourceOperator(in.iterator()), operators.get(), @@ -241,16 +241,16 @@ protected final void assertSimple(DriverContext context, int size) { * Tests that finish then close without calling {@link Operator#getOutput} to * retrieve a potential last page, releases all memory. */ - public void testSimpleFinishClose() throws Exception { + public void testSimpleFinishClose() { DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 1)); - assert input.size() == 1 : "Expected single page, got: " + input; // eventually, when driverContext always returns a tracking factory, we can enable this assertion // assertThat(driverContext.blockFactory().breaker().getUsed(), greaterThan(0L)); - Page page = input.get(0); try (var operator = simple().get(driverContext)) { assert operator.needsInput(); - operator.addInput(page); + for (Page page : input) { + operator.addInput(page); + } operator.finish(); } } @@ -264,6 +264,7 @@ protected final List drive(List operators, Iterator input, boolean success = false; try ( Driver d = new Driver( + "test", driverContext, new CannedSourceOperator(input), operators, @@ -291,6 +292,7 @@ public static void runDriver(List drivers) { for (int i = 0; i < dummyDrivers; i++) { drivers.add( new Driver( + "test", "dummy-session", 0, 0, diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 790b12346bb14..9623d6071d320 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -70,6 +70,11 @@ protected void shouldSkipTest(String testName) throws IOException { assumeTrue("Test " + testName + " is skipped on " + bwcVersion, isEnabled(testName, instructions, bwcVersion)); } + @Override + protected boolean shouldSkipTestsWithSemanticTextFields() { + 
return true; + } + @Override protected boolean enableRoundingDoubleValuesOnAsserting() { return true; diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java index 5f3f135810322..6a1b9f9d051eb 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java @@ -31,6 +31,10 @@ public static ElasticsearchCluster remoteCluster() { } public static ElasticsearchCluster localCluster(ElasticsearchCluster remoteCluster) { + return localCluster(remoteCluster, true); + } + + public static ElasticsearchCluster localCluster(ElasticsearchCluster remoteCluster, Boolean skipUnavailable) { return ElasticsearchCluster.local() .name(LOCAL_CLUSTER_NAME) .distribution(DistributionType.DEFAULT) @@ -41,6 +45,7 @@ public static ElasticsearchCluster localCluster(ElasticsearchCluster remoteClust .setting("node.roles", "[data,ingest,master,remote_cluster_client]") .setting("cluster.remote.remote_cluster.seeds", () -> "\"" + remoteCluster.getTransportEndpoint(0) + "\"") .setting("cluster.remote.connections_per_cluster", "1") + .setting("cluster.remote." + REMOTE_CLUSTER_NAME + ".skip_unavailable", skipUnavailable.toString()) .shared(true) .setting("cluster.routing.rebalance.enable", "none") .build(); diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/EsqlRestValidationSkipUnFalseIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/EsqlRestValidationSkipUnFalseIT.java new file mode 100644 index 0000000000000..4b9957f9f5cea --- /dev/null +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/EsqlRestValidationSkipUnFalseIT.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.ccq; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +// Duplicate of EsqlRestValidationIT test where skip_unavailable is set to false +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) +public class EsqlRestValidationSkipUnFalseIT extends EsqlRestValidationIT { + static ElasticsearchCluster localCluster = Clusters.localCluster(remoteCluster, false); + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); + + @Override + protected String getTestRestCluster() { + return localCluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 4d06db94801bf..723c5e2dfd1aa 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -48,6 +48,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V3; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V12; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; @@ -124,9 +125,15 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); + assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V3.capabilityName())); assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V12.capabilityName())); } + @Override + protected boolean shouldSkipTestsWithSemanticTextFields() { + return true; + } + private TestFeatureService remoteFeaturesService() throws IOException { if (remoteFeaturesService == null) { var remoteNodeVersions = readVersionsFromNodesInfo(remoteClusterClient()); diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index b838d8ae284a4..791f5dacdce64 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ 
b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -38,7 +38,10 @@ import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.esql.ccq.Clusters.REMOTE_CLUSTER_NAME; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.any; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasKey; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class MultiClustersIT extends ESRestTestCase { diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java index 7c81f97714a6f..d8c68dd5281aa 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/RequestIndexFilteringIT.java @@ -14,9 +14,12 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.esql.qa.rest.RequestIndexFilteringTestCase; +import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -25,6 +28,12 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class RequestIndexFilteringIT extends RequestIndexFilteringTestCase { @@ -49,6 +58,8 @@ public void setRemoteClient() throws IOException { } } + private boolean isCCSRequest; + @AfterClass public static void closeRemoteClients() throws IOException { try { @@ -66,13 +77,20 @@ protected void indexTimestampData(int docs, String indexName, String date, Strin @Override protected String from(String... 
indexName) { - if (randomBoolean()) { + isCCSRequest = randomBoolean(); + if (isCCSRequest) { return "FROM *:" + String.join(",*:", indexName); } else { return "FROM " + String.join(",", indexName); } } + @Override + public Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { + requestObject.includeCCSMetadata(true); + return super.runEsql(requestObject); + } + @After public void wipeRemoteTestData() throws IOException { try { @@ -82,4 +100,35 @@ public void wipeRemoteTestData() throws IOException { assertEquals(404, re.getResponse().getStatusLine().getStatusCode()); } } + + private MapMatcher getClustersMetadataMatcher() { + MapMatcher mapMatcher = matchesMap(); + mapMatcher = mapMatcher.entry("running", 0); + mapMatcher = mapMatcher.entry("total", 1); + mapMatcher = mapMatcher.entry("failed", 0); + mapMatcher = mapMatcher.entry("partial", 0); + mapMatcher = mapMatcher.entry("successful", 1); + mapMatcher = mapMatcher.entry("skipped", 0); + mapMatcher = mapMatcher.entry( + "details", + matchesMap().entry( + Clusters.REMOTE_CLUSTER_NAME, + matchesMap().entry("_shards", matchesMap().extraOk()) + .entry("took", greaterThanOrEqualTo(0)) + .entry("indices", instanceOf(String.class)) + .entry("status", "successful") + ) + ); + return mapMatcher; + } + + @Override + protected void assertQueryResult(Map result, Matcher columnMatcher, Matcher valuesMatcher) { + var matcher = getResultMatcher(result).entry("columns", columnMatcher).entry("values", valuesMatcher); + if (isCCSRequest) { + matcher = matcher.entry("_clusters", getClustersMetadataMatcher()); + } + assertMap(result, matcher); + } + } diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java index 64c113345bd53..c0e82a455a4f4 100644 --- a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java @@ -37,4 +37,9 @@ public EsqlSpecIT( protected boolean enableRoundingDoubleValuesOnAsserting() { return true; } + + @Override + protected boolean shouldSkipTestsWithSemanticTextFields() { + return true; + } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java index 368eebe808eee..42974795a77db 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java @@ -42,4 +42,9 @@ protected boolean enableRoundingDoubleValuesOnAsserting() { // This suite runs with more than one node and three shards in serverless return cluster.getNumNodes() > 1; } + + @Override + protected boolean shouldSkipTestsWithSemanticTextFields() { + return cluster.getNumNodes() > 1; + } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java index 
9b98c29f5c3e3..d322263ce9182 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.qa.rest.generative.GenerativeRestTest; import org.junit.ClassRule; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102084") +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/121754") @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class GenerativeIT extends GenerativeRestTest { @ClassRule diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 601ce819224b5..58c82d800954c 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -40,8 +40,10 @@ import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.any; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; @@ -287,7 +289,6 @@ public void testProfile() throws IOException { equalTo(List.of(List.of(499.5d))) ); - List> signatures = new ArrayList<>(); @SuppressWarnings("unchecked") List> profiles = (List>) ((Map) result.get("profile")).get("drivers"); for (Map p : profiles) { @@ -299,26 +300,34 @@ public void testProfile() throws IOException { for (Map o : operators) { sig.add(checkOperatorProfile(o)); } - signatures.add(sig); + String taskDescription = p.get("task_description").toString(); + switch (taskDescription) { + case "data" -> assertMap( + sig, + matchesList().item("LuceneSourceOperator") + .item("ValuesSourceReaderOperator") + .item("AggregationOperator") + .item("ExchangeSinkOperator") + ); + case "node_reduce" -> assertThat( + sig, + either(matchesList().item("ExchangeSourceOperator").item("ExchangeSinkOperator")).or( + matchesList().item("ExchangeSourceOperator").item("AggregationOperator").item("ExchangeSinkOperator") + ) + ); + case "final" -> assertMap( + sig, + matchesList().item("ExchangeSourceOperator") + .item("AggregationOperator") + .item("ProjectOperator") + .item("LimitOperator") + .item("EvalOperator") + .item("ProjectOperator") + .item("OutputOperator") + ); + default -> throw new IllegalArgumentException("can't match " + taskDescription); + } } - var readProfile = matchesList().item("LuceneSourceOperator") - .item("ValuesSourceReaderOperator") - .item("AggregationOperator") - .item("ExchangeSinkOperator"); - var mergeProfile = matchesList().item("ExchangeSourceOperator") - .item("AggregationOperator") - .item("ProjectOperator") - .item("LimitOperator") - .item("EvalOperator") - .item("ProjectOperator") - .item("OutputOperator"); - var emptyReduction = 
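The testProfile rewrite stops guessing a driver's role from its operator list and instead dispatches on the new task_description field, one expected pipeline per role. Approximately this mapping; node_reduce may additionally contain an AggregationOperator between the exchange operators, which is why the diff matches it with either():

    import java.util.List;
    import java.util.Map;

    final class ExpectedPipelines {
        static final Map<String, List<String>> BY_TASK_DESCRIPTION = Map.of(
            "data",
            List.of("LuceneSourceOperator", "ValuesSourceReaderOperator", "AggregationOperator", "ExchangeSinkOperator"),
            "node_reduce",
            List.of("ExchangeSourceOperator", "ExchangeSinkOperator"),
            "final",
            List.of(
                "ExchangeSourceOperator",
                "AggregationOperator",
                "ProjectOperator",
                "LimitOperator",
                "EvalOperator",
                "ProjectOperator",
                "OutputOperator"
            )
        );
    }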
matchesList().item("ExchangeSourceOperator").item("ExchangeSinkOperator"); - var reduction = matchesList().item("ExchangeSourceOperator").item("AggregationOperator").item("ExchangeSinkOperator"); - assertThat( - signatures, - Matchers.either(containsInAnyOrder(readProfile, reduction, mergeProfile)) - .or(containsInAnyOrder(readProfile, emptyReduction, mergeProfile)) - ); } public void testProfileOrdinalsGroupingOperator() throws IOException { @@ -391,6 +400,7 @@ public void testInlineStatsProfile() throws IOException { } signatures.add(sig); } + // TODO adapt this to use task_description once this is reenabled assertThat( signatures, containsInAnyOrder( @@ -491,10 +501,10 @@ public void testForceSleepsProfile() throws IOException { MapMatcher sleepMatcher = matchesMap().entry("reason", "exchange empty") .entry("sleep_millis", greaterThan(0L)) .entry("wake_millis", greaterThan(0L)); - if (operators.contains("LuceneSourceOperator")) { - assertMap(sleeps, matchesMap().entry("counts", Map.of()).entry("first", List.of()).entry("last", List.of())); - } else if (operators.contains("ExchangeSourceOperator")) { - if (operators.contains("ExchangeSinkOperator")) { + String taskDescription = p.get("task_description").toString(); + switch (taskDescription) { + case "data" -> assertMap(sleeps, matchesMap().entry("counts", Map.of()).entry("first", List.of()).entry("last", List.of())); + case "node_reduce" -> { assertMap(sleeps, matchesMap().entry("counts", matchesMap().entry("exchange empty", greaterThan(0))).extraOk()); @SuppressWarnings("unchecked") List> first = (List>) sleeps.get("first"); @@ -506,8 +516,8 @@ public void testForceSleepsProfile() throws IOException { for (Map s : last) { assertMap(s, sleepMatcher); } - - } else { + } + case "final" -> { assertMap( sleeps, matchesMap().entry("counts", matchesMap().entry("exchange empty", 1)) @@ -515,14 +525,14 @@ public void testForceSleepsProfile() throws IOException { .entry("last", List.of(sleepMatcher)) ); } - } else { - fail("unknown signature: " + operators); + default -> throw new IllegalArgumentException("unknown task: " + taskDescription); } } } private MapMatcher commonProfile() { - return matchesMap().entry("start_millis", greaterThan(0L)) + return matchesMap().entry("task_description", any(String.class)) + .entry("start_millis", greaterThan(0L)) .entry("stop_millis", greaterThan(0L)) .entry("iterations", greaterThan(0L)) .entry("cpu_nanos", greaterThan(0L)) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 18bfb6b8676ce..d5e4651a847cf 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -70,6 +70,7 @@ import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.deleteInferenceEndpoint; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.loadDataSetIntoEs; import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.SEMANTIC_TEXT_TYPE; // This test can run very long in serverless configurations @TimeoutSuite(millis = 30 * TimeUnits.MINUTE) @@ -128,15 +129,22 @@ protected EsqlSpecTestCase( @Before public void setup() throws IOException { + if (shouldSkipTestsWithSemanticTextFields()) { + 
assumeFalse("semantic_text tests are muted", testCase.requiredCapabilities.contains(SEMANTIC_TEXT_TYPE.capabilityName())); + } if (supportsInferenceTestService() && clusterHasInferenceEndpoint(client()) == false) { createInferenceEndpoint(client()); } - if (indexExists(availableDatasetsForEs(client(), supportsIndexModeLookup()).iterator().next().indexName()) == false) { loadDataSetIntoEs(client(), supportsIndexModeLookup()); } } + // https://github.com/elastic/elasticsearch/issues/121411 + protected boolean shouldSkipTestsWithSemanticTextFields() { + return false; + } + @AfterClass public static void wipeTestData() throws IOException { try { @@ -172,6 +180,9 @@ protected void shouldSkipTest(String testName) throws IOException { } checkCapabilities(adminClient(), testFeatureService, testName, testCase); assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, instructions, Version.CURRENT)); + if (shouldSkipTestsWithSemanticTextFields()) { + assumeFalse("semantic_text tests are muted", testCase.requiredCapabilities.contains(SEMANTIC_TEXT_TYPE.capabilityName())); + } } protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase) @@ -232,7 +243,7 @@ protected boolean supportsIndexModeLookup() throws IOException { protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - if (testCase.query.toUpperCase(Locale.ROOT).contains("LOOKUP")) { + if (testCase.query.toUpperCase(Locale.ROOT).contains("LOOKUP_\uD83D\uDC14")) { builder.tables(tables()); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java index ad61c52775eb9..1fdc11174ee09 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -17,6 +17,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.AssertWarnings; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.Assert; @@ -62,7 +63,7 @@ public void testTimestampFilterFromQuery() throws IOException { // filter includes both indices in the result (all columns, all rows) RestEsqlTestCase.RequestObjectBuilder builder = timestampFilter("gte", "2023-01-01").query(from("test*")); - assertResultMap( + assertQueryResult( runEsql(builder), matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -73,7 +74,7 @@ public void testTimestampFilterFromQuery() throws IOException { // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)! 
builder = timestampFilter("gte", "2024-01-01").query(from("test*")); - assertResultMap( + assertQueryResult( runEsql(builder), matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -84,7 +85,7 @@ public void testTimestampFilterFromQuery() throws IOException { // filter excludes both indices (no rows); the first analysis step fails because there are no columns, a second attempt succeeds // after eliminating the index filter. All columns are returned. builder = timestampFilter("gte", "2025-01-01").query(from("test*")); - assertResultMap( + assertQueryResult( runEsql(builder), matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -102,7 +103,7 @@ public void testFieldExistsFilter_KeepWildcard() throws IOException { // filter includes only test1. Columns and rows of test2 are filtered out RestEsqlTestCase.RequestObjectBuilder builder = existsFilter("id1").query(from("test*")); - assertResultMap( + assertQueryResult( runEsql(builder), matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -113,7 +114,7 @@ public void testFieldExistsFilter_KeepWildcard() throws IOException { // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)! builder = existsFilter("id1").query(from("test*") + " METADATA _index | KEEP _index, id*"); Map<String, Object> result = runEsql(builder); - assertResultMap( + assertQueryResult( result, matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword")) .item(matchesMap().entry("name", "id1").entry("type", "integer")), @@ -138,7 +139,7 @@ public void testFieldExistsFilter_With_ExplicitUseOfDiscardedIndexFields() throw from("test*") + " METADATA _index | SORT id2 | KEEP _index, id*" ); Map<String, Object> result = runEsql(builder); - assertResultMap( + assertQueryResult( result, matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword")) .item(matchesMap().entry("name", "id1").entry("type", "integer")) @@ -298,4 +299,9 @@ protected void indexTimestampDataForClient(RestClient client, int docs, String i Assert.assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)); } } + + protected void assertQueryResult(Map<String, Object> result, Matcher<?> columnMatcher, Matcher<?> valuesMatcher) { + assertResultMap(result, columnMatcher, valuesMatcher); + } + } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 66333421eeb75..dc979806370f0 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -33,7 +33,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.AssertWarnings; import org.elasticsearch.xpack.esql.EsqlTestUtils; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -660,10 +659,6 @@ public void testErrorMessageForArrayValuesInParams() throws IOException { } public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOException { - assumeTrue( - "named
parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); bulkLoadTestData(10); // positive var query = requestObjectBuilder().query( @@ -773,6 +768,33 @@ public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOExcepti } } + public void testErrorMessageForMissingParams() throws IOException { + ResponseException re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1").params("[]")) + ); + assertThat( + EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", ""), + containsString("line 1:23: Unknown query parameter [n1]") + ); + + re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1 and y == ?n2").params("[{\"n\" : \"v\"}]")) + ); + assertThat(EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", ""), containsString(""" + line 1:23: Unknown query parameter [n1], did you mean [n]?; line 1:36: Unknown query parameter [n2], did you mean [n]?""")); + + re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1 and y == ?n2").params("[{\"n1\" : \"v1\"}]")) + ); + assertThat( + EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", ""), + containsString("line 1:36: Unknown query parameter [n2], did you mean [n1]") + ); + } + public void testErrorMessageForLiteralDateMathOverflow() throws IOException { List<String> dateMathOverflowExpressions = List.of( "2147483647 day + 1 day", diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java index 844ccd3802bf1..5bf13d2d9c762 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java @@ -28,10 +28,13 @@ public record Column(String name, String type) {} public record QueryExecuted(String query, int depth, List<Column> outputSchema, Exception exception) {} public static String sourceCommand(List<String> availabeIndices) { - return switch (randomIntBetween(0, 2)) { + return switch (randomIntBetween(0, 1)) { case 0 -> from(availabeIndices); - case 1 -> metaFunctions(); - default -> row(); + // case 1 -> metaFunctions(); + default -> from(availabeIndices); + // TODO re-enable ROW.
+ // now it crashes nodes in some cases: exiting java.lang.AssertionError: estimated row size [0] wasn't set + // default -> row(); }; } @@ -41,8 +44,12 @@ public static String sourceCommand(List<String> availabeIndices) { * @param policies * @return a new command that can process it as input */ - public static String pipeCommand(List<Column> previousOutput, List<CsvTestsDataLoader.EnrichConfig> policies) { - return switch (randomIntBetween(0, 11)) { + public static String pipeCommand( + List<Column> previousOutput, + List<CsvTestsDataLoader.EnrichConfig> policies, + List<GenerativeRestTest.LookupIdx> lookupIndices + ) { + return switch (randomIntBetween(0, 12)) { case 0 -> dissect(previousOutput); case 1 -> drop(previousOutput); case 2 -> enrich(previousOutput, policies); @@ -54,10 +61,26 @@ public static String pipeCommand(List<Column> previousOutput, List<CsvTestsDataLoader.EnrichConfig> policies) { case 8 -> rename(previousOutput); case 9 -> sort(previousOutput); case 10 -> stats(previousOutput); + case 11 -> join(previousOutput, lookupIndices); default -> where(previousOutput); }; } + private static String join(List<Column> previousOutput, List<GenerativeRestTest.LookupIdx> lookupIndices) { + + GenerativeRestTest.LookupIdx lookupIdx = randomFrom(lookupIndices); + String lookupIdxName = lookupIdx.idxName(); + String idxKey = lookupIdx.key(); + String keyType = lookupIdx.keyType(); + + var candidateKeys = previousOutput.stream().filter(x -> x.type.equals(keyType)).toList(); + if (candidateKeys.isEmpty()) { + return ""; + } + Column key = randomFrom(candidateKeys); + return "| rename " + key.name + " as " + idxKey + " | lookup join " + lookupIdxName + " on " + idxKey; + } + private static String where(List<Column> previousOutput) { // TODO more complex conditions StringBuilder result = new StringBuilder(" | where "); @@ -191,7 +214,53 @@ private static String keep(List<Column> previousOutput) { } private static String randomName(List<Column> previousOutput) { - return previousOutput.get(randomIntBetween(0, previousOutput.size() - 1)).name(); + // we need to exclude <all-fields-projected> + // https://github.com/elastic/elasticsearch/issues/121741 + return randomFrom(previousOutput.stream().filter(x -> x.name().equals("<all-fields-projected>") == false).toList()).name(); + } + + private static String randomGroupableName(List<Column> previousOutput) { + // we need to exclude <all-fields-projected> + // https://github.com/elastic/elasticsearch/issues/121741 + var candidates = previousOutput.stream() + .filter(EsqlQueryGenerator::groupable) + .filter(x -> x.name().equals("<all-fields-projected>") == false) + .toList(); + if (candidates.isEmpty()) { + return null; + } + return randomFrom(candidates).name(); + } + + private static boolean groupable(Column col) { + return col.type.equals("keyword") + || col.type.equals("text") + || col.type.equals("long") + || col.type.equals("integer") + || col.type.equals("ip") + || col.type.equals("version"); + } + + private static String randomSortableName(List<Column> previousOutput) { + // we need to exclude <all-fields-projected> + // https://github.com/elastic/elasticsearch/issues/121741 + var candidates = previousOutput.stream() + .filter(EsqlQueryGenerator::sortable) + .filter(x -> x.name().equals("<all-fields-projected>") == false) + .toList(); + if (candidates.isEmpty()) { + return null; + } + return randomFrom(candidates).name(); + } + + private static boolean sortable(Column col) { + return col.type.equals("keyword") + || col.type.equals("text") + || col.type.equals("long") + || col.type.equals("integer") + || col.type.equals("ip") + || col.type.equals("version"); } private static String rename(List<Column> previousOutput) { @@ -199,7 +268,12 @@ private static String rename(List<Column> previousOutput) { List<String> proj = new ArrayList<>(); List<String> names = new ArrayList<>(previousOutput.stream().map(Column::name).collect(Collectors.toList())); for (int i = 0;
i < n; i++) { - String name = names.remove(randomIntBetween(0, names.size() - 1)); + var colN = randomIntBetween(0, names.size() - 1); + if (previousOutput.get(colN).type().endsWith("_range")) { + // ranges are not fully supported yet + continue; + } + String name = names.remove(colN); String newName; if (names.isEmpty() || randomBoolean()) { newName = randomAlphaOfLength(5); @@ -209,6 +283,9 @@ private static String rename(List<Column> previousOutput) { names.add(newName); proj.add(name + " AS " + newName); } + if (proj.isEmpty()) { + return ""; + } return " | rename " + proj.stream().collect(Collectors.joining(", ")); } @@ -227,7 +304,7 @@ private static String drop(List<Column> previousOutput) { name = "*" + name.substring(randomIntBetween(1, name.length() - 1)); } } - proj.add(name); + proj.add(name.contains("*") ? name : "`" + name + "`"); } return " | drop " + proj.stream().collect(Collectors.joining(", ")); } @@ -236,7 +313,11 @@ private static String sort(List<Column> previousOutput) { int n = randomIntBetween(1, previousOutput.size()); Set<String> proj = new HashSet<>(); for (int i = 0; i < n; i++) { - proj.add(randomName(previousOutput)); + String col = randomSortableName(previousOutput); + if (col == null) { + return "";// no sortable columns + } + proj.add(col); } return " | sort " + proj.stream() @@ -295,9 +376,10 @@ private static String stats(List<Column> previousOutput) { cmd.append(expression); } if (randomBoolean()) { - cmd.append(" by "); - - cmd.append(randomName(nonNull)); + var col = randomGroupableName(nonNull); + if (col != null) { + cmd.append(" by " + col); + } } return cmd.toString(); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index 588d5870d89ec..a841c2fc99958 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -35,7 +35,18 @@ public abstract class GenerativeRestTest extends ESRestTestCase { public static final Set<String> ALLOWED_ERRORS = Set.of( "Reference \\[.*\\] is ambiguous", "Cannot use field \\[.*\\] due to ambiguities", - "cannot sort on .*" + "cannot sort on .*", + "argument of \\[count_distinct\\(.*\\)\\] must", + "Cannot use field \\[.*\\] with unsupported type \\[.*_range\\]", + // warnings + "Field '.*' shadowed by field at line .*", + "evaluation of \\[.*\\] failed, treating result as null", // TODO investigate?
+ // Awaiting fixes + "estimated row size \\[0\\] wasn't set", // https://github.com/elastic/elasticsearch/issues/121739 + "unknown physical plan node \\[OrderExec\\]", // https://github.com/elastic/elasticsearch/issues/120817 + "Unknown column \\[<all-fields-projected>\\]", // https://github.com/elastic/elasticsearch/issues/121741 + // + "The incoming YAML document exceeds the limit:" // still to investigate, but it seems to be specific to the test framework ); public static final Set<Pattern> ALLOWED_ERROR_PATTERNS = ALLOWED_ERRORS.stream() @@ -64,6 +75,7 @@ public static void wipeTestData() throws IOException { public void test() { List<String> indices = availableIndices(); + List<LookupIdx> lookupIndices = lookupIndices(); List<CsvTestsDataLoader.EnrichConfig> policies = availableEnrichPolicies(); for (int i = 0; i < ITERATIONS; i++) { String command = EsqlQueryGenerator.sourceCommand(indices); @@ -76,7 +88,7 @@ public void test() { if (result.outputSchema().isEmpty()) { break; } - command = EsqlQueryGenerator.pipeCommand(result.outputSchema(), policies); + command = EsqlQueryGenerator.pipeCommand(result.outputSchema(), policies, lookupIndices); result = execute(result.query() + command, result.depth() + 1); if (result.exception() != null) { checkException(result); @@ -102,6 +114,9 @@ private EsqlQueryGenerator.QueryExecuted execute(String command, int depth) { return new EsqlQueryGenerator.QueryExecuted(command, depth, outputSchema, null); } catch (Exception e) { return new EsqlQueryGenerator.QueryExecuted(command, depth, null, e); + } catch (AssertionError ae) { + // this is for ensureNoWarnings() + return new EsqlQueryGenerator.QueryExecuted(command, depth, null, new RuntimeException(ae.getMessage())); } } @@ -116,7 +131,23 @@ private List<EsqlQueryGenerator.Column> outputSchema(Map<String, Object> a) { } private List<String> availableIndices() { - return new ArrayList<>(CSV_DATASET_MAP.keySet()); + return new ArrayList<>( + CSV_DATASET_MAP.entrySet() + .stream() + .filter(x -> x.getValue().requiresInferenceEndpoint() == false) + .map(Map.Entry::getKey) + .toList() + ); + } + + record LookupIdx(String idxName, String key, String keyType) {} + + private List<LookupIdx> lookupIndices() { + List<LookupIdx> result = new ArrayList<>(); + // we don't have key info from the dataset loader, let's hardcode it for now + result.add(new LookupIdx("languages_lookup", "language_code", "integer")); + result.add(new LookupIdx("message_types_lookup", "message", "keyword")); + return result; } List<CsvTestsDataLoader.EnrichConfig> availableEnrichPolicies() { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index f3b2ea0d864ff..6deda725dcad4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -11,6 +11,8 @@ import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; @@ -31,9 +33,11 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.index.IndexMode; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.search.SearchService; import
org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -66,15 +70,16 @@ import org.elasticsearch.xpack.esql.parser.QueryParam; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; import org.elasticsearch.xpack.esql.session.Configuration; -import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; -import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.versionfield.Version; import org.junit.Assert; @@ -111,6 +116,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.test.ESTestCase.assertEquals; import static org.elasticsearch.test.ESTestCase.between; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomBoolean; @@ -138,6 +144,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.mockito.Mockito.mock; public final class EsqlTestUtils { @@ -358,7 +365,14 @@ public static LogicalOptimizerContext unboundLogicalOptimizerContext() { public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)); - public static final QueryBuilderResolver MOCK_QUERY_BUILDER_RESOLVER = new MockQueryBuilderResolver(); + public static final TransportActionServices MOCK_TRANSPORT_ACTION_SERVICES = new TransportActionServices( + mock(TransportService.class), + mock(SearchService.class), + null, + mock(ClusterService.class), + mock(IndexNameExpressionResolver.class), + null + ); private EsqlTestUtils() {} @@ -403,6 +417,21 @@ public static <T> T as(Object node, Class<T> type) { return type.cast(node); } + public static Limit asLimit(Object node, Integer limitLiteral) { + return asLimit(node, limitLiteral, null); + } + + public static Limit asLimit(Object node, Integer limitLiteral, Boolean duplicated) { + Limit limit = as(node, Limit.class); + if (limitLiteral != null) { + assertEquals(as(limit.limit(), Literal.class).value(), limitLiteral); + } + if (duplicated != null) { + assertEquals(limit.duplicated(), duplicated); + } + return limit; + } + public static Map<String, EsField> loadMapping(String name) { return LoadMapping.loadMapping(name); } @@ -765,9 +794,8 @@ public static Literal randomLiteral(DataType type) { throw new UncheckedIOException(e); } } - case UNSUPPORTED, OBJECT, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new IllegalArgumentException( - "can't make random values for [" +
type.typeName() + "]" - ); + case UNSUPPORTED, OBJECT, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG, AGGREGATE_METRIC_DOUBLE -> + throw new IllegalArgumentException("can't make random values for [" + type.typeName() + "]"); }, type); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java deleted file mode 100644 index 7af3a89108fc0..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; -import org.elasticsearch.xpack.esql.session.Result; - -import java.util.function.BiConsumer; - -public class MockQueryBuilderResolver extends QueryBuilderResolver { - public MockQueryBuilderResolver() { - super(null, null, null, null); - } - - @Override - public void resolveQueryBuilders( - LogicalPlan plan, - ActionListener<Result> listener, - BiConsumer<LogicalPlan, ActionListener<Result>> callback - ) { - callback.accept(plan, listener); - } -} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/change_point.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/change_point.csv-spec new file mode 100644 index 0000000000000..64269e0d28491 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/change_point.csv-spec @@ -0,0 +1,1200 @@ +detect nothing (long) +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | CHANGE_POINT count ON @timestamp AS type, pvalue +; + +count:long | @timestamp:datetime | type:keyword | pvalue:double +4 | 2024-05-10T00:00:00.000Z | null | null +4 | 2024-05-10T00:01:00.000Z | null | null +8 | 2024-05-10T00:02:00.000Z | null | null +8 | 2024-05-10T00:03:00.000Z | null | null +5 | 2024-05-10T00:04:00.000Z | null | null +8 | 2024-05-10T00:05:00.000Z | null | null +10 | 2024-05-10T00:06:00.000Z | null | null +5 | 2024-05-10T00:07:00.000Z | null | null +12 | 2024-05-10T00:08:00.000Z | null | null +20 | 2024-05-10T00:09:00.000Z | null | null +5 | 2024-05-10T00:10:00.000Z | null | null +7 | 2024-05-10T00:11:00.000Z | null | null +8 | 2024-05-10T00:12:00.000Z | null | null +9 | 2024-05-10T00:13:00.000Z | null | null +9 | 2024-05-10T00:14:00.000Z | null | null +11 | 2024-05-10T00:15:00.000Z | null | null +7 | 2024-05-10T00:16:00.000Z | null | null +15 | 2024-05-10T00:17:00.000Z | null | null +17 | 2024-05-10T00:18:00.000Z | null | null +5 | 2024-05-10T00:19:00.000Z | null | null +10 | 2024-05-10T00:20:00.000Z | null | null +4 | 2024-05-10T00:21:00.000Z | null | null +9 | 2024-05-10T00:22:00.000Z | null | null +; + + +detect spike (long; default output columns) +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | EVAL count=count+CASE(@timestamp=="2024-05-10T00:08:00.000Z", 100, 0) + | CHANGE_POINT count ON @timestamp +; + +@timestamp:datetime | count:long | type:keyword | pvalue:double +2024-05-10T00:00:00.000Z | 4
| null | null +2024-05-10T00:01:00.000Z | 4 | null | null +2024-05-10T00:02:00.000Z | 8 | null | null +2024-05-10T00:03:00.000Z | 8 | null | null +2024-05-10T00:04:00.000Z | 5 | null | null +2024-05-10T00:05:00.000Z | 8 | null | null +2024-05-10T00:06:00.000Z | 10 | null | null +2024-05-10T00:07:00.000Z | 5 | null | null +2024-05-10T00:08:00.000Z | 112 | spike | 1.7502597878858522E-193 +2024-05-10T00:09:00.000Z | 20 | null | null +2024-05-10T00:10:00.000Z | 5 | null | null +2024-05-10T00:11:00.000Z | 7 | null | null +2024-05-10T00:12:00.000Z | 8 | null | null +2024-05-10T00:13:00.000Z | 9 | null | null +2024-05-10T00:14:00.000Z | 9 | null | null +2024-05-10T00:15:00.000Z | 11 | null | null +2024-05-10T00:16:00.000Z | 7 | null | null +2024-05-10T00:17:00.000Z | 15 | null | null +2024-05-10T00:18:00.000Z | 17 | null | null +2024-05-10T00:19:00.000Z | 5 | null | null +2024-05-10T00:20:00.000Z | 10 | null | null +2024-05-10T00:21:00.000Z | 4 | null | null +2024-05-10T00:22:00.000Z | 9 | null | null +; + + +detect step change (long; default timestamp) +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | EVAL count=count+CASE(@timestamp>="2024-05-10T00:11:00.000Z", 100, 0) + | CHANGE_POINT count AS type, pvalue +; + +@timestamp:datetime | count:long | type:keyword | pvalue:double +2024-05-10T00:00:00.000Z | 4 | null | null +2024-05-10T00:01:00.000Z | 4 | null | null +2024-05-10T00:02:00.000Z | 8 | null | null +2024-05-10T00:03:00.000Z | 8 | null | null +2024-05-10T00:04:00.000Z | 5 | null | null +2024-05-10T00:05:00.000Z | 8 | null | null +2024-05-10T00:06:00.000Z | 10 | null | null +2024-05-10T00:07:00.000Z | 5 | null | null +2024-05-10T00:08:00.000Z | 12 | null | null +2024-05-10T00:09:00.000Z | 20 | null | null +2024-05-10T00:10:00.000Z | 5 | null | null +2024-05-10T00:11:00.000Z | 107 | step_change | 3.0924162021968114E-23 +2024-05-10T00:12:00.000Z | 108 | null | null +2024-05-10T00:13:00.000Z | 109 | null | null +2024-05-10T00:14:00.000Z | 109 | null | null +2024-05-10T00:15:00.000Z | 111 | null | null +2024-05-10T00:16:00.000Z | 107 | null | null +2024-05-10T00:17:00.000Z | 115 | null | null +2024-05-10T00:18:00.000Z | 117 | null | null +2024-05-10T00:19:00.000Z | 105 | null | null +2024-05-10T00:20:00.000Z | 110 | null | null +2024-05-10T00:21:00.000Z | 104 | null | null +2024-05-10T00:22:00.000Z | 109 | null | null +; + + +detect dip (double) +required_capability: change_point + +FROM employees + | STATS salary=AVG(salary) BY height + | EVAL salary=CASE(height==2.1, 0.0, salary) + | CHANGE_POINT salary ON height AS type, pvalue +; + +height:double | salary:double | type:keyword | pvalue:double +1.41 | 40031.0 | null | null +1.42 | 34142.5 | null | null +1.44 | 40266.0 | null | null +1.45 | 49095.0 | null | null +1.46 | 39878.0 | null | null +1.47 | 60408.0 | null | null +1.48 | 44307.0 | null | null +1.5 | 31120.0 | null | null +1.51 | 28035.0 | null | null +1.52 | 41243.5 | null | null +1.53 | 60079.333333333336 | null | null +1.54 | 61358.0 | null | null +1.55 | 36876.5 | null | null +1.56 | 60335.0 | null | null +1.57 | 38486.0 | null | null +1.58 | 41701.5 | null | null +1.59 | 36575.666666666664 | null | null +1.61 | 55299.5 | null | null +1.63 | 70011.0 | null | null +1.64 | 38992.0 | null | null +1.66 | 28946.0 | null | null +1.68 | 42155.5 | null | null +1.69 | 45656.0 | null | null +1.7 | 65092.25 | null | null +1.74 | 53178.0 | null | null +1.75 | 43429.0 | null | null +1.77 | 54184.25 | null | null +1.78 | 
44147.5 | null | null +1.79 | 55360.0 | null | null +1.8 | 52833.0 | null | null +1.81 | 56475.666666666664 | null | null +1.82 | 56039.333333333336 | null | null +1.83 | 54195.333333333336 | null | null +1.85 | 66174.0 | null | null +1.87 | 47411.0 | null | null +1.89 | 58121.0 | null | null +1.9 | 37112.0 | null | null +1.91 | 39638.0 | null | null +1.92 | 67492.0 | null | null +1.93 | 33956.0 | null | null +1.94 | 48193.333333333336 | null | null +1.96 | 43026.0 | null | null +1.97 | 52851.0 | null | null +1.99 | 56068.0 | null | null +2.0 | 36314.666666666664 | null | null +2.01 | 35742.0 | null | null +2.03 | 51130.5 | null | null +2.04 | 49281.0 | null | null +2.05 | 63528.0 | null | null +2.06 | 56722.5 | null | null +2.07 | 39984.0 | null | null +2.08 | 60523.0 | null | null +2.09 | 38645.0 | null | null +2.1 | 0.0 | dip | 9.590143836835097E-6 +; + + +no stats command +required_capability: change_point + +FROM employees + | KEEP emp_no, salary + | CHANGE_POINT salary ON emp_no AS type, pvalue +; + +emp_no:integer | salary:integer | type:keyword | pvalue:double +10001 | 57305 | null | null +10002 | 56371 | null | null +10003 | 61805 | null | null +10004 | 36174 | null | null +10005 | 63528 | null | null +10006 | 60335 | null | null +10007 | 74572 | null | null +10008 | 43906 | null | null +10009 | 66174 | null | null +10010 | 45797 | null | null +10011 | 31120 | null | null +10012 | 48942 | null | null +10013 | 48735 | null | null +10014 | 37137 | null | null +10015 | 25324 | null | null +10016 | 61358 | null | null +10017 | 58715 | null | null +10018 | 56760 | null | null +10019 | 73717 | null | null +10020 | 40031 | null | null +10021 | 60408 | null | null +10022 | 48233 | null | null +10023 | 47896 | null | null +10024 | 64675 | null | null +10025 | 47411 | null | null +10026 | 28336 | null | null +10027 | 73851 | null | null +10028 | 39356 | null | null +10029 | 74999 | null | null +10030 | 67492 | null | null +10031 | 37716 | null | null +10032 | 62233 | null | null +10033 | 70011 | null | null +10034 | 39878 | null | null +10035 | 25945 | null | null +10036 | 60781 | null | null +10037 | 37691 | null | null +10038 | 35222 | null | null +10039 | 36051 | null | null +10040 | 37112 | null | null +10041 | 56415 | null | null +10042 | 30404 | null | null +10043 | 34341 | null | null +10044 | 39728 | null | null +10045 | 74970 | null | null +10046 | 50064 | null | null +10047 | 42716 | null | null +10048 | 26436 | null | null +10049 | 37853 | null | null +10050 | 43026 | null | null +10051 | 58121 | null | null +10052 | 55360 | null | null +10053 | 54462 | null | null +10054 | 65367 | null | null +10055 | 49281 | null | null +10056 | 33370 | null | null +10057 | 27215 | null | null +10058 | 38376 | null | null +10059 | 44307 | null | null +10060 | 29175 | null | null +10061 | 49095 | null | null +10062 | 65030 | null | null +10063 | 52121 | null | null +10064 | 33956 | null | null +10065 | 50249 | null | null +10066 | 31897 | null | null +10067 | 52044 | null | null +10068 | 28941 | null | null +10069 | 41933 | null | null +10070 | 54329 | null | null +10071 | 40612 | null | null +10072 | 54518 | null | null +10073 | 32568 | null | null +10074 | 38992 | null | null +10075 | 51956 | null | null +10076 | 62405 | null | null +10077 | 46595 | null | null +10078 | 69904 | null | null +10079 | 32263 | null | null +10080 | 52833 | null | null +10081 | 50128 | null | null +10082 | 49818 | null | null +10083 | 39110 | null | null +10084 | 28035 | null | null +10085 | 35742 | null | null 
+10086 | 68547 | null | null +10087 | 32272 | null | null +10088 | 39638 | null | null +10089 | 43602 | null | null +10090 | 44956 | null | null +10091 | 38645 | null | null +10092 | 25976 | null | null +10093 | 45656 | null | null +10094 | 66817 | null | null +10095 | 37702 | null | null +10096 | 43889 | null | null +10097 | 71165 | null | null +10098 | 44817 | null | null +10099 | 73578 | null | null +10100 | 68431 | null | null +; + + +where before change point +required_capability: change_point + +FROM employees + | KEEP emp_no, salary + | EVAL salary = CASE(emp_no==10022, 100000, salary) + | EVAL salary = CASE(emp_no==10033, 1000000, salary) + | WHERE emp_no < 10025 + | CHANGE_POINT salary ON emp_no AS type, pvalue +; + +emp_no:integer | salary:integer | type:keyword | pvalue:double +10001 | 57305 | null | null +10002 | 56371 | null | null +10003 | 61805 | null | null +10004 | 36174 | null | null +10005 | 63528 | null | null +10006 | 60335 | null | null +10007 | 74572 | null | null +10008 | 43906 | null | null +10009 | 66174 | null | null +10010 | 45797 | null | null +10011 | 31120 | null | null +10012 | 48942 | null | null +10013 | 48735 | null | null +10014 | 37137 | null | null +10015 | 25324 | null | null +10016 | 61358 | null | null +10017 | 58715 | null | null +10018 | 56760 | null | null +10019 | 73717 | null | null +10020 | 40031 | null | null +10021 | 60408 | null | null +10022 | 100000 | spike | 0.0019710754505321004 +10023 | 47896 | null | null +10024 | 64675 | null | null +; + + +where after change point +required_capability: change_point + +FROM employees + | KEEP emp_no, salary + | EVAL salary = CASE(emp_no==10022, 100000, salary) + | EVAL salary = CASE(emp_no==10033, 1000000, salary) + | CHANGE_POINT salary ON emp_no AS type, pvalue + | WHERE emp_no < 10025 +; + +emp_no:integer | salary:integer | type:keyword | pvalue:double +10001 | 57305 | null | null +10002 | 56371 | null | null +10003 | 61805 | null | null +10004 | 36174 | null | null +10005 | 63528 | null | null +10006 | 60335 | null | null +10007 | 74572 | null | null +10008 | 43906 | null | null +10009 | 66174 | null | null +10010 | 45797 | null | null +10011 | 31120 | null | null +10012 | 48942 | null | null +10013 | 48735 | null | null +10014 | 37137 | null | null +10015 | 25324 | null | null +10016 | 61358 | null | null +10017 | 58715 | null | null +10018 | 56760 | null | null +10019 | 73717 | null | null +10020 | 40031 | null | null +10021 | 60408 | null | null +10022 | 100000 | null | null +10023 | 47896 | null | null +10024 | 64675 | null | null +; + + +where with shadowing +required_capability: change_point + +FROM employees + | KEEP emp_no, salary + | EVAL salary=CASE(emp_no==10015, -1000000, salary) + | WHERE emp_no < 10025 + | CHANGE_POINT salary ON emp_no AS type, emp_no + | WHERE emp_no IS NOT NULL + | RENAME emp_no AS pvalue +; + +salary:integer | type:keyword | pvalue:double +-1000000 | dip | 0.0 +; + + +stats after change point +required_capability: change_point + +FROM employees + | KEEP emp_no, salary + | EVAL salary=CASE(emp_no==10042, 1000000, salary) + | CHANGE_POINT salary ON emp_no + | STATS COUNT() by type + | SORT type +; + +COUNT():long | type:keyword +1 | spike +99 | null +; + + +sort/limit before change point +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | SORT count, @timestamp + | LIMIT 22 + | CHANGE_POINT count AS type, pvalue +; + +count:long | @timestamp:datetime | type:keyword | pvalue:double +4 | 
2024-05-10T00:00:00.000Z | null | null +4 | 2024-05-10T00:01:00.000Z | null | null +8 | 2024-05-10T00:02:00.000Z | null | null +8 | 2024-05-10T00:03:00.000Z | null | null +5 | 2024-05-10T00:04:00.000Z | null | null +8 | 2024-05-10T00:05:00.000Z | null | null +10 | 2024-05-10T00:06:00.000Z | null | null +5 | 2024-05-10T00:07:00.000Z | null | null +12 | 2024-05-10T00:08:00.000Z | null | null +5 | 2024-05-10T00:10:00.000Z | null | null +7 | 2024-05-10T00:11:00.000Z | null | null +8 | 2024-05-10T00:12:00.000Z | null | null +9 | 2024-05-10T00:13:00.000Z | null | null +9 | 2024-05-10T00:14:00.000Z | null | null +11 | 2024-05-10T00:15:00.000Z | null | null +7 | 2024-05-10T00:16:00.000Z | null | null +15 | 2024-05-10T00:17:00.000Z | null | null +17 | 2024-05-10T00:18:00.000Z | null | null +5 | 2024-05-10T00:19:00.000Z | null | null +10 | 2024-05-10T00:20:00.000Z | null | null +4 | 2024-05-10T00:21:00.000Z | null | null +9 | 2024-05-10T00:22:00.000Z | null | null +; + + +sort after change point +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | EVAL count=count+CASE(@timestamp>="2024-05-10T00:11:00.000Z", 100, 0) + | CHANGE_POINT count AS type, pvalue + | SORT type, count, @timestamp +; + +@timestamp:datetime | count:long | type:keyword | pvalue:double +2024-05-10T00:11:00.000Z | 107 | step_change | 3.0924162021968114E-23 +2024-05-10T00:00:00.000Z | 4 | null | null +2024-05-10T00:01:00.000Z | 4 | null | null +2024-05-10T00:04:00.000Z | 5 | null | null +2024-05-10T00:07:00.000Z | 5 | null | null +2024-05-10T00:10:00.000Z | 5 | null | null +2024-05-10T00:02:00.000Z | 8 | null | null +2024-05-10T00:03:00.000Z | 8 | null | null +2024-05-10T00:05:00.000Z | 8 | null | null +2024-05-10T00:06:00.000Z | 10 | null | null +2024-05-10T00:08:00.000Z | 12 | null | null +2024-05-10T00:09:00.000Z | 20 | null | null +2024-05-10T00:21:00.000Z | 104 | null | null +2024-05-10T00:19:00.000Z | 105 | null | null +2024-05-10T00:16:00.000Z | 107 | null | null +2024-05-10T00:12:00.000Z | 108 | null | null +2024-05-10T00:13:00.000Z | 109 | null | null +2024-05-10T00:14:00.000Z | 109 | null | null +2024-05-10T00:22:00.000Z | 109 | null | null +2024-05-10T00:20:00.000Z | 110 | null | null +2024-05-10T00:15:00.000Z | 111 | null | null +2024-05-10T00:17:00.000Z | 115 | null | null +2024-05-10T00:18:00.000Z | 117 | null | null +; + + +reuse input column names +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | EVAL count=count+CASE(@timestamp=="2024-05-10T00:08:00.000Z", 100, 0) + | CHANGE_POINT count ON @timestamp AS @timestamp, count +; + +@timestamp:keyword | count:double +null | null +null | null +null | null +null | null +null | null +null | null +null | null +null | null +spike | 1.7502597878858522E-193 +null | null +null | null +null | null +null | null +null | null +null | null +null | null +null | null +null | null +null | null +null | null +null | null +null | null +null | null +; + + +reuse value column name twice +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | EVAL count=count+CASE(@timestamp=="2024-05-10T00:08:00.000Z", 100, 0) + | CHANGE_POINT count ON @timestamp AS count, count +; + +@timestamp:datetime | count:double +2024-05-10T00:00:00.000Z | null +2024-05-10T00:01:00.000Z | null +2024-05-10T00:02:00.000Z | null +2024-05-10T00:03:00.000Z | null +2024-05-10T00:04:00.000Z | null +2024-05-10T00:05:00.000Z | null 
+2024-05-10T00:06:00.000Z | null +2024-05-10T00:07:00.000Z | null +2024-05-10T00:08:00.000Z | 1.7502597878858522E-193 +2024-05-10T00:09:00.000Z | null +2024-05-10T00:10:00.000Z | null +2024-05-10T00:11:00.000Z | null +2024-05-10T00:12:00.000Z | null +2024-05-10T00:13:00.000Z | null +2024-05-10T00:14:00.000Z | null +2024-05-10T00:15:00.000Z | null +2024-05-10T00:16:00.000Z | null +2024-05-10T00:17:00.000Z | null +2024-05-10T00:18:00.000Z | null +2024-05-10T00:19:00.000Z | null +2024-05-10T00:20:00.000Z | null +2024-05-10T00:21:00.000Z | null +2024-05-10T00:22:00.000Z | null +; + + +same key and value column +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY BUCKET(@timestamp, 1 MINUTE) + | KEEP count + | CHANGE_POINT count ON count AS type, pvalue +; + +count:long | type:keyword | pvalue:double +4 | null | null +4 | null | null +4 | null | null +5 | null | null +5 | null | null +5 | null | null +5 | null | null +7 | null | null +7 | null | null +8 | null | null +8 | null | null +8 | null | null +8 | trend_change | 3.793633808495355E-12 +9 | null | null +9 | null | null +9 | null | null +10 | null | null +10 | null | null +11 | null | null +12 | null | null +15 | null | null +17 | null | null +20 | null | null +; + + +all four columns the same +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY BUCKET(@timestamp, 1 MINUTE) + | KEEP count + | CHANGE_POINT count ON count AS count, count +; + +count:double +null +null +null +null +null +null +null +null +null +null +null +null +3.793633808495355E-12 +null +null +null +null +null +null +null +null +null +null +; + + +rename columns +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | EVAL count=count+CASE(@timestamp=="2024-05-10T00:08:00.000Z", 100, 0) + | RENAME count AS count2, @timestamp AS time2 + | CHANGE_POINT count2 ON time2 AS type, pvalue + | RENAME time2 AS time, count2 AS cnt, type AS kind, pvalue AS significance +; + +time:datetime | cnt:long | kind:keyword | significance:double +2024-05-10T00:00:00.000Z | 4 | null | null +2024-05-10T00:01:00.000Z | 4 | null | null +2024-05-10T00:02:00.000Z | 8 | null | null +2024-05-10T00:03:00.000Z | 8 | null | null +2024-05-10T00:04:00.000Z | 5 | null | null +2024-05-10T00:05:00.000Z | 8 | null | null +2024-05-10T00:06:00.000Z | 10 | null | null +2024-05-10T00:07:00.000Z | 5 | null | null +2024-05-10T00:08:00.000Z | 112 | spike | 1.7502597878858522E-193 +2024-05-10T00:09:00.000Z | 20 | null | null +2024-05-10T00:10:00.000Z | 5 | null | null +2024-05-10T00:11:00.000Z | 7 | null | null +2024-05-10T00:12:00.000Z | 8 | null | null +2024-05-10T00:13:00.000Z | 9 | null | null +2024-05-10T00:14:00.000Z | 9 | null | null +2024-05-10T00:15:00.000Z | 11 | null | null +2024-05-10T00:16:00.000Z | 7 | null | null +2024-05-10T00:17:00.000Z | 15 | null | null +2024-05-10T00:18:00.000Z | 17 | null | null +2024-05-10T00:19:00.000Z | 5 | null | null +2024-05-10T00:20:00.000Z | 10 | null | null +2024-05-10T00:21:00.000Z | 4 | null | null +2024-05-10T00:22:00.000Z | 9 | null | null +; + + +null keys +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | EVAL @timestamp=CASE(@timestamp=="2024-05-10T00:04:00.000Z", NULL, @timestamp) + | EVAL @timestamp=CASE(@timestamp=="2024-05-10T00:08:00.000Z", NULL, @timestamp) + | EVAL count=count+CASE(@timestamp<="2024-05-10T00:11:00.000Z", 100, 0) + | CHANGE_POINT count ON @timestamp AS type, pvalue + | SORT 
@timestamp, count +; + +@timestamp:datetime | count:long | type:keyword | pvalue:double +2024-05-10T00:00:00.000Z | 104 | null | null +2024-05-10T00:01:00.000Z | 104 | null | null +2024-05-10T00:02:00.000Z | 108 | null | null +2024-05-10T00:03:00.000Z | 108 | null | null +2024-05-10T00:05:00.000Z | 108 | null | null +2024-05-10T00:06:00.000Z | 110 | null | null +2024-05-10T00:07:00.000Z | 105 | null | null +2024-05-10T00:09:00.000Z | 120 | null | null +2024-05-10T00:10:00.000Z | 105 | null | null +2024-05-10T00:11:00.000Z | 107 | null | null +2024-05-10T00:12:00.000Z | 8 | step_change | 9.678892139828202E-24 +2024-05-10T00:13:00.000Z | 9 | null | null +2024-05-10T00:14:00.000Z | 9 | null | null +2024-05-10T00:15:00.000Z | 11 | null | null +2024-05-10T00:16:00.000Z | 7 | null | null +2024-05-10T00:17:00.000Z | 15 | null | null +2024-05-10T00:18:00.000Z | 17 | null | null +2024-05-10T00:19:00.000Z | 5 | null | null +2024-05-10T00:20:00.000Z | 10 | null | null +2024-05-10T00:21:00.000Z | 4 | null | null +2024-05-10T00:22:00.000Z | 9 | null | null +null | 5 | null | null +null | 12 | null | null +; + +null values +required_capability: change_point + +FROM k8s + | STATS count=COUNT() BY @timestamp=BUCKET(@timestamp, 1 MINUTE) + | EVAL count=count+CASE(@timestamp>="2024-05-10T00:11:00.000Z", 100, 0) + | EVAL count=CASE(@timestamp=="2024-05-10T00:04:00.000Z", NULL, count) + | CHANGE_POINT count ON @timestamp AS type, pvalue +; + +warning:Line 5:3: warnings during evaluation of [CHANGE_POINT count ON @timestamp AS type, pvalue]. Only first 20 failures recorded. +warning:Line 5:3: java.lang.IllegalArgumentException: values contain nulls; skipping them + +@timestamp:datetime | count:long | type:keyword | pvalue:double +2024-05-10T00:00:00.000Z | 4 | null | null +2024-05-10T00:01:00.000Z | 4 | null | null +2024-05-10T00:02:00.000Z | 8 | null | null +2024-05-10T00:03:00.000Z | 8 | null | null +2024-05-10T00:04:00.000Z | null | null | null +2024-05-10T00:05:00.000Z | 8 | null | null +2024-05-10T00:06:00.000Z | 10 | null | null +2024-05-10T00:07:00.000Z | 5 | null | null +2024-05-10T00:08:00.000Z | 12 | null | null +2024-05-10T00:09:00.000Z | 20 | null | null +2024-05-10T00:10:00.000Z | 5 | null | null +2024-05-10T00:11:00.000Z | 107 | step_change | 3.438939970021414E-22 +2024-05-10T00:12:00.000Z | 108 | null | null +2024-05-10T00:13:00.000Z | 109 | null | null +2024-05-10T00:14:00.000Z | 109 | null | null +2024-05-10T00:15:00.000Z | 111 | null | null +2024-05-10T00:16:00.000Z | 107 | null | null +2024-05-10T00:17:00.000Z | 115 | null | null +2024-05-10T00:18:00.000Z | 117 | null | null +2024-05-10T00:19:00.000Z | 105 | null | null +2024-05-10T00:20:00.000Z | 110 | null | null +2024-05-10T00:21:00.000Z | 104 | null | null +2024-05-10T00:22:00.000Z | 109 | null | null +; + + +multivalued +required_capability: change_point + +FROM employees + | STATS salary=MV_SORT(VALUES(salary)) BY height + | EVAL salary = CASE(height == 1.5, [1, 22222, 33333], salary) + | EVAL salary = CASE(height == 1.63, [43210, -10000, 999999999], salary) + | EVAL salary = CASE(height == 1.8, 999999999, salary) + | CHANGE_POINT salary ON height +; + +warning:Line 6:3: warnings during evaluation of [CHANGE_POINT salary ON height]. Only first 20 failures recorded. +warning:Line 6:3: java.lang.IllegalArgumentException: values contains multivalued entries; skipping them (please consider reducing them with e.g. 
MV_AVG or MV_SUM) + +height:double | salary:integer | type:keyword | pvalue:double +1.41 | 40031 | null | null +1.42 | [29175, 39110] | null | null +1.44 | [30404, 50128] | null | null +1.45 | 49095 | null | null +1.46 | 39878 | null | null +1.47 | 60408 | null | null +1.48 | 44307 | null | null +1.5 | [1, 22222, 33333] | null | null +1.51 | 28035 | null | null +1.52 | [34341, 37853, 42716, 50064] | null | null +1.53 | [35222, 71165, 73851] | null | null +1.54 | 61358 | null | null +1.55 | [36051, 37702] | null | null +1.56 | 60335 | null | null +1.57 | [33370, 43602] | null | null +1.58 | [28941, 54462] | null | null +1.59 | [27215, 32263, 50249] | null | null +1.61 | [49818, 60781] | null | null +1.63 | [43210, -10000, 999999999] | null | null +1.64 | 38992 | null | null +1.66 | [25324, 32568] | null | null +1.68 | [37716, 46595] | null | null +1.69 | 45656 | null | null +1.7 | [45797, 65030, 74572, 74970] | null | null +1.74 | [32272, 58715, 68547] | null | null +1.75 | [25976, 47896, 56415] | null | null +1.77 | [41933, 52044, 54329, 68431] | null | null +1.78 | [36174, 52121] | null | null +1.79 | 55360 | null | null +1.8 | 999999999 | spike | 0.0 +1.81 | [25945, 69904, 73578] | null | null +1.82 | [48233, 54518, 65367] | null | null +1.83 | [38376, 61805, 62405] | null | null +1.85 | 66174 | null | null +1.87 | 47411 | null | null +1.89 | 58121 | null | null +1.9 | 37112 | null | null +1.91 | 39638 | null | null +1.92 | 67492 | null | null +1.93 | 33956 | null | null +1.94 | [43889, 48735, 51956] | null | null +1.96 | 43026 | null | null +1.97 | [48942, 56760] | null | null +1.99 | [37137, 74999] | null | null +2.0 | [26436, 37691, 44817] | null | null +2.01 | 35742 | null | null +2.03 | [44956, 57305] | null | null +2.04 | 49281 | null | null +2.05 | 63528 | null | null +2.06 | [39728, 73717] | null | null +2.07 | [39356, 40612] | null | null +2.08 | [56371, 64675] | null | null +2.09 | 38645 | null | null +2.1 | [28336, 31897, 43906, 62233, 66817] | null | null +; + + +multivalued with MV_AVG +required_capability: change_point + +FROM employees + | STATS salary=MV_SORT(VALUES(salary)) BY height + | EVAL salary = CASE(height == 1.5, [1, 22222, 33333], salary) + | EVAL salary = CASE(height == 1.63, [43210, -10000, 999999999], salary) + | EVAL salary = MV_AVG(salary) + | CHANGE_POINT salary ON height +; + +height:double | salary:double | type:keyword | pvalue:double +1.41 | 40031.0 | null | null +1.42 | 34142.5 | null | null +1.44 | 40266.0 | null | null +1.45 | 49095.0 | null | null +1.46 | 39878.0 | null | null +1.47 | 60408.0 | null | null +1.48 | 44307.0 | null | null +1.5 | 18518.666666666668 | null | null +1.51 | 28035.0 | null | null +1.52 | 41243.5 | null | null +1.53 | 60079.333333333336 | null | null +1.54 | 61358.0 | null | null +1.55 | 36876.5 | null | null +1.56 | 60335.0 | null | null +1.57 | 38486.0 | null | null +1.58 | 41701.5 | null | null +1.59 | 36575.666666666664 | null | null +1.61 | 55299.5 | null | null +1.63 | 3.33344403E8 | spike | 0.0 +1.64 | 38992.0 | null | null +1.66 | 28946.0 | null | null +1.68 | 42155.5 | null | null +1.69 | 45656.0 | null | null +1.7 | 65092.25 | null | null +1.74 | 53178.0 | null | null +1.75 | 43429.0 | null | null +1.77 | 54184.25 | null | null +1.78 | 44147.5 | null | null +1.79 | 55360.0 | null | null +1.8 | 52833.0 | null | null +1.81 | 56475.666666666664 | null | null +1.82 | 56039.333333333336 | null | null +1.83 | 54195.333333333336 | null | null +1.85 | 66174.0 | null | null +1.87 | 47411.0 | null | null +1.89 | 58121.0 | 
null | null +1.9 | 37112.0 | null | null +1.91 | 39638.0 | null | null +1.92 | 67492.0 | null | null +1.93 | 33956.0 | null | null +1.94 | 48193.333333333336 | null | null +1.96 | 43026.0 | null | null +1.97 | 52851.0 | null | null +1.99 | 56068.0 | null | null +2.0 | 36314.666666666664 | null | null +2.01 | 35742.0 | null | null +2.03 | 51130.5 | null | null +2.04 | 49281.0 | null | null +2.05 | 63528.0 | null | null +2.06 | 56722.5 | null | null +2.07 | 39984.0 | null | null +2.08 | 60523.0 | null | null +2.09 | 38645.0 | null | null +2.1 | 46637.8 | null | null +; + + +too much data (change point inside limit) +required_capability: change_point + +ROW key1=["A","B","C","D","E","F","G","H","I","J","K","L","M"], + key2=["0","1","2","3","4","5","6","7","8","9"], + key3=["0","1","2","3","4","5","6","7","8","9"] + | MV_EXPAND key1 + | MV_EXPAND key2 + | MV_EXPAND key3 + | EVAL key=CONCAT(key1,key2,key3) + | EVAL value=CASE(key >= "I42", 1000, 1) + | KEEP key, value + | CHANGE_POINT value ON key AS type, pvalue + | WHERE type IS NOT NULL +; + +warning:Line 10:3: warnings during evaluation of [CHANGE_POINT value ON key AS type, pvalue]. Only first 20 failures recorded. +warning:Line 10:3: java.lang.IllegalArgumentException: too many values; keeping only first 1000 values + +key:keyword | value:integer | type:keyword | pvalue:double +I42 | 1000 | step_change | 0.0 +; + + +too much data (change point outside limit) +required_capability: change_point + +ROW key1=["A","B","C","D","E","F","G","H","I","J","K","L","M"], + key2=["0","1","2","3","4","5","6","7","8","9"], + key3=["0","1","2","3","4","5","6","7","8","9"] + | MV_EXPAND key1 + | MV_EXPAND key2 + | MV_EXPAND key3 + | EVAL key=CONCAT(key1,key2,key3) + | EVAL value=CASE(key >= "L42", 1000, 1) + | KEEP key, value + | CHANGE_POINT value ON key AS type, pvalue + | WHERE type IS NOT NULL +; + +warning:Line 10:3: warnings during evaluation of [CHANGE_POINT value ON key AS type, pvalue]. Only first 20 failures recorded. +warning:Line 10:3: java.lang.IllegalArgumentException: too many values; keeping only first 1000 values + +key:keyword | value:integer | type:keyword | pvalue:double +; + + +too much data (assert output size) +required_capability: change_point + +ROW key1=["A","B","C","D","E","F","G","H","I","J","K","L","M"], + key2=["0","1","2","3","4","5","6","7","8","9"], + key3=["0","1","2","3","4","5","6","7","8","9"] + | MV_EXPAND key1 + | MV_EXPAND key2 + | MV_EXPAND key3 + | EVAL key=CONCAT(key1,key2,key3) + | EVAL value=CASE(key >= "I42", 1000, 1) + | KEEP key, value + | CHANGE_POINT value ON key AS type, pvalue + | STATS count=COUNT() +; + +warning:Line 10:3: warnings during evaluation of [CHANGE_POINT value ON key AS type, pvalue]. Only first 20 failures recorded. 
+warning:Line 10:3: java.lang.IllegalArgumentException: too many values; keeping only first 1000 values
+
+count:LONG
+1000
+;
+
+
+detect trend_change (using row)
+required_capability: change_point
+
+ROW time = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]
+ | MV_EXPAND time
+ | EVAL val = ABS(15-time)
+ | CHANGE_POINT val ON time
+;
+
+time:integer | val:integer | type:keyword | pvalue:double
+1 | 14 | null | null
+2 | 13 | null | null
+3 | 12 | null | null
+4 | 11 | null | null
+5 | 10 | null | null
+6 | 9 | null | null
+7 | 8 | null | null
+8 | 7 | null | null
+9 | 6 | null | null
+10 | 5 | null | null
+11 | 4 | null | null
+12 | 3 | null | null
+13 | 2 | null | null
+14 | 1 | null | null
+15 | 0 | trend_change | 1.2352704486638883E-112
+16 | 1 | null | null
+17 | 2 | null | null
+18 | 3 | null | null
+19 | 4 | null | null
+20 | 5 | null | null
+21 | 6 | null | null
+22 | 7 | null | null
+23 | 8 | null | null
+24 | 9 | null | null
+25 | 10 | null | null
+;
+
+
+keys null column
+required_capability: change_point
+
+ROW key=NULL, value=[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+ | MV_EXPAND value
+ | CHANGE_POINT value ON key
+;
+
+key:null | value:integer | type:keyword | pvalue:double
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+null | 1 | null | null
+;
+
+
+values null column
+required_capability: change_point
+
+ROW time=[7,13,12,17,14,5,16,10,19,18,20,2,3,21,25,4,22,15,6,23,9,11,1,8,24], values=NULL::INTEGER
+ | MV_EXPAND time
+ | CHANGE_POINT values ON time
+;
+
+warning: Line 3:3: evaluation of [CHANGE_POINT values ON time] failed, treating result as null. Only first 20 failures recorded.
+warning: Line 3:3: java.lang.IllegalArgumentException: not enough buckets to calculate change_point. Requires at least [22]; found [0]
+warning: Line 3:3: java.lang.IllegalArgumentException: values contain nulls; skipping them
+
+time:integer | values:integer | type:keyword | pvalue:double
+1 | null | null | null
+2 | null | null | null
+3 | null | null | null
+4 | null | null | null
+5 | null | null | null
+6 | null | null | null
+7 | null | null | null
+8 | null | null | null
+9 | null | null | null
+10 | null | null | null
+11 | null | null | null
+12 | null | null | null
+13 | null | null | null
+14 | null | null | null
+15 | null | null | null
+16 | null | null | null
+17 | null | null | null
+18 | null | null | null
+19 | null | null | null
+20 | null | null | null
+21 | null | null | null
+22 | null | null | null
+23 | null | null | null
+24 | null | null | null
+25 | null | null | null
+;
+
+
+row with integer key and value
+required_capability: change_point
+
+ROW key=[7,13,12,17,14,5,16,10,19,18,20,2,3,21,25,4,22,15,6,23,9,11,1,8,24]
+ | MV_EXPAND key
+ | EVAL value=CASE(key<13, 0, 1)
+ | CHANGE_POINT value ON key
+ | WHERE type IS NOT NULL
+;
+
+key:integer | value:integer | type:keyword | pvalue:double
+13 | 1 | step_change | 0.0
+;
+
+
+row with long key and value
+required_capability: change_point
+
+ROW key=[7,13,12,17,14,5,16,10,19,18,20,2,3,21,25,4,22,15,6,23,9,11,1,8,24]
+ | MV_EXPAND key
+ | EVAL key=TO_LONG(key), value=CASE(key<13, 0::LONG, 1::LONG)
+ | CHANGE_POINT value ON key
+ | WHERE type IS NOT NULL
+;
+
+key:long | value:long | type:keyword | pvalue:double
+13 | 1 | step_change | 0.0
+;
+
+
+row with double key and value
+required_capability: change_point
+
+ROW key=[7,13,12,17,14,5,16,10,19,18,20,2,3,21,25,4,22,15,6,23,9,11,1,8,24]
+ | MV_EXPAND key
+ | EVAL key=TO_DOUBLE(key), value=CASE(key<13, 0::DOUBLE, 1::DOUBLE)
+ | CHANGE_POINT value ON key
+ | WHERE type IS NOT NULL
+;
+
+key:double | value:double | type:keyword | pvalue:double
+13 | 1 | step_change | 0.0
+;
+
+
+row with string key
+required_capability: change_point
+
+ROW key=["23","17","02","06","01","21","12","15","19","07","11","05","24","04","10","14","13","22","09","18","20","08","25","16","03"]
+ | MV_EXPAND key
+ | EVAL value=CASE(key<"13", 0, 1)
+ | CHANGE_POINT value ON key
+ | WHERE type IS NOT NULL
+;
+
+key:string | value:integer | type:keyword | pvalue:double
+13 | 1 | step_change | 0.0
+;
+
+
+row with datetime key
+required_capability: change_point
+
+ROW key=[7,13,12,17,14,5,16,10,19,18,20,2,3,21,25,4,22,15,6,23,9,11,1,8,24]
+ | MV_EXPAND key
+ | EVAL key=TO_DATETIME(key), value=CASE(key<"1970-01-01T00:00:00.013Z", 0, 1)
+ | CHANGE_POINT value ON key
+ | WHERE type IS NOT NULL
+;
+
+key:datetime | value:integer | type:keyword | pvalue:double
+1970-01-01T00:00:00.013Z | 1 | step_change | 0.0
+;
+
+
+row with ip key
+required_capability: change_point
+
+ROW key=[7,13,12,17,14,5,16,10,19,18,20,2,3,21,25,4,22,15,6,23,9,11,1,8,24]
+ | MV_EXPAND key
+ | EVAL key=TO_STRING(key), key=TO_IP(CONCAT(key,".",key,".",key,".",key)), value=CASE(key<"13.13.13.13", 0, 1)
+ | CHANGE_POINT value ON key
+ | WHERE type IS NOT NULL
+;
+
+key:ip | value:integer | type:keyword | pvalue:double
+13.13.13.13 | 1 | step_change | 0.0
+;
+
+
+row with version key
+required_capability: change_point
+
+ROW key=[7,13,12,17,14,5,16,10,19,18,20,2,3,21,25,4,22,15,6,23,9,11,1,8,24]
+ | MV_EXPAND key
+ | EVAL key=TO_STRING(key), key=TO_VERSION(CONCAT(key,".",key,".",key)), value=CASE(key<"13.13.13", 0, 1)
+ | CHANGE_POINT value ON key
+ | WHERE type IS NOT NULL
+;
+
+key:version | value:integer | type:keyword | pvalue:double
+13.13.13 | 1 | step_change | 0.0
+;
+
+
+row with boolean key
+required_capability: change_point
+
+ROW key=[1,0,1,1,1,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,1,0,0,0,1]
+ | MV_EXPAND key
+ | EVAL key=TO_BOOLEAN(key), value=CASE(key==false, 0, 1)
+ | CHANGE_POINT value ON key
+;
+
+key:boolean | value:integer | type:keyword | pvalue:double
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+false | 0 | null | null
+true | 1 | step_change | 0.0
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+true | 1 | null | null
+;
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec
similarity index 97%
rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored
rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec
index 91075691a6a1c..cf2d44665bd5a 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec
@@ -2,8 +2,8 @@
 // TODO: re-enable the commented tests once the Join functionality stabilizes
 //
 
-maxOfInt-Ignore
-required_capability: join_planning_v1
+maxOfInt
+required_capability: inlinestats_v3
 // tag::max-languages[]
 FROM employees
 | KEEP emp_no, languages
@@ -25,7 +25,7 @@ emp_no:integer | languages:integer | max_lang:integer
 ;
 
 maxOfIntByKeyword
-required_capability: join_planning_v1
+required_capability: inlinestats_v3
 
 FROM employees
 | KEEP emp_no, languages, gender
@@ -43,7 +43,7 @@ emp_no:integer | languages:integer | gender:keyword | max_lang:integer
 ;
 
 maxOfLongByKeyword
-required_capability: join_planning_v1
+required_capability: inlinestats_v3
 
 FROM employees
 | KEEP emp_no, avg_worked_seconds, gender
@@ -57,8 +57,8 @@ emp_no:integer | avg_worked_seconds:long | gender:keyword | max_avg_worked_secon
 10030 | 394597613 | M | 394597613
 ;
 
-maxOfLong-Ignore
-required_capability: join_planning_v1
+maxOfLong
+required_capability: inlinestats_v3
 
 FROM employees
 | KEEP emp_no, avg_worked_seconds, gender
@@ -71,7 +71,7 @@ emp_no:integer | avg_worked_seconds:long | gender:keyword | max_avg_worked_secon
 ;
 
 maxOfLongByCalculatedKeyword
-required_capability: join_planning_v1
+required_capability: inlinestats_v3
 
 // tag::longest-tenured-by-first[]
 FROM employees
@@ -94,7 +94,7 @@ emp_no:integer | avg_worked_seconds:long | last_name:keyword | SUBSTRING(last_na
 ;
 
 maxOfLongByCalculatedNamedKeyword
-required_capability: join_planning_v1
+required_capability: inlinestats_v3
 
 FROM employees
 | KEEP emp_no, avg_worked_seconds, last_name
@@ -112,7 +112,7 @@ emp_no:integer | avg_worked_seconds:long | last_name:keyword | l:keyword | max_a
 10087 | 305782871 | Eugenio | E | 305782871
 ;
 
-maxOfLongByCalculatedDroppedKeyword
+maxOfLongByCalculatedDroppedKeyword-Ignore
 required_capability: join_planning_v1
 
 FROM employees
@@ -132,7 +132,7 @@ emp_no:integer | avg_worked_seconds:long | last_name:keyword | max_avg_worked_se
 ;
 
 maxOfLongByEvaledKeyword
-required_capability: join_planning_v1
+required_capability: inlinestats_v3
 
 FROM employees
 | EVAL l = SUBSTRING(last_name, 0, 1)
@@ -152,7 +152,7 @@ emp_no:integer | avg_worked_seconds:long | l:keyword | max_avg_worked_seconds:lo
 ;
 
 maxOfLongByInt
-required_capability: join_planning_v1
+required_capability: inlinestats_v3
 
 FROM employees
 | KEEP emp_no, avg_worked_seconds, languages
@@ -170,7 +170,7 @@ emp_no:integer | avg_worked_seconds:long | languages:integer | max_avg_worked_se
 ;
 
 maxOfLongByIntDouble
-required_capability: join_planning_v1
+required_capability: inlinestats_v3
 
 FROM employees
 | KEEP emp_no, avg_worked_seconds, languages, height
@@ -205,7 +205,7 @@ emp_no:integer | languages:integer | avg_worked_seconds:long | gender:keyword |
 10007 | 4 | 393084805 | F | 2.863684210555556E8 | 5
 ;
 
-byMultivaluedSimple
+byMultivaluedSimple-Ignore
 required_capability: join_planning_v1
 
 // tag::mv-group[]
@@ -223,7 +223,7 @@ abbrev:keyword | type:keyword | scalerank:integer | min_scalerank:integer
 // end::mv-group-result[]
 ;
 
-byMultivaluedMvExpand
+byMultivaluedMvExpand-Ignore
 required_capability: join_planning_v1
 
 // tag::mv-expand[]
@@ -243,7 +243,7 @@ abbrev:keyword | type:keyword | scalerank:integer | min_scalerank:integer
 // end::mv-expand-result[]
 ;
 
-byMvExpand
+byMvExpand-Ignore
 required_capability: join_planning_v1
 
 // tag::extreme-airports[]
@@ -307,7 +307,7 @@ count:long | country:keyword | avg:double
 17 | United Kingdom | 4.455
 ;
 
-afterWhere
+afterWhere-Ignore
 required_capability: join_planning_v1
 
 FROM airports
@@ -366,8 +366,8 @@ abbrev:keyword | city:keyword | region:text | "COUNT(*)":long
 FUK | Fukuoka | 中央区 | 2
 ;
 
-beforeStats-Ignore
-required_capability: join_planning_v1
+beforeStats
+required_capability: inlinestats_v3
 
 FROM airports
 | EVAL lat = ST_Y(location)
@@ -379,7 +379,7 @@ northern:long | southern:long
 520 | 371
 ;
 
-beforeKeepSort
+beforeKeepSort-Ignore
 required_capability: join_planning_v1
 
 FROM employees
@@ -394,7 +394,7 @@ emp_no:integer | languages:integer | max_salary:integer
 10003 | 4 | 74572
 ;
 
-beforeKeepWhere
+beforeKeepWhere-Ignore
 required_capability: join_planning_v1
 
 FROM employees
@@ -537,8 +537,8 @@ emp_no:integer | one:integer
 10005 | 1
 ;
 
-percentile-Ignore
-required_capability: join_planning_v1
+percentile
+required_capability: inlinestats_v3
 
 FROM employees
 | KEEP emp_no, salary
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
index dbeaedd7e0416..1b5de3283fe63 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
@@ -273,6 +273,58 @@ emp_no:integer
 10001
 ;
 
+
+lookupIndexInFromRepeatedRowBug
+// Test for https://github.com/elastic/elasticsearch/issues/118852
+required_capability: join_lookup_v12
+FROM languages_lookup_non_unique_key
+| WHERE language_code == 1
+| LOOKUP JOIN languages_lookup ON language_code
+| KEEP language_code, language_name, country
+| SORT language_code, language_name, country
+;
+
+language_code:integer | language_name:keyword | country:text
+1 | English | Canada
+1 | English | United Kingdom
+1 | English | United States of America
+1 | English | null
+;
+
+nonUniqueRightKeyOnTheCoordinatorLateLimit
+required_capability: join_lookup_v12
+required_capability: join_lookup_fix_limit_pushdown
+
+FROM employees
+| SORT emp_no
+| EVAL language_code = emp_no % 10
+| LOOKUP JOIN languages_lookup_non_unique_key ON language_code
+| KEEP emp_no, language_code, language_name, country
+| LIMIT 4
+| SORT country
+;
+
+emp_no:integer | language_code:integer | language_name:keyword | country:text
+10001 | 1 | English | Canada
+10001 | 1 | null | United Kingdom
+10001 | 1 | English | United States of America
+10001 | 1 | English | null
+;
+
+nonUniqueRightKeyLateLimitWithEmptyRelation
+required_capability: join_lookup_v12
+required_capability: join_lookup_fix_limit_pushdown
+
+ROW language_code = 1
+| WHERE language_code != 1
+| LOOKUP JOIN languages_lookup_non_unique_key ON language_code
+| LIMIT 1
+| KEEP language_code, language_name
+;
+
+language_code:integer | language_name:keyword
+;
+
 ###########################################################################
 # null and multi-value behavior with languages_lookup_non_unique_key index
 ###########################################################################
@@ -301,7 +353,7 @@ emp_no:integer | language_code:integer | language_name:keyword
 
 mvJoinKeyOnTheLookupIndex
 required_capability: join_lookup_v12
-required_capability: join_lookup_skip_mv_on_lookup_key
+required_capability: join_lookup_skip_mv_warnings
 
 FROM employees
 | WHERE 10003 < emp_no AND emp_no < 10008
@@ -311,6 +363,9 @@ FROM employees
 | KEEP emp_no, language_code, language_name
 ;
 
+warning:Line 4:3: evaluation of [LOOKUP JOIN languages_lookup_non_unique_key ON language_code] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 4:3: java.lang.IllegalArgumentException: LOOKUP JOIN encountered multi-value
+
 emp_no:integer | language_code:integer | language_name:keyword
 10004 | 4 | Quenya
 10005 | 5 | null
@@ -320,7 +375,7 @@ emp_no:integer | language_code:integer | language_name:keyword
 
 mvJoinKeyOnFrom
 required_capability: join_lookup_v12
-required_capability: join_lookup_skip_mv
+required_capability: join_lookup_skip_mv_warnings
 
 FROM employees
 | WHERE emp_no < 10006
@@ -330,6 +385,56 @@ FROM employees
 | KEEP emp_no, language_code, language_name
 ;
 
+warning:Line 4:3: evaluation of [LOOKUP JOIN languages_lookup ON language_code] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 4:3: java.lang.IllegalArgumentException: LOOKUP JOIN encountered multi-value
+
+emp_no:integer | language_code:integer | language_name:keyword
+10001 | 1 | English
+10002 | [-7, 11] | null
+10003 | [12, 14] | null
+10004 | [0, 1, 3, 13] | null
+10005 | [-2, 13] | null
+;
+
+mvJoinKeyOnTheLookupIndexAfterStats
+required_capability: join_lookup_v12
+required_capability: join_lookup_skip_mv_warnings
+
+FROM employees
+| WHERE 10003 < emp_no AND emp_no < 10008
+| EVAL language_code = emp_no % 10
+| STATS BY emp_no, language_code
+| LOOKUP JOIN languages_lookup_non_unique_key ON language_code
+| SORT emp_no, language_name
+| KEEP emp_no, language_code, language_name
+;
+
+warning:Line 5:3: evaluation of [LOOKUP JOIN languages_lookup_non_unique_key ON language_code] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 5:3: java.lang.IllegalArgumentException: LOOKUP JOIN encountered multi-value
+
+emp_no:integer | language_code:integer | language_name:keyword
+10004 | 4 | Quenya
+10005 | 5 | null
+10006 | 6 | null
+10007 | 7 | null
+;
+
+mvJoinKeyOnFromAfterStats
+required_capability: join_lookup_v12
+required_capability: join_lookup_skip_mv_warnings
+
+FROM employees
+| WHERE emp_no < 10006
+| EVAL language_code = salary_change.int
+| STATS language_code = VALUES(language_code) BY emp_no
+| LOOKUP JOIN languages_lookup ON language_code
+| SORT emp_no
+| KEEP emp_no, language_code, language_name
+;
+
+warning:Line 5:3: evaluation of [LOOKUP JOIN languages_lookup ON language_code] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 5:3: java.lang.IllegalArgumentException: LOOKUP JOIN encountered multi-value
+
 emp_no:integer | language_code:integer | language_name:keyword
 10001 | 1 | English
 10002 | [-7, 11] | null
@@ -340,7 +445,7 @@ emp_no:integer | language_code:integer | language_name:keyword
 
 mvJoinKeyFromRow
 required_capability: join_lookup_v12
-required_capability: join_lookup_skip_mv
+required_capability: join_lookup_skip_mv_warnings
 
 ROW language_code = [4, 5, 6, 7]
 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code
@@ -348,13 +453,16 @@ ROW language_code = [4, 5, 6, 7]
 | SORT language_code, language_name, country
 ;
 
+warning:Line 2:3: evaluation of [LOOKUP JOIN languages_lookup_non_unique_key ON language_code] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 2:3: java.lang.IllegalArgumentException: LOOKUP JOIN encountered multi-value
+
 language_code:integer | language_name:keyword | country:text
 [4, 5, 6, 7] | null | null
 ;
 
 mvJoinKeyFromRowExpanded
 required_capability: join_lookup_v12
-required_capability: join_lookup_skip_mv_on_lookup_key
+required_capability: join_lookup_skip_mv_warnings
 
 ROW language_code = [4, 5, 6, 7, 8]
 | MV_EXPAND language_code
@@ -363,6 +471,9 @@ ROW language_code = [4, 5, 6, 7, 8]
 | SORT language_code, language_name, country
 ;
 
+warning:Line 3:3: evaluation of [LOOKUP JOIN languages_lookup_non_unique_key ON language_code] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 3:3: java.lang.IllegalArgumentException: LOOKUP JOIN encountered multi-value
+
 language_code:integer | language_name:keyword | country:text
 4 | Quenya | null
 5 | null | Atlantis
@@ -1278,12 +1389,12 @@ ignoreOrder:true
 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | QA | null
 ;
 
-lookupIndexInFromRepeatedRowBug
-// Test for https://github.com/elastic/elasticsearch/issues/118852
+lookupIndexQuoting
 required_capability: join_lookup_v12
 FROM languages_lookup_non_unique_key
 | WHERE language_code == 1
-| LOOKUP JOIN languages_lookup ON language_code
+| LOOKUP JOIN "languages_lookup" ON language_code
+| LOOKUP JOIN """languages_lookup""" ON language_code
 | KEEP language_code, language_name, country
 | SORT language_code, language_name, country
 ;
@@ -1295,19 +1406,49 @@ language_code:integer | language_name:keyword | country:text
 1 | English | null
 ;
 
-lookupIndexQuoting
+
+sortBeforeAndAfterJoin
 required_capability: join_lookup_v12
-FROM languages_lookup_non_unique_key
-| WHERE language_code == 1
-| LOOKUP JOIN "languages_lookup" ON language_code
-| LOOKUP JOIN """languages_lookup""" ON language_code
-| KEEP language_code, language_name, country
-| SORT language_code, language_name, country
+required_capability: remove_redundant_sort
+
+FROM employees
+| sort first_name
+| EVAL language_code = languages
+| LOOKUP JOIN languages_lookup ON language_code
+| WHERE emp_no >= 10091 AND emp_no < 10094
+| SORT emp_no
+| KEEP emp_no, language_code, language_name
 ;
 
-language_code:integer | language_name:keyword | country:text
-1 | English | Canada
-1 | English | United Kingdom
-1 | English | United States of America
-1 | English | null
+emp_no:integer | language_code:integer | language_name:keyword
+10091 | 3 | Spanish
+10092 | 1 | English
+10093 | 3 | Spanish
+;
+
+
+
+sortBeforeAndAfterMultipleJoinAndMvExpand
+required_capability: join_lookup_v12
+required_capability: remove_redundant_sort
+
+FROM employees
+| sort first_name
+| EVAL language_code = languages
+| LOOKUP JOIN languages_lookup ON language_code
+| WHERE emp_no >= 10091 AND emp_no < 10094
+| SORT language_name
+| MV_EXPAND first_name
+| SORT first_name
+| MV_EXPAND last_name
+| SORT last_name
+| LOOKUP JOIN languages_lookup ON language_code
+| SORT emp_no
+| KEEP emp_no, language_code, language_name
+;
+
+emp_no:integer | language_code:integer | language_name:keyword
+10091 | 3 | Spanish
+10092 | 1 | English
+10093 | 3 | Spanish
 ;
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec
index 4d7ee9b1b5af6..88c4fbf7de6cc 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec
@@ -39,7 +39,7 @@ max:integer |_index:keyword
 ;
 
 metaIndexAliasedInAggs
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: metadata_fields_remote_test
 
 from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i | SORT _i;
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec
index a213c378d33d8..1f41ffdb60691 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec
@@ -40,7 +40,7 @@ max:integer |_index:keyword
 ;
 
 metaIndexSorted
-required_capability: metadata_fields
+required_capability: index_metadata_field
 
 from employees metadata _index | sort _index, emp_no desc | keep emp_no, _index | limit 2;
 
@@ -50,7 +50,7 @@ emp_no:integer |_index:keyword
 ;
 
 metaIndexWithInPredicate
-required_capability: metadata_fields
+required_capability: index_metadata_field
 
 from employees metadata _index | where _index in ("employees", "foobar") | sort emp_no desc | keep emp_no, _index | limit 2;
 
@@ -60,7 +60,7 @@ emp_no:integer |_index:keyword
 ;
 
 metaIndexAliasedInAggs
-required_capability: metadata_fields
+required_capability: index_metadata_field
 
 from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i;
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec
index 2a7c092798404..1b4c1f0bc2b6c 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec
@@ -404,3 +404,17 @@ from employees | where emp_no == 10003 | mv_expand first_name | keep first_name
 first_name:keyword
 Parto
 ;
+
+
+sortBeforeAndAfterMvExpand
+from employees
+| sort first_name
+| mv_expand job_positions
+| sort emp_no, job_positions
+| keep emp_no, job_positions
+| limit 2;
+
+emp_no:integer | job_positions:keyword
+10001 | Accountant
+10001 | Senior Python Developer
+;
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec
index a2f491e20e3b9..8b19bc589fcff 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec
@@ -133,7 +133,7 @@ mc:l | count:l
 
 multiIndexIpString
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: casting_operator
 required_capability: union_types_remove_fields
 
@@ -162,7 +162,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexIpStringRename
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: casting_operator
 required_capability: union_types_remove_fields
 
@@ -191,7 +191,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexIpStringRenameToString
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_str METADATA _index
@@ -219,7 +219,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexWhereIpString
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_str METADATA _index
@@ -237,7 +237,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 3450233 | Connected
 
 multiIndexWhereIpStringLike
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_str METADATA _index
@@ -445,7 +445,7 @@ count:long | message:keyword
 
 multiIndexMissingIpToString
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_missing_field
 
 FROM sample_data, sample_data_str, missing_ip_sample_data METADATA _index
@@ -480,7 +480,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450
 
 multiIndexMissingIpToIp
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_missing_field
 
 FROM sample_data, sample_data_str, missing_ip_sample_data METADATA _index
@@ -515,7 +515,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexTsLong
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_long METADATA _index
@@ -543,7 +543,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexTsLongRename
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_long METADATA _index
@@ -573,7 +573,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 multiIndexTsNanosRename
 required_capability: to_date_nanos
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_nanos METADATA _index
@@ -602,7 +602,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 multiIndexTsNanosRenameToNanos
 required_capability: to_date_nanos
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_nanos METADATA _index
@@ -631,7 +631,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360123456Z | 172.21.2.162 | 34502
 multiIndex sort millis and nanos as nanos
 required_capability: to_date_nanos
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_nanos METADATA _index
@@ -660,7 +660,7 @@ sample_data | 2023-10-23T12:15:03.360000000Z | 172.21.2.162 | 34502
 multiIndex sort millis and nanos as millis
 required_capability: to_date_nanos
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_nanos METADATA _index
@@ -691,7 +691,7 @@ multiIndexTsNanosRenameToNanosWithFiltering
 required_capability: to_date_nanos
 required_capability: date_nanos_binary_comparison
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_nanos METADATA _index
@@ -716,7 +716,7 @@ sample_data_ts_nanos | 2023-10-23T13:33:34.937123456Z | 172.21.0.5 | 12323
 
 multiIndexTsLongRenameToString
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_long METADATA _index
@@ -744,7 +744,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexWhereTsLong
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 
 FROM sample_data, sample_data_ts_long METADATA _index
@@ -979,7 +979,7 @@ count:long | message:keyword
 
 multiIndexIpStringTsLong
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 required_capability: to_date_nanos
 
@@ -1022,7 +1022,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexIpStringTsLongDropped
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: to_date_nanos
 
 FROM sample_data* METADATA _index
@@ -1064,7 +1064,7 @@ sample_data_ts_nanos | 8268153 | Connection error
 
 multiIndexIpStringTsLongRename
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 required_capability: to_date_nanos
 
@@ -1107,7 +1107,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexIpStringTsLongRenameDropped
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: to_date_nanos
 
 FROM sample_data* METADATA _index
@@ -1149,7 +1149,7 @@ sample_data_ts_nanos | 8268153 | Connection error
 
 multiIndexIpStringTsLongRenameToString
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 required_capability: to_date_nanos
 
@@ -1192,7 +1192,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233
 
 multiIndexWhereIpStringTsLong
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 required_capability: to_date_nanos
 
@@ -1226,7 +1226,7 @@ count:long | message:keyword
 
 multiIndexWhereIpStringLikeTsLong
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 required_capability: to_date_nanos
 
@@ -1260,7 +1260,7 @@ count:long | message:keyword
 
 multiIndexMultiColumnTypesRename
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 required_capability: to_date_nanos
 
@@ -1279,7 +1279,7 @@ null | null | 8268153 | Connectio
 
 multiIndexMultiColumnTypesRenameAndKeep
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 required_capability: to_date_nanos
 
@@ -1299,7 +1299,7 @@ sample_data_ts_nanos | 2023-10-23T13:52:55.015Z | 2023-10-23T13:52:55.015123456
 
 multiIndexMultiColumnTypesRenameAndDrop
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: union_types_remove_fields
 required_capability: to_date_nanos
 
@@ -1591,7 +1591,7 @@ FROM sample_data, sample_data_ts_long
 
 shortIntegerWidening
 required_capability: union_types
-required_capability: metadata_fields
+required_capability: index_metadata_field
 required_capability: casting_operator
 required_capability: union_types_numeric_widening
 
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClusterTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClusterTestCase.java
new file mode 100644
index 0000000000000..510f5945f745a
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClusterTestCase.java
@@ -0,0 +1,282 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.compute.operator.DriverTaskRunner;
+import org.elasticsearch.compute.operator.exchange.ExchangeService;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.tasks.TaskInfo;
+import org.elasticsearch.test.AbstractMultiClustersTestCase;
+import org.elasticsearch.test.FailingFieldPlugin;
+import org.elasticsearch.test.XContentTestUtils;
+import org.elasticsearch.transport.RemoteClusterAware;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.json.JsonXContent;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+
+public abstract class AbstractCrossClusterTestCase extends AbstractMultiClustersTestCase {
+    protected static final String REMOTE_CLUSTER_1 = "cluster-a";
+    protected static final String REMOTE_CLUSTER_2 = "remote-b";
+    protected static final String LOCAL_INDEX = "logs-1";
+    protected static final String REMOTE_INDEX = "logs-2";
+    protected static final String INDEX_WITH_BLOCKING_MAPPING = "blocking";
+    protected static final String INDEX_WITH_FAIL_MAPPING = "failing";
+
+    @Override
+    protected List<String> remoteClusterAlias() {
+        return List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2);
+    }
+
+    @Override
+    protected Map<String, Boolean> skipUnavailableForRemoteClusters() {
+        return Map.of(REMOTE_CLUSTER_1, false, REMOTE_CLUSTER_2, randomBoolean());
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
+        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
+        plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class);
+        plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action
+        plugins.add(CrossClusterAsyncQueryIT.InternalExchangePlugin.class);
+        plugins.add(SimplePauseFieldPlugin.class);
+        plugins.add(FailingPauseFieldPlugin.class);
+        plugins.add(FailingFieldPlugin.class);
+        plugins.add(CrossClusterAsyncQueryIT.CountingPauseFieldPlugin.class);
+        return plugins;
+    }
+
+    public static class InternalExchangePlugin extends Plugin {
+        @Override
+        public List<Setting<?>> getSettings() {
+            return List.of(
+                Setting.timeSetting(
+                    ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING,
+                    TimeValue.timeValueSeconds(30),
+                    Setting.Property.NodeScope
+                )
+            );
+        }
+    }
+
+    public static class CountingPauseFieldPlugin extends SimplePauseFieldPlugin {
+        public static AtomicLong count = new AtomicLong(0);
+
+        protected String scriptTypeName() {
+            return "pause_count";
+        }
+
+        public static void resetPlugin() {
+            count.set(0);
+        }
+
+        @Override
+        public boolean onWait() throws InterruptedException {
+            count.incrementAndGet();
+            return allowEmitting.await(30, TimeUnit.SECONDS);
+        }
+    }
+
+    @Before
+    public void resetPlugin() {
+        SimplePauseFieldPlugin.resetPlugin();
+        FailingPauseFieldPlugin.resetPlugin();
+        CrossClusterAsyncQueryIT.CountingPauseFieldPlugin.resetPlugin();
+    }
+
+    @After
+    public void releaseLatches() {
+        SimplePauseFieldPlugin.release();
+        FailingPauseFieldPlugin.release();
+        CrossClusterAsyncQueryIT.CountingPauseFieldPlugin.release();
+    }
+
+    protected void assertClusterInfoSuccess(EsqlExecutionInfo.Cluster cluster, int numShards) {
+        assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L));
+        assertThat(cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+        assertThat(cluster.getTotalShards(), equalTo(numShards));
+        assertThat(cluster.getSuccessfulShards(), equalTo(numShards));
+        assertThat(cluster.getSkippedShards(), equalTo(0));
+        assertThat(cluster.getFailedShards(), equalTo(0));
+        assertThat(cluster.getFailures().size(), equalTo(0));
+    }
+
+    protected static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean responseExpectMeta, int numClusters) {
+        try {
+            final Map<String, Object> esqlResponseAsMap = XContentTestUtils.convertToMap(resp);
+            final Object clusters = esqlResponseAsMap.get("_clusters");
+            if (responseExpectMeta) {
+                assertNotNull(clusters);
+                // test a few entries to ensure it looks correct (other tests do a full analysis of the metadata in the response)
+                @SuppressWarnings("unchecked")
+                Map<String, Object> inner = (Map<String, Object>) clusters;
+                assertTrue(inner.containsKey("total"));
+                assertThat((int) inner.get("total"), equalTo(numClusters));
+                assertTrue(inner.containsKey("details"));
+            } else {
+                assertNull(clusters);
+            }
+        } catch (IOException e) {
+            fail("Could not convert ESQLQueryResponse to Map: " + e);
+        }
+    }
+
+    protected Map<String, Object> setupClusters(int numClusters) throws IOException {
+        assert numClusters == 2 || numClusters == 3 : "2 or 3 clusters supported not: " + numClusters;
+        int numShardsLocal = randomIntBetween(1, 5);
+        populateLocalIndices(LOCAL_INDEX, numShardsLocal);
+
+        int numShardsRemote = randomIntBetween(1, 5);
+        populateRemoteIndices(REMOTE_CLUSTER_1, REMOTE_INDEX, numShardsRemote);
+
+        Map<String, Object> clusterInfo = new HashMap<>();
+        clusterInfo.put("local.num_shards", numShardsLocal);
+        clusterInfo.put("local.index", LOCAL_INDEX);
+        clusterInfo.put("remote1.num_shards", numShardsRemote);
+        clusterInfo.put("remote1.index", REMOTE_INDEX);
+        clusterInfo.put("remote.num_shards", numShardsRemote);
+        clusterInfo.put("remote.index", REMOTE_INDEX);
+
+        if (numClusters == 3) {
+            int numShardsRemote2 = randomIntBetween(1, 5);
+            populateRemoteIndices(REMOTE_CLUSTER_2, REMOTE_INDEX, numShardsRemote2);
+            clusterInfo.put("remote2.index", REMOTE_INDEX);
+            clusterInfo.put("remote2.num_shards", numShardsRemote2);
+        }
+
+        String skipUnavailableKey = Strings.format("cluster.remote.%s.skip_unavailable", REMOTE_CLUSTER_1);
+        Setting<?> skipUnavailableSetting = cluster(REMOTE_CLUSTER_1).clusterService().getClusterSettings().get(skipUnavailableKey);
+        boolean skipUnavailable = (boolean) cluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY).clusterService()
+            .getClusterSettings()
+            .get(skipUnavailableSetting);
+        clusterInfo.put("remote.skip_unavailable", skipUnavailable);
+
+        return clusterInfo;
+    }
+
+    protected void populateLocalIndices(String indexName, int numShards) {
+        Client localClient = client(LOCAL_CLUSTER);
+        assertAcked(
+            localClient.admin()
+                .indices()
+                .prepareCreate(indexName)
+                .setSettings(Settings.builder().put("index.number_of_shards", numShards))
+                .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long", "const", "type=long")
+        );
+        for (int i = 0; i < 10; i++) {
+            localClient.prepareIndex(indexName).setSource("id", "local-" + i, "tag", "local", "v", i).get();
+        }
+        localClient.admin().indices().prepareRefresh(indexName).get();
+    }
+
+    protected void populateRuntimeIndex(String clusterAlias, String langName, String indexName) throws IOException {
+        populateRuntimeIndex(clusterAlias, langName, indexName, 10);
+    }
+
+    protected void populateRuntimeIndex(String clusterAlias, String langName, String indexName, int count) throws IOException {
+        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
+        mapping.startObject("runtime");
+        {
+            mapping.startObject("const");
+            {
+                mapping.field("type", "long");
+                mapping.startObject("script").field("source", "").field("lang", langName).endObject();
+            }
+            mapping.endObject();
+        }
+        mapping.endObject();
+        mapping.endObject();
+        client(clusterAlias).admin().indices().prepareCreate(indexName).setMapping(mapping).get();
+        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+        for (int i = 0; i < count; i++) {
+            bulk.add(new IndexRequest().source("foo", i));
+        }
+        bulk.get();
+    }
+
+    protected void populateRemoteIndices(String clusterAlias, String indexName, int numShards) throws IOException {
+        Client remoteClient = client(clusterAlias);
+        assertAcked(
+            remoteClient.admin()
+                .indices()
+                .prepareCreate(indexName)
+                .setSettings(Settings.builder().put("index.number_of_shards", numShards))
+                .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long")
+        );
+        for (int i = 0; i < 10; i++) {
+            remoteClient.prepareIndex(indexName).setSource("id", "remote-" + i, "tag", "remote", "v", i * i).get();
+        }
+        remoteClient.admin().indices().prepareRefresh(indexName).get();
+    }
+
+    protected void setSkipUnavailable(String clusterAlias, boolean skip) {
+        client(LOCAL_CLUSTER).admin()
+            .cluster()
+            .prepareUpdateSettings(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
+            .setPersistentSettings(Settings.builder().put("cluster.remote." + clusterAlias + ".skip_unavailable", skip).build())
+            .get();
+    }
+
+    protected void clearSkipUnavailable(int numClusters) {
+        assert numClusters == 2 || numClusters == 3 : "Only 2 or 3 clusters supported";
+        Settings.Builder settingsBuilder = Settings.builder().putNull("cluster.remote." + REMOTE_CLUSTER_1 + ".skip_unavailable");
+        if (numClusters == 3) {
+            settingsBuilder.putNull("cluster.remote." + REMOTE_CLUSTER_2 + ".skip_unavailable");
+        }
+        client(LOCAL_CLUSTER).admin()
+            .cluster()
+            .prepareUpdateSettings(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
+            .setPersistentSettings(settingsBuilder.build())
+            .get();
+    }
+
+    protected void clearSkipUnavailable() {
+        clearSkipUnavailable(3);
+    }
+
+    protected EsqlQueryResponse runQuery(EsqlQueryRequest request) {
+        return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS);
+    }
+
+    protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) {
+        EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest();
+        request.query(query);
+        request.pragmas(AbstractEsqlIntegTestCase.randomPragmas());
+        request.profile(randomInt(5) == 2);
+        request.columnar(randomBoolean());
+        if (ccsMetadataInResponse != null) {
+            request.includeCCSMetadata(ccsMetadataInResponse);
+        }
+        return runQuery(request);
+    }
+
+    static List<TaskInfo> getDriverTasks(Client client) {
+        return client.admin().cluster().prepareListTasks().setActions(DriverTaskRunner.ACTION_NAME).setDetailed(true).get().getTasks();
+    }
+}
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClusterUsageTelemetryIT.java
similarity index 98%
rename from x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java
rename to x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClusterUsageTelemetryIT.java
index 7df40da0344a9..73dbb0de8acd4 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClusterUsageTelemetryIT.java
@@ -33,8 +33,8 @@
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 
-public class AbstractCrossClustersUsageTelemetryIT extends AbstractMultiClustersTestCase {
-    private static final Logger LOGGER = LogManager.getLogger(AbstractCrossClustersUsageTelemetryIT.class);
+public class AbstractCrossClusterUsageTelemetryIT extends AbstractMultiClustersTestCase {
+    private static final Logger LOGGER = LogManager.getLogger(AbstractCrossClusterUsageTelemetryIT.class);
     protected static final String REMOTE1 = "cluster-a";
     protected static final String REMOTE2 = "cluster-b";
     protected static final String LOCAL_INDEX = "logs-1";
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java
index 4d1f908702152..de54e7de13b79 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java
@@ -75,7 +75,7 @@ protected Collection<String> allClusters() {
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
         List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
-        plugins.add(CrossClustersEnrichIT.LocalStateEnrich.class);
+        plugins.add(CrossClusterEnrichIT.LocalStateEnrich.class);
         plugins.add(IngestCommonPlugin.class);
         plugins.add(ReindexPlugin.class);
         return plugins;
@@ -274,7 +274,7 @@ protected List> infoActions() {
 
         @Override
         protected Class> getInfoAction() {
-            return CrossClustersQueriesWithInvalidLicenseIT.LocalStateEnrich.EnrichTransportXPackInfoAction.class;
+            return CrossClusterQueriesWithInvalidLicenseIT.LocalStateEnrich.EnrichTransportXPackInfoAction.class;
         }
     }
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java
index 99a81c60a9ad2..1d6acf51db032 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java
@@ -10,7 +10,9 @@
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.compute.operator.DriverStatus;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.tasks.TaskInfo;
 import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.xpack.core.async.AsyncStopRequest;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
@@ -28,10 +30,13 @@
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList;
+import static org.elasticsearch.xpack.esql.action.AbstractCrossClusterTestCase.getDriverTasks;
 import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId;
 import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQuery;
 import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.waitForCluster;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
 
 // This tests if enrich after stop works correctly
 public class CrossClusterAsyncEnrichStopIT extends AbstractEnrichBasedCrossClusterTestCase {
@@ -87,10 +92,27 @@ public void testEnrichAfterStop() throws Exception {
         // wait until c1 is done
         waitForCluster(client(), "c1", asyncExecutionId);
         waitForCluster(client(), LOCAL_CLUSTER, asyncExecutionId);
+        // wait until remote reduce task starts on c2
+        assertBusy(() -> {
+            List<TaskInfo> tasks = getDriverTasks(client(REMOTE_CLUSTER_2));
+            List<TaskInfo> reduceTasks = tasks.stream()
+                .filter(t -> t.status() instanceof DriverStatus ds && ds.taskDescription().equals("remote_reduce"))
+                .toList();
+            assertThat(reduceTasks, not(empty()));
+        });
 
         // Run the stop request
         var stopRequest = new AsyncStopRequest(asyncExecutionId);
         var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // wait until remote reduce tasks are gone
+        assertBusy(() -> {
+            List<TaskInfo> tasks = getDriverTasks(client(REMOTE_CLUSTER_2));
+            List<TaskInfo> reduceTasks = tasks.stream()
+                .filter(t -> t.status() instanceof DriverStatus ds && ds.taskDescription().equals("remote_reduce"))
+                .toList();
+            assertThat(reduceTasks, empty());
+        });
+
         // Allow the processing to proceed
         SimplePauseFieldPlugin.allowEmitting.countDown();
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
index 42a96cc7b7743..d5c53e94f8687 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
@@ -9,40 +9,17 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ResourceNotFoundException;
-import org.elasticsearch.action.ActionFuture;
-import org.elasticsearch.action.bulk.BulkRequestBuilder;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.support.WriteRequest;
-import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.settings.Setting;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.compute.operator.DriverTaskRunner;
-import org.elasticsearch.compute.operator.exchange.ExchangeService;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.tasks.TaskId;
-import org.elasticsearch.tasks.TaskInfo;
-import org.elasticsearch.test.AbstractMultiClustersTestCase;
-import org.elasticsearch.test.XContentTestUtils;
 import org.elasticsearch.transport.RemoteClusterAware;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.xpack.core.async.AsyncExecutionId;
 import org.elasticsearch.xpack.core.async.AsyncStopRequest;
-import org.junit.Before;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -51,66 +28,14 @@
 import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.getAsyncResponse;
 import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.runAsyncQuery;
 import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQuery;
-import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQueryWithPragmas;
 import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.waitForCluster;
-import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.not;
 
-public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
-
-    private static final String REMOTE_CLUSTER_1 = "cluster-a";
-    private static final String REMOTE_CLUSTER_2 = "remote-b";
-    private static String LOCAL_INDEX = "logs-1";
-    private static String REMOTE_INDEX = "logs-2";
-    private static final String INDEX_WITH_RUNTIME_MAPPING = "blocking";
-    private static final String INDEX_WITH_FAIL_MAPPING = "failing";
-
-    @Override
-    protected List<String> remoteClusterAlias() {
-        return List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2);
-    }
-
-    @Override
-    protected Map<String, Boolean> skipUnavailableForRemoteClusters() {
-        return Map.of(REMOTE_CLUSTER_1, false, REMOTE_CLUSTER_2, randomBoolean());
-    }
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
-        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
-        plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class);
-        plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action
-        plugins.add(InternalExchangePlugin.class);
-        plugins.add(SimplePauseFieldPlugin.class);
-        plugins.add(FailingPauseFieldPlugin.class);
-        plugins.add(CountingPauseFieldPlugin.class);
-        return plugins;
-    }
-
-    public static class InternalExchangePlugin extends Plugin {
-        @Override
-        public List<Setting<?>> getSettings() {
-            return List.of(
-                Setting.timeSetting(
-                    ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING,
-                    TimeValue.timeValueSeconds(30),
-                    Setting.Property.NodeScope
-                )
-            );
-        }
-    }
-
-    @Before
-    public void resetPlugin() {
-        SimplePauseFieldPlugin.resetPlugin();
-        FailingPauseFieldPlugin.resetPlugin();
-        CountingPauseFieldPlugin.resetPlugin();
-    }
-
+public class CrossClusterAsyncQueryIT extends AbstractCrossClusterTestCase {
     /**
      * Includes testing for CCS metadata in the GET /_query/async/:id response while the search is still running
      */
@@ -118,7 +43,7 @@ public void testSuccessfulPathways() throws Exception {
         Map<String, Object> testClusterInfo = setupClusters(3);
         int localNumShards = (Integer) testClusterInfo.get("local.num_shards");
         int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
-        populateRuntimeIndex(REMOTE_CLUSTER_2, "pause", INDEX_WITH_RUNTIME_MAPPING);
+        populateRuntimeIndex(REMOTE_CLUSTER_2, "pause", INDEX_WITH_BLOCKING_MAPPING);
 
         Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
         boolean responseExpectMeta = includeCCSMetadata.v2();
@@ -264,202 +189,6 @@ public void testAsyncQueriesWithLimit0() throws IOException {
         }
     }
 
-    public void testStopQuery() throws Exception {
-        Map<String, Object> testClusterInfo = setupClusters(3);
-        int localNumShards = (Integer) testClusterInfo.get("local.num_shards");
-        int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
-        // Create large index so we could be sure we're stopping before the end
-        populateRuntimeIndex(REMOTE_CLUSTER_2, "pause_count", INDEX_WITH_RUNTIME_MAPPING);
-
-        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
-        boolean responseExpectMeta = includeCCSMetadata.v2();
-
-        final String asyncExecutionId = startAsyncQueryWithPragmas(
-            client(),
-            "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(coalesce(const,v)) | LIMIT 1",
-            includeCCSMetadata.v1(),
-            Map.of("page_size", 1, "data_partitioning", "shard", "task_concurrency", 1)
-        );
-
-        // wait until we know that the query against 'remote-b:blocking' has started
-        CountingPauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
-
-        // wait until the query of 'cluster-a:logs-*' has finished (it is not blocked since we are not searching the 'blocking' index on it)
-        waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId);
-        waitForCluster(client(), LOCAL_CLUSTER, asyncExecutionId);
-
-        /* at this point:
-         * the query against cluster-a should be finished
-         * the query against remote-b should be running (blocked on the PauseFieldPlugin.allowEmitting CountDown)
-         * the query against the local cluster should be running because it has a STATS clause that needs to wait on remote-b
-         */
-
-        // run the stop query
-        AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId);
-        ActionFuture<EsqlQueryResponse> stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
-        assertBusy(() -> {
-            List<TaskInfo> tasks = getDriverTasks(client(REMOTE_CLUSTER_2));
-            List<TaskInfo> reduceTasks = tasks.stream().filter(t -> t.description().contains("_LuceneSourceOperator") == false).toList();
-            assertThat(reduceTasks, empty());
-        });
-        // allow remoteB query to proceed
-        CountingPauseFieldPlugin.allowEmitting.countDown();
-
-        // Since part of the query has not been stopped, we expect some result to emerge here
-        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
-            // Check that we did not process all the fields on remote-b
-            // Should not be getting more than one page here, and we set page size to 1
-            assertThat(CountingPauseFieldPlugin.count.get(), lessThanOrEqualTo(1L));
-            assertThat(asyncResponse.isRunning(), is(false));
-            assertThat(asyncResponse.columns().size(), equalTo(1));
-            assertThat(asyncResponse.values().hasNext(), is(true));
-            Iterator<Object> row = asyncResponse.values().next();
-            // sum of 0-9 is 45, and sum of 0-9 squared is 285
-            assertThat(row.next(), equalTo(330L));
-
-            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
-            assertNotNull(executionInfo);
-            assertThat(executionInfo.isCrossClusterSearch(), is(true));
-            long overallTookMillis = executionInfo.overallTook().millis();
-            assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
-            assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
-            assertThat(executionInfo.isPartial(), equalTo(true));
-
-            EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
-            assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*"));
-            assertClusterInfoSuccess(remoteCluster, remote1NumShards);
-
-            EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2);
-            assertThat(remote2Cluster.getIndexExpression(), equalTo("blocking"));
-            assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL));
-
-            EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
-            assertThat(localCluster.getIndexExpression(), equalTo("logs-*"));
-            assertClusterInfoSuccess(localCluster, localNumShards);
-
-            assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3);
-        } finally {
-            assertAcked(deleteAsyncId(client(), asyncExecutionId));
-        }
-    }
-
-    public void testStopQueryLocal() throws Exception {
-        Map<String, Object> testClusterInfo = setupClusters(3);
-        int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
-        int remote2NumShards = (Integer) testClusterInfo.get("remote2.num_shards");
-        populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_RUNTIME_MAPPING);
-
-        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
-        boolean responseExpectMeta = includeCCSMetadata.v2();
-
-        final String asyncExecutionId = startAsyncQuery(
-            client(),
-            "FROM blocking,*:logs-* | STATS total=sum(coalesce(const,v)) | LIMIT 1",
-            includeCCSMetadata.v1()
-        );
-
-        // wait until we know that the query against 'remote-b:blocking' has started
-        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
-
-        // wait until the remotes are done
-        waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId);
-        waitForCluster(client(), REMOTE_CLUSTER_2, asyncExecutionId);
-
-        /* at this point:
-         * the query against remotes should be finished
-         * the query against the local cluster should be running because it's blocked
-         */
-
-        // run the stop query
-        AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId);
-        ActionFuture<EsqlQueryResponse> stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
-        // ensure stop operation is running
-        assertBusy(() -> {
-            try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
-                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
-                assertNotNull(executionInfo);
-                assertThat(executionInfo.isPartial(), is(true));
-            }
-        });
-        // allow local query to proceed
-        SimplePauseFieldPlugin.allowEmitting.countDown();
-
-        // Since part of the query has not been stopped, we expect some result to emerge here
-        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
-            assertThat(asyncResponse.isRunning(), is(false));
-            assertThat(asyncResponse.columns().size(), equalTo(1));
-            assertThat(asyncResponse.values().hasNext(), is(true));
-            Iterator<Object> row = asyncResponse.values().next();
-            // sum of 0-9 squared is 285, from two remotes it's 570
-            assertThat(row.next(), equalTo(570L));
-
-            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
-            assertNotNull(executionInfo);
-            assertThat(executionInfo.isCrossClusterSearch(), is(true));
-            long overallTookMillis = executionInfo.overallTook().millis();
-            assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
-            assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
-            assertThat(executionInfo.isPartial(), equalTo(true));
-
-            EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
-            assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*"));
-            assertClusterInfoSuccess(remoteCluster, remote1NumShards);
-
-            EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2);
-            assertThat(remote2Cluster.getIndexExpression(), equalTo("logs-*"));
-            assertClusterInfoSuccess(remote2Cluster, remote2NumShards);
-
-            EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
-            assertThat(localCluster.getIndexExpression(), equalTo("blocking"));
-            assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL));
-
-            assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3);
-        } finally {
-            assertAcked(deleteAsyncId(client(), asyncExecutionId));
-        }
-    }
-
-    public void testStopQueryLocalNoRemotes() throws Exception {
-        setupClusters(3);
-        populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_RUNTIME_MAPPING);
-
-        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
-        boolean responseExpectMeta = includeCCSMetadata.v2();
-
-        final String asyncExecutionId = startAsyncQuery(
-            client(),
-            "FROM blocking | STATS total=count(const) | LIMIT 1",
-            includeCCSMetadata.v1()
-        );
-
-        // wait until we know that the query against 'remote-b:blocking' has started
-        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
-
-        /* at this point:
-         * the query against the local cluster should be running because it's blocked
-         */
-
-        // run the stop query
-        var stopRequest = new AsyncStopRequest(asyncExecutionId);
-        var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
-        // allow local query to proceed
-        SimplePauseFieldPlugin.allowEmitting.countDown();
-
-        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
-            assertThat(asyncResponse.isRunning(), is(false));
-            assertThat(asyncResponse.columns().size(), equalTo(1));
-            assertThat(asyncResponse.values().hasNext(), is(true));
-            Iterator<Object> row = asyncResponse.values().next();
-            assertThat((long) row.next(), greaterThanOrEqualTo(0L));
-
-            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
-            assertNotNull(executionInfo);
-            assertThat(executionInfo.isCrossClusterSearch(), is(false));
-        } finally {
-            assertAcked(deleteAsyncId(client(), asyncExecutionId));
-        }
-    }
-
     public void testAsyncFailure() throws Exception {
         Map<String, Object> testClusterInfo = setupClusters(2);
         populateRuntimeIndex(REMOTE_CLUSTER_1, "pause_fail", INDEX_WITH_FAIL_MAPPING);
@@ -517,142 +246,4 @@ public void testBadAsyncId() throws Exception {
         var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
         assertThrows(ResourceNotFoundException.class, () -> stopAction.actionGet(1000, TimeUnit.SECONDS));
     }
-
-    private void assertClusterInfoSuccess(EsqlExecutionInfo.Cluster cluster, int numShards) {
-        assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L));
-        assertThat(cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-        assertThat(cluster.getTotalShards(), equalTo(numShards));
-        assertThat(cluster.getSuccessfulShards(), equalTo(numShards));
-        assertThat(cluster.getSkippedShards(), equalTo(0));
-        assertThat(cluster.getFailedShards(), equalTo(0));
-        assertThat(cluster.getFailures().size(), equalTo(0));
-    }
-
-    private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean responseExpectMeta, int numClusters) {
-        try {
-            final Map<String, Object> esqlResponseAsMap = XContentTestUtils.convertToMap(resp);
-            final Object clusters = esqlResponseAsMap.get("_clusters");
-            if (responseExpectMeta) {
-                assertNotNull(clusters);
-                // test a few entries to ensure it looks correct (other tests do a full analysis of the metadata in the response)
-                @SuppressWarnings("unchecked")
-                Map<String, Object> inner = (Map<String, Object>) clusters;
-                assertTrue(inner.containsKey("total"));
-                assertThat((int) inner.get("total"), equalTo(numClusters));
-                assertTrue(inner.containsKey("details"));
-            } else {
-                assertNull(clusters);
-            }
-        } catch (IOException e) {
-            fail("Could not convert ESQLQueryResponse to Map: " + e);
-        }
-    }
-
-    Map<String, Object> setupClusters(int numClusters) throws IOException {
-        assert numClusters == 2 || numClusters == 3 : "2 or 3 clusters supported not: " + numClusters;
-        int numShardsLocal = randomIntBetween(1, 5);
-        populateLocalIndices(LOCAL_INDEX, numShardsLocal);
-
-        int numShardsRemote = randomIntBetween(1, 5);
-        populateRemoteIndices(REMOTE_CLUSTER_1, REMOTE_INDEX, numShardsRemote);
-
-        Map<String, Object> clusterInfo = new HashMap<>();
-        clusterInfo.put("local.num_shards", numShardsLocal);
-        clusterInfo.put("local.index", LOCAL_INDEX);
-        clusterInfo.put("remote1.num_shards", numShardsRemote);
-        clusterInfo.put("remote1.index", REMOTE_INDEX);
-
-        if (numClusters == 3) {
-            int numShardsRemote2 = randomIntBetween(1, 5);
-            populateRemoteIndices(REMOTE_CLUSTER_2, REMOTE_INDEX, numShardsRemote2);
-            clusterInfo.put("remote2.index", REMOTE_INDEX);
-            clusterInfo.put("remote2.num_shards", numShardsRemote2);
-        }
-
-        String skipUnavailableKey = Strings.format("cluster.remote.%s.skip_unavailable", REMOTE_CLUSTER_1);
-        Setting<?> skipUnavailableSetting = cluster(REMOTE_CLUSTER_1).clusterService().getClusterSettings().get(skipUnavailableKey);
-        boolean skipUnavailable = (boolean) cluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY).clusterService()
-            .getClusterSettings()
-            .get(skipUnavailableSetting);
-        clusterInfo.put("remote.skip_unavailable", skipUnavailable);
-
-        return clusterInfo;
-    }
-
-    void populateLocalIndices(String indexName, int numShards) {
-        Client localClient = client(LOCAL_CLUSTER);
-        assertAcked(
-            localClient.admin()
-                .indices()
-                .prepareCreate(indexName)
-                .setSettings(Settings.builder().put("index.number_of_shards", numShards))
-                .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long", "const", "type=long")
-        );
-        for (int i = 0; i < 10; i++) {
-            localClient.prepareIndex(indexName).setSource("id", "local-" + i, "tag", "local", "v", i).get();
-        }
-        localClient.admin().indices().prepareRefresh(indexName).get();
-    }
-
-    void populateRuntimeIndex(String clusterAlias, String langName, String indexName) throws IOException {
-        populateRuntimeIndex(clusterAlias, langName, indexName, 10);
-    }
-
-    void populateRuntimeIndex(String clusterAlias, String langName, String indexName, int count) throws IOException {
-        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
-        mapping.startObject("runtime");
-        {
-            mapping.startObject("const");
-            {
-                mapping.field("type", "long");
-                mapping.startObject("script").field("source", "").field("lang", langName).endObject();
-            }
-            mapping.endObject();
-        }
-        mapping.endObject();
-        mapping.endObject();
-        client(clusterAlias).admin().indices().prepareCreate(indexName).setMapping(mapping).get();
-        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-        for (int i = 0; i < count; i++) {
-            bulk.add(new IndexRequest().source("foo", i));
-        }
-        bulk.get();
-    }
-
-    void populateRemoteIndices(String clusterAlias, String indexName, int numShards) throws IOException {
-        Client remoteClient = client(clusterAlias);
-        assertAcked(
-            remoteClient.admin()
-                .indices()
-                .prepareCreate(indexName)
-                .setSettings(Settings.builder().put("index.number_of_shards", numShards))
-                .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long")
-        );
-        for (int i = 0; i < 10; i++) {
-            remoteClient.prepareIndex(indexName).setSource("id", "remote-" + i, "tag", "remote", "v", i * i).get();
-        }
-        remoteClient.admin().indices().prepareRefresh(indexName).get();
-    }
-
-    public static class CountingPauseFieldPlugin extends SimplePauseFieldPlugin {
-        public static AtomicLong count = new AtomicLong(0);
-
-        protected String scriptTypeName() {
-            return "pause_count";
-        }
-
-        public static void resetPlugin() {
-            count.set(0);
-        }
-
-        @Override
-        public boolean onWait() throws InterruptedException {
-            count.incrementAndGet();
-            return allowEmitting.await(30, TimeUnit.SECONDS);
-        }
-    }
-
-    private static List<TaskInfo> getDriverTasks(Client client) {
-        return client.admin().cluster().prepareListTasks().setActions(DriverTaskRunner.ACTION_NAME).setDetailed(true).get().getTasks();
-    }
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryStopIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryStopIT.java
new file mode 100644
index 0000000000000..7401a2838ae82
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryStopIT.java
@@ -0,0 +1,245 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.Build; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.xpack.core.async.AsyncStopRequest; + +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase.randomIncludeCCSMetadata; +import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId; +import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.getAsyncResponse; +import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQuery; +import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQueryWithPragmas; +import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.waitForCluster; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class CrossClusterAsyncQueryStopIT extends AbstractCrossClusterTestCase { + + public void testStopQuery() throws Exception { + assumeTrue("Pragma does not work in release builds", Build.current().isSnapshot()); + Map testClusterInfo = setupClusters(3); + int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); + int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards"); + // Create a large index so we can be sure we're stopping before the end + populateRuntimeIndex(REMOTE_CLUSTER_2, "pause_count", INDEX_WITH_BLOCKING_MAPPING); + + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + final String asyncExecutionId = startAsyncQueryWithPragmas( + client(), + "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(coalesce(const,v)) | LIMIT 1", + includeCCSMetadata.v1(), + Map.of("page_size", 1, "data_partitioning", "shard", "task_concurrency", 1) + ); + try { + // wait until we know that the query against 'remote-b:blocking' has started + CountingPauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS); + + // wait until the query against 'cluster-a:logs-*' has finished (it is not blocked since we are not searching the 'blocking' index on + // it) + waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId); + waitForCluster(client(), LOCAL_CLUSTER, asyncExecutionId); + + /* at this point: + * the query against cluster-a should be finished + * the query against remote-b should be running (blocked on the PauseFieldPlugin.allowEmitting CountDown) + * the query against the local cluster should be running because it has a STATS clause that needs to wait on remote-b + */ + + // run the stop query + AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId); + ActionFuture stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest); + assertBusy(() -> { + List tasks = getDriverTasks(client(REMOTE_CLUSTER_2)); + List reduceTasks = tasks.stream() + .filter(t -> t.description().contains("_LuceneSourceOperator") == false) + .toList(); + assertThat(reduceTasks, empty()); + }); + // allow the remote-b query to proceed + CountingPauseFieldPlugin.allowEmitting.countDown(); + + // Since part of the query has 
not been stopped, we expect some result to emerge here + try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) { + // Check that we did not process all the fields on remote-b + // We should not get more than one page here, since we set the page size to 1 + assertThat(CountingPauseFieldPlugin.count.get(), lessThanOrEqualTo(1L)); + assertThat(asyncResponse.isRunning(), is(false)); + assertThat(asyncResponse.columns().size(), equalTo(1)); + assertThat(asyncResponse.values().hasNext(), is(true)); + Iterator row = asyncResponse.values().next(); + // sum of 0-9 is 45, and sum of 0-9 squared is 285 + assertThat(row.next(), equalTo(330L)); + + EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(true)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2))); + assertThat(executionInfo.isPartial(), equalTo(true)); + + EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1); + assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*")); + assertClusterInfoSuccess(remoteCluster, remote1NumShards); + + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2); + assertThat(remote2Cluster.getIndexExpression(), equalTo("blocking")); + assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL)); + + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); + assertThat(localCluster.getIndexExpression(), equalTo("logs-*")); + assertClusterInfoSuccess(localCluster, localNumShards); + + assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3); + } + } finally { + // Ensure proper cleanup if the test fails + CountingPauseFieldPlugin.allowEmitting.countDown(); + assertAcked(deleteAsyncId(client(), asyncExecutionId)); + } + } + + public void testStopQueryLocal() throws Exception { + Map testClusterInfo = setupClusters(3); + int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards"); + int remote2NumShards = (Integer) testClusterInfo.get("remote2.num_shards"); + populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_BLOCKING_MAPPING); + + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + final String asyncExecutionId = startAsyncQuery( + client(), + "FROM blocking,*:logs-* | STATS total=sum(coalesce(const,v)) | LIMIT 1", + includeCCSMetadata.v1() + ); + + try { + // wait until we know that the query against the local 'blocking' index has started + SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS); + + // wait until the remotes are done + waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId); + waitForCluster(client(), REMOTE_CLUSTER_2, asyncExecutionId); + + /* at this point: + * the query against remotes should be finished + * the query against the local cluster should be running because it's blocked + */ + + // run the stop query + AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId); + ActionFuture stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest); + // ensure the stop operation is running + assertBusy(() -> { + try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) { + EsqlExecutionInfo executionInfo =
asyncResponse.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isPartial(), is(true)); + } + }); + // allow local query to proceed + SimplePauseFieldPlugin.allowEmitting.countDown(); + + // Since part of the query has not been stopped, we expect some result to emerge here + try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) { + assertThat(asyncResponse.isRunning(), is(false)); + assertThat(asyncResponse.columns().size(), equalTo(1)); + assertThat(asyncResponse.values().hasNext(), is(true)); + Iterator row = asyncResponse.values().next(); + // sum of 0-9 squared is 285, from two remotes it's 570 + assertThat(row.next(), equalTo(570L)); + + EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(true)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2))); + assertThat(executionInfo.isPartial(), equalTo(true)); + + EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1); + assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*")); + assertClusterInfoSuccess(remoteCluster, remote1NumShards); + + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2); + assertThat(remote2Cluster.getIndexExpression(), equalTo("logs-*")); + assertClusterInfoSuccess(remote2Cluster, remote2NumShards); + + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); + assertThat(localCluster.getIndexExpression(), equalTo("blocking")); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL)); + + assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3); + } + } finally { + SimplePauseFieldPlugin.allowEmitting.countDown(); + assertAcked(deleteAsyncId(client(), asyncExecutionId)); + } + } + + public void testStopQueryLocalNoRemotes() throws Exception { + setupClusters(3); + populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_BLOCKING_MAPPING); + + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + final String asyncExecutionId = startAsyncQuery( + client(), + "FROM blocking | STATS total=count(const) | LIMIT 1", + includeCCSMetadata.v1() + ); + + try { + // wait until we know that the query against the local 'blocking' index has started + SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS); + + /* at this point: + * the query against the local cluster should be running because it's blocked + */ + + // run the stop query + var stopRequest = new AsyncStopRequest(asyncExecutionId); + var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest); + // allow local query to proceed + SimplePauseFieldPlugin.allowEmitting.countDown(); + + try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) { + assertThat(asyncResponse.isRunning(), is(false)); + assertThat(asyncResponse.columns().size(), equalTo(1)); + assertThat(asyncResponse.values().hasNext(), is(true)); + Iterator row = asyncResponse.values().next(); + assertThat((long) row.next(), greaterThanOrEqualTo(0L)); + + EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + } + }
finally { + SimplePauseFieldPlugin.allowEmitting.countDown(); + assertAcked(deleteAsyncId(client(), asyncExecutionId)); + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterCancellationIT.java similarity index 62% rename from x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java rename to x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterCancellationIT.java index cfe6fdeccb190..57041d3d6a06f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterCancellationIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.Build; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; @@ -20,12 +21,14 @@ import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.plugin.ComputeService; +import org.junit.After; import org.junit.Before; import java.util.ArrayList; @@ -39,8 +42,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; -public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { +public class CrossClusterCancellationIT extends AbstractMultiClustersTestCase { private static final String REMOTE_CLUSTER = "cluster-a"; @Override @@ -75,6 +79,16 @@ public void resetPlugin() { SimplePauseFieldPlugin.resetPlugin(); } + @After + public void releasePlugin() { + SimplePauseFieldPlugin.release(); + } + + @Override + protected boolean reuseClusters() { + return false; + } + private void createRemoteIndex(int numDocs) throws Exception { XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); mapping.startObject("runtime"); @@ -96,6 +110,27 @@ private void createRemoteIndex(int numDocs) throws Exception { bulk.get(); } + private void createLocalIndex(int numDocs) throws Exception { + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("runtime"); + { + mapping.startObject("const"); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", "pause").endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + mapping.endObject(); + client(LOCAL_CLUSTER).admin().indices().prepareCreate("test").setMapping(mapping).get(); + BulkRequestBuilder bulk = client(LOCAL_CLUSTER).prepareBulk("test").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int i = 0; i < numDocs; i++) { + bulk.add(new IndexRequest().source("foo", i)); + } + 
bulk.get(); + } + public void testCancel() throws Exception { createRemoteIndex(between(10, 100)); EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); @@ -208,4 +243,88 @@ public void testTasks() throws Exception { } requestFuture.actionGet(30, TimeUnit.SECONDS).close(); } + + // Check that cancelling a remote task with skip_unavailable=true produces a failure + public void testCancelSkipUnavailable() throws Exception { + createRemoteIndex(between(10, 100)); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); + request.query("FROM *:test | STATS total=sum(const) | LIMIT 1"); + request.pragmas(randomPragmas()); + request.includeCCSMetadata(true); + PlainActionFuture requestFuture = new PlainActionFuture<>(); + client().execute(EsqlQueryAction.INSTANCE, request, requestFuture); + assertTrue(SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); + List rootTasks = new ArrayList<>(); + assertBusy(() -> { + List tasks = client(REMOTE_CLUSTER).admin() + .cluster() + .prepareListTasks() + .setActions(ComputeService.CLUSTER_ACTION_NAME) + .get() + .getTasks(); + assertThat(tasks, hasSize(1)); + rootTasks.addAll(tasks); + }); + var cancelRequest = new CancelTasksRequest().setTargetTaskId(rootTasks.get(0).taskId()).setReason("remote failed"); + client(REMOTE_CLUSTER).execute(TransportCancelTasksAction.TYPE, cancelRequest); + try { + assertBusy(() -> { + List drivers = client(REMOTE_CLUSTER).admin() + .cluster() + .prepareListTasks() + .setActions(DriverTaskRunner.ACTION_NAME) + .get() + .getTasks(); + assertThat(drivers.size(), greaterThanOrEqualTo(1)); + for (TaskInfo driver : drivers) { + assertTrue(driver.cancelled()); + } + }); + } finally { + SimplePauseFieldPlugin.allowEmitting.countDown(); + } + + Exception error = expectThrows(Exception.class, requestFuture::actionGet); + assertThat(error, instanceOf(TaskCancelledException.class)); + } + + // Check that closing a remote node with skip_unavailable=true produces partial results + public void testCloseSkipUnavailable() throws Exception { + // We use delay() here because closing the cluster while it is blocked inside a pause field does not produce a clean closure + assumeTrue("Only snapshot builds have delay()", Build.current().isSnapshot()); + createRemoteIndex(between(1000, 5000)); + createLocalIndex(10); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); + request.query(""" + FROM test*,cluster-a:test* METADATA _index + | EVAL cluster=MV_FIRST(SPLIT(_index, ":")) + | WHERE CASE(cluster == "cluster-a", delay(1ms), true) + | STATS total = sum(const) | LIMIT 1 + """); + request.pragmas(randomPragmas()); + var requestFuture = client().execute(EsqlQueryAction.INSTANCE, request); + assertTrue(SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); + SimplePauseFieldPlugin.allowEmitting.countDown(); + cluster(REMOTE_CLUSTER).close(); + try (var resp = requestFuture.actionGet()) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isPartial(), equalTo(true)); + + List> values = getValuesList(resp); + assertThat(values.get(0).size(), equalTo(1)); + // We can't be sure of the exact value here since we don't know whether any remote data came in, but all local data should be there + assertThat((long) values.get(0).get(0), greaterThanOrEqualTo(10L)); + + EsqlExecutionInfo.Cluster cluster = executionInfo.getCluster(REMOTE_CLUSTER); + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); + + 
assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(localCluster.getSuccessfulShards(), equalTo(1)); + + assertThat(cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL)); + assertThat(cluster.getSuccessfulShards(), equalTo(0)); + assertThat(cluster.getFailures().size(), equalTo(1)); + } + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichIT.java similarity index 99% rename from x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java rename to x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichIT.java index 51ad4a0d2053f..d48f8af9c97e3 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichIT.java @@ -27,7 +27,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -public class CrossClustersEnrichIT extends AbstractEnrichBasedCrossClusterTestCase { +public class CrossClusterEnrichIT extends AbstractEnrichBasedCrossClusterTestCase { @Override protected Collection> nodePlugins(String clusterAlias) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java index 9c0447affc754..40ea21371e513 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java @@ -510,11 +510,17 @@ private static void assertCCSExecutionInfoDetails(EsqlExecutionInfo executionInf assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); assertTrue(executionInfo.isCrossClusterSearch()); + boolean hasPartials = false; for (String clusterAlias : executionInfo.clusterAliases()) { EsqlExecutionInfo.Cluster cluster = executionInfo.getCluster(clusterAlias); assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(cluster.getTook().millis(), lessThanOrEqualTo(executionInfo.overallTook().millis())); + if (cluster.getStatus() == EsqlExecutionInfo.Cluster.Status.PARTIAL + || cluster.getStatus() == EsqlExecutionInfo.Cluster.Status.SKIPPED) { + hasPartials = true; + } } + assertThat(executionInfo.isPartial(), equalTo(hasPartials)); } private void setSkipUnavailable(String clusterAlias, boolean skip) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueriesWithInvalidLicenseIT.java similarity index 99% rename from x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java rename to x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueriesWithInvalidLicenseIT.java index 
a9c8190976a02..be19180e1b4ad 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueriesWithInvalidLicenseIT.java @@ -27,7 +27,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -public class CrossClustersQueriesWithInvalidLicenseIT extends AbstractEnrichBasedCrossClusterTestCase { +public class CrossClusterQueriesWithInvalidLicenseIT extends AbstractEnrichBasedCrossClusterTestCase { private static final String LICENSE_ERROR_MESSAGE = "A valid Enterprise license is required to run ES|QL cross-cluster searches."; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryIT.java similarity index 90% rename from x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java rename to x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryIT.java index 1a99fd9d0383d..c1976c9fa2ad8 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryIT.java @@ -15,28 +15,24 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.lucene.DataPartitioning; import org.elasticsearch.compute.operator.DriverProfile; -import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.test.FailingFieldPlugin; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.XContentTestUtils; -import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -58,49 +54,19 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; -public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { - private static final String REMOTE_CLUSTER_1 = "cluster-a"; - private static final String REMOTE_CLUSTER_2 = "remote-b"; - private static String LOCAL_INDEX = "logs-1"; - private static String IDX_ALIAS = "alias1"; - private static String FILTERED_IDX_ALIAS = "alias-filtered-1"; - private static String REMOTE_INDEX = "logs-2"; - - @Override - protected List remoteClusterAlias() { - return 
List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2); - } +public class CrossClusterQueryIT extends AbstractCrossClusterTestCase { + private static final String IDX_ALIAS = "alias1"; + private static final String FILTERED_IDX_ALIAS = "alias-filtered-1"; @Override protected Map skipUnavailableForRemoteClusters() { return Map.of(REMOTE_CLUSTER_1, randomBoolean(), REMOTE_CLUSTER_2, randomBoolean()); } - @Override - protected Collection> nodePlugins(String clusterAlias) { - List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); - plugins.add(InternalExchangePlugin.class); - return plugins; - } - - public static class InternalExchangePlugin extends Plugin { - @Override - public List> getSettings() { - return List.of( - Setting.timeSetting( - ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, - TimeValue.timeValueSeconds(30), - Setting.Property.NodeScope - ) - ); - } - } - - public void testSuccessfulPathways() { + public void testSuccessfulPathways() throws Exception { Map testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); - int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); + int remoteNumShards = (Integer) testClusterInfo.get("remote1.num_shards"); Tuple includeCCSMetadata = randomIncludeCCSMetadata(); Boolean requestIncludeMeta = includeCCSMetadata.v1(); @@ -183,7 +149,7 @@ public void testSuccessfulPathways() { } } - public void testSearchesAgainstNonMatchingIndicesWithLocalOnly() { + public void testSearchesAgainstNonMatchingIndicesWithLocalOnly() throws Exception { Map testClusterInfo = setupTwoClusters(); String localIndex = (String) testClusterInfo.get("local.index"); @@ -230,7 +196,7 @@ public void testSearchesAgainstNonMatchingIndicesWithLocalOnly() { } } - public void testSearchesAgainstIndicesWithNoMappingsSkipUnavailableTrue() { + public void testSearchesAgainstIndicesWithNoMappingsSkipUnavailableTrue() throws Exception { int numClusters = 2; setupClusters(numClusters); Map clusterToEmptyIndexMap = createEmptyIndicesWithNoMappings(numClusters); @@ -288,13 +254,13 @@ public void testSearchesAgainstIndicesWithNoMappingsSkipUnavailableTrue() { } } - public void testSearchesAgainstNonMatchingIndices() { + public void testSearchesAgainstNonMatchingIndices() throws Exception { int numClusters = 3; Map testClusterInfo = setupClusters(numClusters); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); - int remote1NumShards = (Integer) testClusterInfo.get("remote.num_shards"); + int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards"); String localIndex = (String) testClusterInfo.get("local.index"); - String remote1Index = (String) testClusterInfo.get("remote.index"); + String remote1Index = (String) testClusterInfo.get("remote1.index"); String remote2Index = (String) testClusterInfo.get("remote2.index"); createIndexAliases(numClusters); @@ -470,6 +436,7 @@ public void assertExpectedClustersForMissingIndicesTests(EsqlExecutionInfo execu Set expectedClusterAliases = expected.stream().map(c -> c.clusterAlias()).collect(Collectors.toSet()); assertThat(executionInfo.clusterAliases(), equalTo(expectedClusterAliases)); + boolean hasSkipped = false; for (ExpectedCluster expectedCluster : expected) { EsqlExecutionInfo.Cluster cluster = executionInfo.getCluster(expectedCluster.clusterAlias()); String msg = cluster.getClusterAlias(); @@ -488,13 +455,15 @@ public void 
assertExpectedClustersForMissingIndicesTests(EsqlExecutionInfo execu assertThat(msg, cluster.getFailures().get(0).getCause(), instanceOf(VerificationException.class)); String expectedMsg = "Unknown index [" + expectedCluster.indexExpression() + "]"; assertThat(msg, cluster.getFailures().get(0).getCause().getMessage(), containsString(expectedMsg)); + hasSkipped = true; } // currently failed shards is always zero - change this once we start allowing partial data for individual shard failures assertThat(msg, cluster.getFailedShards(), equalTo(0)); } + assertThat(executionInfo.isPartial(), equalTo(hasSkipped)); } - public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { + public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() throws Exception { Map testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); @@ -537,6 +506,7 @@ public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.isPartial(), equalTo(true)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1, LOCAL_CLUSTER))); @@ -571,7 +541,7 @@ public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { * Note: the tests covering "nonmatching indices" also do LIMIT 0 tests. * This one is mostly focuses on took time values. */ - public void testCCSExecutionOnSearchesWithLimit0() { + public void testCCSExecutionOnSearchesWithLimit0() throws Exception { setupTwoClusters(); Tuple includeCCSMetadata = randomIncludeCCSMetadata(); Boolean requestIncludeMeta = includeCCSMetadata.v1(); @@ -593,6 +563,7 @@ public void testCCSExecutionOnSearchesWithLimit0() { long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.isPartial(), equalTo(false)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1, LOCAL_CLUSTER))); EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1); @@ -617,10 +588,10 @@ public void testCCSExecutionOnSearchesWithLimit0() { } } - public void testMetadataIndex() { + public void testMetadataIndex() throws Exception { Map testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); - int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); + int remoteNumShards = (Integer) testClusterInfo.get("remote1.num_shards"); Tuple includeCCSMetadata = randomIncludeCCSMetadata(); Boolean requestIncludeMeta = includeCCSMetadata.v1(); @@ -641,6 +612,7 @@ public void testMetadataIndex() { assertThat(executionInfo.isCrossClusterSearch(), is(true)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(executionInfo.isPartial(), equalTo(false)); EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1); assertThat(remoteCluster.getIndexExpression(), equalTo("logs*")); @@ -662,10 +634,10 @@ public void testMetadataIndex() { } } - public void testProfile() { + public void testProfile() throws Exception { Map testClusterInfo = setupTwoClusters(); int 
localNumShards = (Integer) testClusterInfo.get("local.num_shards"); - int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); + int remoteNumShards = (Integer) testClusterInfo.get("remote1.num_shards"); assumeTrue("pragmas only enabled on snapshot builds", Build.current().isSnapshot()); // uses shard partitioning as segments can be merged during these queries @@ -785,7 +757,7 @@ public void testProfile() { public void testWarnings() throws Exception { Map testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); - int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); + int remoteNumShards = (Integer) testClusterInfo.get("remote1.num_shards"); EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("FROM logs*,c*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); @@ -836,6 +808,17 @@ public void testWarnings() throws Exception { assertTrue(latch.await(30, TimeUnit.SECONDS)); } + // Non-disconnect remote failures still fail the request even if skip_unavailable is true + public void testRemoteFailureSkipUnavailableTrue() throws IOException { + Map testClusterInfo = setupFailClusters(); + String localIndex = (String) testClusterInfo.get("local.index"); + String remote1Index = (String) testClusterInfo.get("remote.index"); + int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); + String q = Strings.format("FROM %s,cluster-a:%s*", localIndex, remote1Index); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> runQuery(q, false)); + assertThat(e.getMessage(), containsString("Accessing failing field")); + } + private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean responseExpectMeta) { try { final Map esqlResponseAsMap = XContentTestUtils.convertToMap(resp); @@ -855,22 +838,6 @@ private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, bool } } - protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { - EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query(query); - request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); - request.profile(randomInt(5) == 2); - request.columnar(randomBoolean()); - if (ccsMetadataInResponse != null) { - request.includeCCSMetadata(ccsMetadataInResponse); - } - return runQuery(request); - } - - protected EsqlQueryResponse runQuery(EsqlQueryRequest request) { - return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); - } - void waitForNoInitializingShards(Client client, TimeValue timeout, String... indices) { ClusterHealthResponse resp = client.admin() .cluster() @@ -883,41 +850,10 @@ void waitForNoInitializingShards(Client client, TimeValue timeout, String... 
ind assertFalse(Strings.toString(resp, true, true), resp.isTimedOut()); } - Map setupTwoClusters() { + Map setupTwoClusters() throws IOException { return setupClusters(2); } - Map setupClusters(int numClusters) { - assert numClusters == 2 || numClusters == 3 : "2 or 3 clusters supported not: " + numClusters; - int numShardsLocal = randomIntBetween(1, 5); - populateLocalIndices(LOCAL_INDEX, numShardsLocal); - - int numShardsRemote = randomIntBetween(1, 5); - populateRemoteIndices(REMOTE_CLUSTER_1, REMOTE_INDEX, numShardsRemote); - - Map clusterInfo = new HashMap<>(); - clusterInfo.put("local.num_shards", numShardsLocal); - clusterInfo.put("local.index", LOCAL_INDEX); - clusterInfo.put("remote.num_shards", numShardsRemote); - clusterInfo.put("remote.index", REMOTE_INDEX); - - if (numClusters == 3) { - int numShardsRemote2 = randomIntBetween(1, 5); - populateRemoteIndices(REMOTE_CLUSTER_2, REMOTE_INDEX, numShardsRemote2); - clusterInfo.put("remote2.index", REMOTE_INDEX); - clusterInfo.put("remote2.num_shards", numShardsRemote2); - } - - String skipUnavailableKey = Strings.format("cluster.remote.%s.skip_unavailable", REMOTE_CLUSTER_1); - Setting skipUnavailableSetting = cluster(REMOTE_CLUSTER_1).clusterService().getClusterSettings().get(skipUnavailableKey); - boolean skipUnavailable = (boolean) cluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY).clusterService() - .getClusterSettings() - .get(skipUnavailableSetting); - clusterInfo.put("remote.skip_unavailable", skipUnavailable); - - return clusterInfo; - } - /** * For the local cluster and REMOTE_CLUSTER_1 it creates a standard alias to the index created in populateLocalIndices * and populateRemoteIndices. It also creates a filtered alias against those indices that looks like: @@ -1010,52 +946,45 @@ Map createEmptyIndicesWithNoMappings(int numClusters) { return clusterToEmptyIndexMap; } - void populateLocalIndices(String indexName, int numShards) { - Client localClient = client(LOCAL_CLUSTER); - assertAcked( - localClient.admin() - .indices() - .prepareCreate(indexName) - .setSettings(Settings.builder().put("index.number_of_shards", numShards)) - .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long") - ); - for (int i = 0; i < 10; i++) { - localClient.prepareIndex(indexName).setSource("id", "local-" + i, "tag", "local", "v", i).get(); - } - localClient.admin().indices().prepareRefresh(indexName).get(); + Map setupFailClusters() throws IOException { + int numShardsLocal = randomIntBetween(1, 3); + populateLocalIndices(LOCAL_INDEX, numShardsLocal); + + int numShardsRemote = randomIntBetween(1, 3); + populateRemoteIndicesFail(REMOTE_CLUSTER_1, REMOTE_INDEX, numShardsRemote); + + Map clusterInfo = new HashMap<>(); + clusterInfo.put("local.num_shards", numShardsLocal); + clusterInfo.put("local.index", LOCAL_INDEX); + clusterInfo.put("remote.num_shards", numShardsRemote); + clusterInfo.put("remote.index", REMOTE_INDEX); + setSkipUnavailable(REMOTE_CLUSTER_1, true); + return clusterInfo; } - void populateRemoteIndices(String clusterAlias, String indexName, int numShards) { + void populateRemoteIndicesFail(String clusterAlias, String indexName, int numShards) throws IOException { Client remoteClient = client(clusterAlias); + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("runtime"); + { + mapping.startObject("fail_me"); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", FailingFieldPlugin.FAILING_FIELD_LANG).endObject(); + } + 
mapping.endObject(); + } + mapping.endObject(); assertAcked( remoteClient.admin() .indices() .prepareCreate(indexName) .setSettings(Settings.builder().put("index.number_of_shards", numShards)) - .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long") + .setMapping(mapping.endObject()) ); - for (int i = 0; i < 10; i++) { - remoteClient.prepareIndex(indexName).setSource("id", "remote-" + i, "tag", "remote", "v", i * i).get(); - } - remoteClient.admin().indices().prepareRefresh(indexName).get(); - } - private void setSkipUnavailable(String clusterAlias, boolean skip) { - client(LOCAL_CLUSTER).admin() - .cluster() - .prepareUpdateSettings(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT) - .setPersistentSettings(Settings.builder().put("cluster.remote." + clusterAlias + ".skip_unavailable", skip).build()) - .get(); + remoteClient.prepareIndex(indexName).setSource("id", 0).get(); + remoteClient.admin().indices().prepareRefresh(indexName).get(); } - private void clearSkipUnavailable() { - Settings.Builder settingsBuilder = Settings.builder() - .putNull("cluster.remote." + REMOTE_CLUSTER_1 + ".skip_unavailable") - .putNull("cluster.remote." + REMOTE_CLUSTER_2 + ".skip_unavailable"); - client(LOCAL_CLUSTER).admin() - .cluster() - .prepareUpdateSettings(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT) - .setPersistentSettings(settingsBuilder.build()) - .get(); - } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java index eb728895cd00c..667ac23461000 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java @@ -8,27 +8,13 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.compute.operator.exchange.ExchangeService; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.AbstractMultiClustersTestCase; -import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.xpack.esql.core.type.DataType; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase.randomIncludeCCSMetadata; import static org.hamcrest.Matchers.equalTo; @@ -37,41 +23,13 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; -public class CrossClusterQueryUnavailableRemotesIT extends AbstractMultiClustersTestCase { - private static final String REMOTE_CLUSTER_1 = "cluster-a"; - private static final String REMOTE_CLUSTER_2 = "cluster-b"; - - @Override - protected List remoteClusterAlias() { - return List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2); - } 
+public class CrossClusterQueryUnavailableRemotesIT extends AbstractCrossClusterTestCase { @Override protected boolean reuseClusters() { return false; } - @Override - protected Collection> nodePlugins(String clusterAlias) { - List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); - plugins.add(CrossClustersQueryIT.InternalExchangePlugin.class); - return plugins; - } - - public static class InternalExchangePlugin extends Plugin { - @Override - public List> getSettings() { - return List.of( - Setting.timeSetting( - ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, - TimeValue.timeValueSeconds(30), - Setting.Property.NodeScope - ) - ); - } - } - public void testCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() throws Exception { int numClusters = 3; Map testClusterInfo = setupClusters(numClusters); @@ -99,6 +57,7 @@ public void testCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() throws Exc long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.isPartial(), equalTo(true)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2, LOCAL_CLUSTER))); @@ -133,7 +92,7 @@ public void testCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() throws Exc assertThat(localCluster.getFailedShards(), equalTo(0)); // ensure that the _clusters metadata is present only if requested - assertClusterMetadataInResponse(resp, responseExpectMeta); + assertClusterMetadataInResponse(resp, responseExpectMeta, numClusters); } // scenario where there are no indices to match because @@ -151,6 +110,7 @@ public void testCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() throws Exc long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.isPartial(), equalTo(true)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2, LOCAL_CLUSTER))); @@ -186,7 +146,7 @@ public void testCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() throws Exc assertThat(localCluster.getFailedShards(), equalTo(0)); // ensure that the _clusters metadata is present only if requested - assertClusterMetadataInResponse(resp, responseExpectMeta); + assertClusterMetadataInResponse(resp, responseExpectMeta, numClusters); } // close remote-cluster-2 so that it is also unavailable @@ -203,6 +163,7 @@ public void testCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() throws Exc long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.isPartial(), equalTo(true)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2, LOCAL_CLUSTER))); @@ -237,7 +198,7 @@ public void testCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() throws Exc assertThat(localCluster.getFailedShards(), equalTo(0)); // ensure that the _clusters metadata is present only if requested - assertClusterMetadataInResponse(resp, responseExpectMeta); + assertClusterMetadataInResponse(resp, responseExpectMeta, numClusters); } } finally { clearSkipUnavailable(numClusters); @@ -275,6 +236,7 @@ public void 
testRemoteOnlyCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.isPartial(), equalTo(true)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1))); @@ -289,7 +251,7 @@ public void testRemoteOnlyCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() assertThat(remoteCluster.getFailedShards(), equalTo(0)); // ensure that the _clusters metadata is present only if requested - assertClusterMetadataInResponse(resp, responseExpectMeta); + assertClusterMetadataInResponse(resp, responseExpectMeta, 1); } // close remote cluster 2 so that it is also unavailable @@ -317,6 +279,7 @@ public void testRemoteOnlyCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + assertThat(executionInfo.isPartial(), equalTo(true)); assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2))); @@ -341,7 +304,7 @@ public void testRemoteOnlyCCSAgainstDisconnectedRemoteWithSkipUnavailableTrue() assertThat(remote2Cluster.getFailedShards(), equalTo(0)); // ensure that the _clusters metadata is present only if requested - assertClusterMetadataInResponse(resp, responseExpectMeta); + assertClusterMetadataInResponse(resp, responseExpectMeta, 2); } } finally { @@ -396,116 +359,4 @@ public void testRemoteOnlyCCSAgainstDisconnectedRemoteWithSkipUnavailableFalse() clearSkipUnavailable(numClusters); } } - - private void setSkipUnavailable(String clusterAlias, boolean skip) { - client(LOCAL_CLUSTER).admin() - .cluster() - .prepareUpdateSettings(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT) - .setPersistentSettings(Settings.builder().put("cluster.remote." + clusterAlias + ".skip_unavailable", skip).build()) - .get(); - } - - private void clearSkipUnavailable(int numClusters) { - assert numClusters == 2 || numClusters == 3 : "Only 2 or 3 clusters supported"; - Settings.Builder settingsBuilder = Settings.builder().putNull("cluster.remote." + REMOTE_CLUSTER_1 + ".skip_unavailable"); - if (numClusters == 3) { - settingsBuilder.putNull("cluster.remote." 
+ REMOTE_CLUSTER_2 + ".skip_unavailable"); - } - client(LOCAL_CLUSTER).admin() - .cluster() - .prepareUpdateSettings(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT) - .setPersistentSettings(settingsBuilder.build()) - .get(); - } - - private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean responseExpectMeta) { - try { - final Map esqlResponseAsMap = XContentTestUtils.convertToMap(resp); - final Object clusters = esqlResponseAsMap.get("_clusters"); - if (responseExpectMeta) { - assertNotNull(clusters); - // test a few entries to ensure it looks correct (other tests do a full analysis of the metadata in the response) - @SuppressWarnings("unchecked") - Map inner = (Map) clusters; - assertTrue(inner.containsKey("total")); - assertTrue(inner.containsKey("details")); - } else { - assertNull(clusters); - } - } catch (IOException e) { - fail("Could not convert ESQL response to Map: " + e); - } - } - - protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { - EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query(query); - request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); - request.profile(randomInt(5) == 2); - request.columnar(randomBoolean()); - if (ccsMetadataInResponse != null) { - request.includeCCSMetadata(ccsMetadataInResponse); - } - return runQuery(request); - } - - protected EsqlQueryResponse runQuery(EsqlQueryRequest request) { - return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); - } - - Map setupClusters(int numClusters) { - assert numClusters == 2 || numClusters == 3 : "2 or 3 clusters supported not: " + numClusters; - String localIndex = "logs-1"; - int numShardsLocal = randomIntBetween(1, 5); - populateLocalIndices(localIndex, numShardsLocal); - - String remoteIndex = "logs-2"; - int numShardsRemote = randomIntBetween(1, 5); - populateRemoteIndices(REMOTE_CLUSTER_1, remoteIndex, numShardsRemote); - - Map clusterInfo = new HashMap<>(); - clusterInfo.put("local.num_shards", numShardsLocal); - clusterInfo.put("local.index", localIndex); - clusterInfo.put("remote.num_shards", numShardsRemote); - clusterInfo.put("remote.index", remoteIndex); - - if (numClusters == 3) { - int numShardsRemote2 = randomIntBetween(1, 5); - populateRemoteIndices(REMOTE_CLUSTER_2, remoteIndex, numShardsRemote2); - clusterInfo.put("remote2.index", remoteIndex); - clusterInfo.put("remote2.num_shards", numShardsRemote2); - } - - return clusterInfo; - } - - void populateLocalIndices(String indexName, int numShards) { - Client localClient = client(LOCAL_CLUSTER); - assertAcked( - localClient.admin() - .indices() - .prepareCreate(indexName) - .setSettings(Settings.builder().put("index.number_of_shards", numShards)) - .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long") - ); - for (int i = 0; i < 10; i++) { - localClient.prepareIndex(indexName).setSource("id", "local-" + i, "tag", "local", "v", i).get(); - } - localClient.admin().indices().prepareRefresh(indexName).get(); - } - - void populateRemoteIndices(String clusterAlias, String indexName, int numShards) { - Client remoteClient = client(clusterAlias); - assertAcked( - remoteClient.admin() - .indices() - .prepareCreate(indexName) - .setSettings(Settings.builder().put("index.number_of_shards", numShards)) - .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long") - ); - for (int i = 0; i < 10; i++) { - remoteClient.prepareIndex(indexName).setSource("id", "remote-" + i, "tag", 
"remote", "v", i * i).get(); - } - remoteClient.admin().indices().prepareRefresh(indexName).get(); - } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterUsageTelemetryIT.java similarity index 99% rename from x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java rename to x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterUsageTelemetryIT.java index 89f7fdca79135..fdfbe9c6bf9d5 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterUsageTelemetryIT.java @@ -34,14 +34,14 @@ import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId; import static org.hamcrest.Matchers.equalTo; -public class CrossClustersUsageTelemetryIT extends AbstractCrossClustersUsageTelemetryIT { +public class CrossClusterUsageTelemetryIT extends AbstractCrossClusterUsageTelemetryIT { private static final String INDEX_WITH_RUNTIME_MAPPING = "blocking"; @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); - plugins.add(CrossClustersQueryIT.InternalExchangePlugin.class); + plugins.add(CrossClusterQueryIT.InternalExchangePlugin.class); plugins.add(SimplePauseFieldPlugin.class); plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action return plugins; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryNoLicenseIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterUsageTelemetryNoLicenseIT.java similarity index 89% rename from x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryNoLicenseIT.java rename to x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterUsageTelemetryNoLicenseIT.java index 2b993e9474062..70a93f4e35120 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryNoLicenseIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterUsageTelemetryNoLicenseIT.java @@ -17,13 +17,13 @@ import static org.hamcrest.Matchers.equalTo; -public class CrossClustersUsageTelemetryNoLicenseIT extends AbstractCrossClustersUsageTelemetryIT { +public class CrossClusterUsageTelemetryNoLicenseIT extends AbstractCrossClusterUsageTelemetryIT { @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); plugins.add(EsqlPluginWithNonEnterpriseOrExpiredLicense.class); - plugins.add(CrossClustersQueryIT.InternalExchangePlugin.class); + plugins.add(CrossClusterQueryIT.InternalExchangePlugin.class); return plugins; } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 
2d0a15436bf82..b15e4cfe739f0 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -38,6 +38,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.hamcrest.Matcher; import org.junit.Before; import java.io.IOException; @@ -75,9 +76,6 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { private static final Logger LOGGER = LogManager.getLogger(EsqlActionTaskIT.class); - private String READ_DESCRIPTION; - private String MERGE_DESCRIPTION; - private String REDUCE_DESCRIPTION; private boolean nodeLevelReduction; /** @@ -89,21 +87,6 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); nodeLevelReduction = randomBoolean(); - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES] - \\_ValuesSourceReaderOperator[fields = [pause_me]] - \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_AggregationOperator[mode = FINAL, aggs = sum of longs] - \\_ProjectOperator[projection = [0]] - \\_LimitOperator[limit = 1000] - \\_OutputOperator[columns = [sum(pause_me)]]"""; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction ? "\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n" : "") - + "\\_ExchangeSinkOperator"; - } public void testTaskContents() throws Exception { @@ -120,9 +103,11 @@ public void testTaskContents() throws Exception { for (TaskInfo task : foundTasks) { DriverStatus status = (DriverStatus) task.status(); assertThat(status.sessionId(), not(emptyOrNullString())); + String taskDescription = status.taskDescription(); for (DriverStatus.OperatorStatus o : status.activeOperators()) { logger.info("status {}", o); if (o.operator().startsWith("LuceneSourceOperator[maxPageSize = " + pageSize())) { + assertThat(taskDescription, equalTo("data")); LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status(); assertThat(oStatus.processedSlices(), lessThanOrEqualTo(oStatus.totalSlices())); assertThat(oStatus.processedQueries(), equalTo(Set.of("*:*"))); @@ -142,6 +127,7 @@ public void testTaskContents() throws Exception { continue; } if (o.operator().equals("ValuesSourceReaderOperator[fields = [pause_me]]")) { + assertThat(taskDescription, equalTo("data")); ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status(); assertMap( oStatus.readersBuilt(), @@ -152,6 +138,7 @@ public void testTaskContents() throws Exception { continue; } if (o.operator().equals("ExchangeSourceOperator")) { + assertThat(taskDescription, either(equalTo("node_reduce")).or(equalTo("final"))); ExchangeSourceOperator.Status oStatus = (ExchangeSourceOperator.Status) o.status(); assertThat(oStatus.pagesWaiting(), greaterThanOrEqualTo(0)); assertThat(oStatus.pagesEmitted(), greaterThanOrEqualTo(0)); @@ -159,6 +146,7 @@ public void testTaskContents() throws Exception { continue; } if (o.operator().equals("ExchangeSinkOperator")) { + assertThat(taskDescription, 
either(equalTo("data")).or(equalTo("node_reduce"))); ExchangeSinkOperator.Status oStatus = (ExchangeSinkOperator.Status) o.status(); assertThat(oStatus.pagesReceived(), greaterThanOrEqualTo(0)); exchangeSinks++; @@ -169,6 +157,29 @@ public void testTaskContents() throws Exception { assertThat(valuesSourceReaders, equalTo(1)); assertThat(exchangeSinks, greaterThanOrEqualTo(1)); assertThat(exchangeSources, equalTo(2)); + assertThat( + dataTasks(foundTasks).get(0).description(), + equalTo( + """ + \\_LuceneSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] + \\_ExchangeSinkOperator""".replace( + "sourceStatus", + "dataPartitioning = SHARD, maxPageSize = " + pageSize() + ", limit = 2147483647, scoreMode = COMPLETE_NO_SCORES" + ) + ) + ); + assertThat( + nodeReduceTasks(foundTasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(foundTasks, "\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n") + ); + assertThat(coordinatorTasks(foundTasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_AggregationOperator[mode = FINAL, aggs = sum of longs] + \\_ProjectOperator[projection = [0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [sum(pause_me)]]""")); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -181,7 +192,7 @@ public void testCancelRead() throws Exception { ActionFuture response = startEsql(); try { List infos = getTasksStarting(); - TaskInfo running = infos.stream().filter(t -> t.description().equals(READ_DESCRIPTION)).findFirst().get(); + TaskInfo running = infos.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("data")).findFirst().get(); cancelTask(running.taskId()); assertCancelled(response); } finally { @@ -193,7 +204,7 @@ public void testCancelMerge() throws Exception { ActionFuture response = startEsql(); try { List infos = getTasksStarting(); - TaskInfo running = infos.stream().filter(t -> t.description().equals(MERGE_DESCRIPTION)).findFirst().get(); + TaskInfo running = infos.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("final")).findFirst().get(); cancelTask(running.taskId()); assertCancelled(response); } finally { @@ -277,8 +288,8 @@ private List getTasksStarting() throws Exception { for (TaskInfo task : tasks) { assertThat(task.action(), equalTo(DriverTaskRunner.ACTION_NAME)); DriverStatus status = (DriverStatus) task.status(); - logger.info("task {} {}", task.description(), status); - assertThat(task.description(), anyOf(equalTo(READ_DESCRIPTION), equalTo(MERGE_DESCRIPTION), equalTo(REDUCE_DESCRIPTION))); + logger.info("task {} {} {}", status.taskDescription(), task.description(), status); + assertThat(status.taskDescription(), anyOf(equalTo("data"), equalTo("node_reduce"), equalTo("final"))); /* * Accept tasks that are either starting or have gone * immediately async. 
The coordinating task is likely @@ -302,8 +313,8 @@ private List<TaskInfo> getTasksRunning() throws Exception { for (TaskInfo task : tasks) { assertThat(task.action(), equalTo(DriverTaskRunner.ACTION_NAME)); DriverStatus status = (DriverStatus) task.status(); - assertThat(task.description(), anyOf(equalTo(READ_DESCRIPTION), equalTo(MERGE_DESCRIPTION), equalTo(REDUCE_DESCRIPTION))); - if (task.description().equals(READ_DESCRIPTION)) { + assertThat(status.taskDescription(), anyOf(equalTo("data"), equalTo("node_reduce"), equalTo("final"))); + if (status.taskDescription().equals("data")) { assertThat(status.status(), equalTo(DriverStatus.Status.RUNNING)); } else { assertThat(status.status(), equalTo(DriverStatus.Status.ASYNC)); @@ -328,23 +339,26 @@ private List<TaskInfo> getDriverTasks() throws Exception { .get() .getTasks(); assertThat(tasks, hasSize(equalTo(3))); - List<TaskInfo> readTasks = tasks.stream().filter(t -> t.description().equals(READ_DESCRIPTION)).toList(); - List<TaskInfo> mergeTasks = tasks.stream().filter(t -> t.description().equals(MERGE_DESCRIPTION)).toList(); - assertThat(readTasks, hasSize(1)); - assertThat(mergeTasks, hasSize(1)); - // node-level reduction is disabled when the target data node is also the coordinator - if (readTasks.get(0).node().equals(mergeTasks.get(0).node())) { - REDUCE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_ExchangeSinkOperator"""; - } - List<TaskInfo> reduceTasks = tasks.stream().filter(t -> t.description().equals(REDUCE_DESCRIPTION)).toList(); - assertThat(reduceTasks, hasSize(1)); + assertThat(dataTasks(tasks), hasSize(1)); + assertThat(nodeReduceTasks(tasks), hasSize(1)); + assertThat(coordinatorTasks(tasks), hasSize(1)); foundTasks.addAll(tasks); }); return foundTasks; } + private List<TaskInfo> dataTasks(List<TaskInfo> tasks) { + return tasks.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("data")).toList(); + } + + private List<TaskInfo> nodeReduceTasks(List<TaskInfo> tasks) { + return tasks.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("node_reduce")).toList(); + } + + private List<TaskInfo> coordinatorTasks(List<TaskInfo> tasks) { + return tasks.stream().filter(t -> ((DriverStatus) t.status()).taskDescription().equals("final")).toList(); + } + private void assertCancelled(ActionFuture<EsqlQueryResponse> response) throws Exception { Exception e = expectThrows(Exception.class, response); Throwable cancelException = ExceptionsHelper.unwrap(e, TaskCancelledException.class); @@ -462,7 +476,9 @@ protected void doRun() throws Exception { } Exception failure = expectThrows(Exception.class, () -> future.actionGet().close()); EsqlTestUtils.assertEsqlFailure(failure); - assertThat(failure.getMessage(), containsString("failed to fetch pages")); + Throwable cause = ExceptionsHelper.unwrap(failure, IOException.class); + assertNotNull(cause); + assertThat(cause.getMessage(), containsString("failed to fetch pages")); // If we proceed without waiting for pages, we might cancel the main request before starting the data-node request. // As a result, the exchange sinks on data-nodes won't be removed until the inactive_timeout elapses, which is // longer than the assertBusy timeout.
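The hunks above replace matching on full operator-tree description strings with the new DriverStatus.taskDescription() role label, one of "data", "node_reduce", or "final". A minimal sketch of the selection pattern these tests now rely on (the helper name here is illustrative; the diff's own versions are dataTasks, nodeReduceTasks, and coordinatorTasks):

    // Select the driver tasks playing a given role; the role comes from
    // DriverStatus.taskDescription() rather than from the operator listing.
    private static List<TaskInfo> tasksWithRole(List<TaskInfo> tasks, String role) {
        return tasks.stream()
            .filter(t -> ((DriverStatus) t.status()).taskDescription().equals(role))
            .toList();
    }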
@@ -475,30 +491,41 @@ protected void doRun() throws Exception { } public void testTaskContentsForTopNQuery() throws Exception { - READ_DESCRIPTION = ("\\_LuceneTopNSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 1000, " - + "scoreMode = TOP_DOCS, " - + "sorts = [{\"pause_me\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"long\"}}]]\n" - + "\\_ValuesSourceReaderOperator[fields = [pause_me]]\n" - + "\\_ProjectOperator[projection = [1]]\n" - + "\\_ExchangeSinkOperator").replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " - + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" - + "\\_ProjectOperator[projection = [0]]\n" - + "\\_OutputOperator[columns = [pause_me]]"; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction - ? "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " - + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" - : "") - + "\\_ExchangeSinkOperator"; - ActionFuture response = startEsql("from test | sort pause_me | keep pause_me"); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize()); - getTasksRunning(); + List tasks = getTasksRunning(); + String sortStatus = """ + [{"pause_me":{"order":"asc","missing":"_last","unmapped_type":"long"}}]"""; + String sourceStatus = "dataPartitioning = SHARD, maxPageSize = " + + pageSize() + + ", limit = 1000, scoreMode = TOP_DOCS, sorts = " + + sortStatus; + assertThat(dataTasks(tasks).get(0).description(), equalTo(""" + \\_LuceneTopNSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_ProjectOperator[projection = [1]] + \\_ExchangeSinkOperator""".replace("sourceStatus", sourceStatus))); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher( + tasks, + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + ) + ); + assertThat( + coordinatorTasks(tasks).get(0).description(), + equalTo( + "\\_ExchangeSourceOperator[]\n" + + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + + "\\_ProjectOperator[projection = [0]]\n" + + "\\_OutputOperator[columns = [pause_me]]" + ) + ); } finally { // each scripted field "emit" is called by LuceneTopNSourceOperator and by ValuesSourceReaderOperator scriptPermits.release(2 * numberOfDocs()); @@ -510,26 +537,26 @@ public void testTaskContentsForTopNQuery() throws Exception { public void testTaskContentsForLimitQuery() throws Exception { String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit(), scoreMode = COMPLETE_NO_SCORES] - \\_ValuesSourceReaderOperator[fields = [pause_me]] - \\_ProjectOperator[projection = [1]] - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_LimitOperator[limit = limit()] - \\_ProjectOperator[projection = [0]] - \\_OutputOperator[columns = [pause_me]]""".replace("limit()", limit); - REDUCE_DESCRIPTION = ("\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction 
? "\\_LimitOperator[limit = limit()]\n" : "") - + "\\_ExchangeSinkOperator").replace("limit()", limit); - ActionFuture response = startEsql("from test | keep pause_me | limit " + limit); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize() - prereleasedDocs); - getTasksRunning(); + List tasks = getTasksRunning(); + assertThat(dataTasks(tasks).get(0).description(), equalTo(""" + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit(), scoreMode = COMPLETE_NO_SCORES] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_ProjectOperator[projection = [1]] + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit))); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(tasks, "\\_LimitOperator[limit = " + limit + "]\n") + ); + assertThat(coordinatorTasks(tasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_LimitOperator[limit = limit()] + \\_ProjectOperator[projection = [0]] + \\_OutputOperator[columns = [pause_me]]""".replace("limit()", limit))); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -539,27 +566,35 @@ public void testTaskContentsForLimitQuery() throws Exception { } public void testTaskContentsForGroupingStatsQuery() throws Exception { - READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES] - \\_ValuesSourceReaderOperator[fields = [foo]] - \\_OrdinalsGroupingOperator(aggs = max of longs) - \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); - MERGE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_HashAggregationOperator[mode = , aggs = max of longs] - \\_ProjectOperator[projection = [1, 0]] - \\_LimitOperator[limit = 1000] - \\_OutputOperator[columns = [max(foo), pause_me]]"""; - REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" - + (nodeLevelReduction ? 
"\\_HashAggregationOperator[mode = , aggs = max of longs]\n" : "") - + "\\_ExchangeSinkOperator"; - ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); try { getTasksStarting(); logger.info("unblocking script"); scriptPermits.release(pageSize()); - getTasksRunning(); + List tasks = getTasksRunning(); + String sourceStatus = "dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES" + .replace("pageSize()", Integer.toString(pageSize())); + assertThat( + dataTasks(tasks).get(0).description(), + equalTo( + """ + \\_LuceneSourceOperator[sourceStatus] + \\_ValuesSourceReaderOperator[fields = [foo]] + \\_OrdinalsGroupingOperator(aggs = max of longs) + \\_ExchangeSinkOperator""".replace("sourceStatus", sourceStatus) + + ) + ); + assertThat( + nodeReduceTasks(tasks).get(0).description(), + nodeLevelReduceDescriptionMatcher(tasks, "\\_HashAggregationOperator[mode = , aggs = max of longs]\n") + ); + assertThat(coordinatorTasks(tasks).get(0).description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_HashAggregationOperator[mode = , aggs = max of longs] + \\_ProjectOperator[projection = [1, 0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [max(foo), pause_me]]""")); } finally { scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { @@ -570,6 +605,13 @@ public void testTaskContentsForGroupingStatsQuery() throws Exception { } } + private Matcher nodeLevelReduceDescriptionMatcher(List tasks, String nodeReduce) { + boolean matchNodeReduction = nodeLevelReduction + // If the data node and the coordinator are the same node then we don't reduce aggs in it. + && false == dataTasks(tasks).get(0).node().equals(coordinatorTasks(tasks).get(0).node()); + return equalTo("\\_ExchangeSourceOperator[]\n" + (matchNodeReduction ? 
nodeReduce : "") + "\\_ExchangeSinkOperator"); + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java index d7117fb5e0750..ca10e5d3419ac 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java @@ -98,7 +98,7 @@ public static void waitForCluster(Client client, String clusterName, String asyn } assertThat(clusterInfo.getStatus(), not(equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING))); } - }); + }, 30, TimeUnit.SECONDS); } public static EsqlQueryResponse runAsyncQuery(Client client, EsqlQueryRequest request) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java index 1e34421097aac..1118121b0becb 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.FailingFieldPlugin; @@ -27,9 +29,23 @@ */ @ESIntegTestCase.ClusterScope(minNumDataNodes = 2) public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase { + @Override protected Collection> nodePlugins() { - return CollectionUtils.appendToCopy(super.nodePlugins(), FailingFieldPlugin.class); + var plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(FailingFieldPlugin.class); + plugins.add(InternalExchangePlugin.class); + return plugins; + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + Settings settings = Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(3000, 4000))) + .build(); + logger.info("settings {}", settings); + return settings; } /** @@ -49,7 +65,7 @@ public void testFailureLoadingFields() throws IOException { mapping.endObject(); client().admin().indices().prepareCreate("fail").setSettings(indexSettings(1, 0)).setMapping(mapping.endObject()).get(); - int docCount = 100; + int docCount = 50; List docs = new ArrayList<>(docCount); for (int d = 0; d < docCount; d++) { docs.add(client().prepareIndex("ok").setSource("foo", d)); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlRetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlRetryIT.java new file mode 100644 index 0000000000000..05b2211deecb8 --- /dev/null +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlRetryIT.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.plugin.ComputeService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.index.shard.IndexShardTestCase.closeShardNoCheck; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; + +public class EsqlRetryIT extends AbstractEsqlIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(MockTransportService.TestPlugin.class); + return plugins; + } + + public void testRetryOnShardFailures() throws Exception { + populateIndices(); + try { + final AtomicBoolean relocated = new AtomicBoolean(); + for (String node : internalCluster().getNodeNames()) { + // fail some target shards while handling the data node request + MockTransportService.getInstance(node) + .addRequestHandlingBehavior(ComputeService.DATA_ACTION_NAME, (handler, request, channel, task) -> { + if (relocated.compareAndSet(false, true)) { + closeOrFailShards(node); + } + handler.messageReceived(request, channel, task); + }); + } + try (var resp = run("FROM log-* | STATS COUNT(timestamp) | LIMIT 1")) { + assertThat(EsqlTestUtils.getValuesList(resp).get(0).get(0), equalTo(7L)); + } + } finally { + for (String node : internalCluster().getNodeNames()) { + MockTransportService.getInstance(node).clearAllRules(); + } + } + } + + private void populateIndices() { + internalCluster().ensureAtLeastNumDataNodes(2); + assertAcked(prepareCreate("log-index-1").setSettings(indexSettings(between(1, 3), 1)).setMapping("timestamp", "type=date")); + assertAcked(prepareCreate("log-index-2").setSettings(indexSettings(between(1, 3), 1)).setMapping("timestamp", "type=date")); + List<IndexRequestBuilder> reqs = new ArrayList<>(); + reqs.add(prepareIndex("log-index-1").setSource("timestamp", "2015-07-08")); + reqs.add(prepareIndex("log-index-1").setSource("timestamp", "2018-07-08")); + reqs.add(prepareIndex("log-index-1").setSource("timestamp", "2020-03-03")); + reqs.add(prepareIndex("log-index-1").setSource("timestamp", "2020-09-09")); + reqs.add(prepareIndex("log-index-2").setSource("timestamp", "2019-10-12")); + reqs.add(prepareIndex("log-index-2").setSource("timestamp", "2020-02-02")); + reqs.add(prepareIndex("log-index-2").setSource("timestamp", "2020-10-10")); + indexRandom(true, reqs); + ensureGreen("log-index-1", "log-index-2"); + indicesAdmin().prepareRefresh("log-index-1", "log-index-2").get(); + } + + private void closeOrFailShards(String nodeName) throws Exception { + final IndicesService indicesService = internalCluster().getInstance(IndicesService.class,
nodeName); + for (IndexService indexService : indicesService) { + for (IndexShard indexShard : indexService) { + if (randomBoolean()) { + indexShard.failShard("simulated", new IOException("simulated failure")); + } else if (randomBoolean()) { + closeShardNoCheck(indexShard); + } + } + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java index 010931432e2e8..ad21a700d764f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java @@ -29,6 +29,10 @@ public static void resetPlugin() { startEmitting = new CountDownLatch(1); } + public static void release() { + allowEmitting.countDown(); + } + @Override public void onStartExecute() { startEmitting.countDown(); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index 15bbc06836def..1bbcc46c0555f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -227,6 +227,7 @@ private void runLookup(DataType keyType, PopulateIndices populateIndices) throws DriverContext driverContext = driverContext(); try ( var driver = new Driver( + "test", driverContext, source.get(driverContext), List.of(reader.get(driverContext), lookup.get(driverContext)), diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SimplePauseFieldPlugin.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SimplePauseFieldPlugin.java index 3ba73dd9a402e..7802383f8e181 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SimplePauseFieldPlugin.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SimplePauseFieldPlugin.java @@ -24,6 +24,10 @@ public static void resetPlugin() { startEmitting = new CountDownLatch(1); } + public static void release() { + allowEmitting.countDown(); + } + @Override public void onStartExecute() { startEmitting.countDown(); @@ -31,6 +35,10 @@ public void onStartExecute() { @Override public boolean onWait() throws InterruptedException { - return allowEmitting.await(30, TimeUnit.SECONDS); + try { + return allowEmitting.await(30, TimeUnit.SECONDS); + } catch (InterruptedException e) { + return true; + } } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java index 25603acece3cb..c8dc134e0e706 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java @@ -20,7 +20,7 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; -import 
org.elasticsearch.xpack.esql.stats.PlanningMetricsManager; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetryManager; import org.junit.Before; import java.util.Collection; @@ -113,6 +113,39 @@ public static Iterable parameters() { Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), true ) }, + new Object[] { + new Test( + // Using the `::` cast operator and a function alias + """ + ROW host = "1.1.1.1" + | EVAL ip = host::ip::string, y = to_str(host) + """, + Map.ofEntries(Map.entry("ROW", 1), Map.entry("EVAL", 1)), + Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), + true + ) }, + new Object[] { + new Test( + // Using the `::` cast operator and a function alias + """ + FROM idx + | EVAL ip = host::ip::string, y = to_str(host) + """, + Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1)), + Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), + true + ) }, + new Object[] { + new Test( + """ + FROM idx + | EVAL y = to_str(host) + | LOOKUP JOIN lookup_idx ON host + """, + Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1), Map.entry("LOOKUP JOIN", 1)), + Map.ofEntries(Map.entry("TO_STRING", 1)), + true + ) }, new Object[] { new Test( "METRICS idx | LIMIT 10", @@ -123,9 +156,7 @@ public static Iterable parameters() { new Object[] { new Test( "METRICS idx max(id) BY host | LIMIT 10", - Build.current().isSnapshot() - ? Map.ofEntries(Map.entry("METRICS", 1), Map.entry("LIMIT", 1), Map.entry("FROM TS", 1)) - : Collections.emptyMap(), + Build.current().isSnapshot() ? Map.ofEntries(Map.entry("METRICS", 1), Map.entry("LIMIT", 1)) : Collections.emptyMap(), Build.current().isSnapshot() ? Map.ofEntries(Map.entry("MAX", 1)) : Collections.emptyMap(), Build.current().isSnapshot() ) } @@ -138,7 +169,7 @@ public static Iterable parameters() { // | EVAL ip = to_ip(host), x = to_string(host), y = to_string(host) // | INLINESTATS max(id) // """, - // Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1, "STATS", 1) : Collections.emptyMap(), + // Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1) : Collections.emptyMap(), // Build.current().isSnapshot() // ? 
Map.ofEntries(Map.entry("MAX", 1), Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)) // : Collections.emptyMap(), @@ -186,19 +217,19 @@ private static void testQuery( client(dataNode.getName()).execute(EsqlQueryAction.INSTANCE, request, ActionListener.running(() -> { try { // test total commands used - final List<Measurement> commandMeasurementsAll = measurements(plugin, PlanningMetricsManager.FEATURE_METRICS_ALL); + final List<Measurement> commandMeasurementsAll = measurements(plugin, PlanTelemetryManager.FEATURE_METRICS_ALL); assertAllUsages(expectedCommands, commandMeasurementsAll, iteration, success); // test num of queries using a command - final List<Measurement> commandMeasurements = measurements(plugin, PlanningMetricsManager.FEATURE_METRICS); + final List<Measurement> commandMeasurements = measurements(plugin, PlanTelemetryManager.FEATURE_METRICS); assertUsageInQuery(expectedCommands, commandMeasurements, iteration, success); // test total functions used - final List<Measurement> functionMeasurementsAll = measurements(plugin, PlanningMetricsManager.FUNCTION_METRICS_ALL); + final List<Measurement> functionMeasurementsAll = measurements(plugin, PlanTelemetryManager.FUNCTION_METRICS_ALL); assertAllUsages(expectedFunctions, functionMeasurementsAll, iteration, success); // test number of queries using a function - final List<Measurement> functionMeasurements = measurements(plugin, PlanningMetricsManager.FUNCTION_METRICS); + final List<Measurement> functionMeasurements = measurements(plugin, PlanTelemetryManager.FUNCTION_METRICS); assertUsageInQuery(expectedFunctions, functionMeasurements, iteration, success); } finally { latch.countDown(); @@ -216,8 +247,8 @@ private static void assertAllUsages(Map<String, Integer> expected, List<Measurement> Set<String> found = featureNames(metrics); assertThat(found, is(expected.keySet())); for (Measurement metric : metrics) { - assertThat(metric.attributes().get(PlanningMetricsManager.SUCCESS), is(success)); - String featureName = (String) metric.attributes().get(PlanningMetricsManager.FEATURE_NAME); + assertThat(metric.attributes().get(PlanTelemetryManager.SUCCESS), is(success)); + String featureName = (String) metric.attributes().get(PlanTelemetryManager.FEATURE_NAME); assertThat(metric.getLong(), is(iteration * expected.get(featureName))); } } @@ -227,7 +258,7 @@ private static void assertUsageInQuery(Map<String, Integer> expected, List<Measurement> measurements(TestTelemetryPlugin plugin, String private static Set<String> featureNames(List<Measurement> functionMeasurements) { return functionMeasurements.stream() - .map(x -> x.attributes().get(PlanningMetricsManager.FEATURE_NAME)) + .map(x -> x.attributes().get(PlanTelemetryManager.FEATURE_NAME)) .map(String.class::cast) .collect(Collectors.toSet()); } @@ -268,6 +299,19 @@ private static void loadData(String nodeName) { } client().admin().indices().prepareRefresh("idx").get(); + + assertAcked( + client().admin() + .indices() + .prepareCreate("lookup_idx") + .setSettings( + Settings.builder() + .put("index.routing.allocation.require._name", nodeName) + .put("index.mode", "lookup") + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + ) + .setMapping("ip", "type=ip", "host", "type=keyword") + ); } private DiscoveryNode randomDataNode() { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java index b928b25929401..4ec309ff05cee 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java +++
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.Before; import java.util.List; @@ -105,7 +104,6 @@ public void testNotWhereMatch() { } public void testWhereMatchWithScoring() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -122,7 +120,7 @@ public void testWhereMatchWithScoring() { } public void testWhereMatchWithScoringDifferentSort() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ FROM test METADATA _score @@ -139,7 +137,6 @@ public void testWhereMatchWithScoringDifferentSort() { } public void testWhereMatchWithScoringSortScore() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -156,7 +153,6 @@ public void testWhereMatchWithScoringSortScore() { } public void testWhereMatchWithScoringNoSort() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -246,7 +242,7 @@ public void testWhereMatchWithRow() { var error = expectThrows(ElasticsearchException.class, () -> run(query)); assertThat( error.getMessage(), - containsString("[MATCH] function cannot operate on [\"a brown fox\"], which is not a field from an index mapping") + containsString("line 2:15: [MATCH] function cannot operate on [content], which is not a field from an index mapping") ); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index bd7246518c958..c978dead8f4fd 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.Before; import java.util.List; @@ -106,7 +105,6 @@ public void testNotWhereMatch() { } public void testWhereMatchWithScoring() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -123,7 +121,6 @@ public void testWhereMatchWithScoring() { } public void testWhereMatchWithScoringDifferentSort() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -140,7 +137,6 @@ public void testWhereMatchWithScoringDifferentSort() { } public void testWhereMatchWithScoringNoSort() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -230,7 +226,7 @@ public void testWhereMatchWithRow() { var error = expectThrows(ElasticsearchException.class, () -> run(query)); assertThat( 
error.getMessage(), - containsString("[:] operator cannot operate on [\"a brown fox\"], which is not a field from an index mapping") + containsString("line 2:9: [:] operator cannot operate on [content], which is not a field from an index mapping") ); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java index a3d1ac931528c..97a98da6e8291 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.Before; import java.util.List; @@ -140,7 +139,6 @@ private void createAndPopulateIndex() { } public void testWhereQstrWithScoring() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -165,7 +163,6 @@ public void testWhereQstrWithScoring() { } public void testWhereQstrWithScoringSorted() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -191,7 +188,6 @@ public void testWhereQstrWithScoringSorted() { } public void testWhereQstrWithScoringNoSort() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score @@ -215,7 +211,6 @@ public void testWhereQstrWithScoringNoSort() { } public void testWhereQstrWithNonPushableAndScoring() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var query = """ FROM test METADATA _score diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 5b731b5dac9d2..9f900200d5b2e 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -73,6 +73,7 @@ SHOW : 'show' -> pushMode(SHOW_MODE); SORT : 'sort' -> pushMode(EXPRESSION_MODE); STATS : 'stats' -> pushMode(EXPRESSION_MODE); WHERE : 'where' -> pushMode(EXPRESSION_MODE); +JOIN_LOOKUP : 'lookup' -> pushMode(JOIN_MODE); // // in development // @@ -84,15 +85,14 @@ WHERE : 'where' -> pushMode(EXPRESSION_MODE); // Once the command has been stabilized, remove the DEV_ prefix and the {}? conditional and move the command to the // main section while preserving alphabetical order: // MYCOMMAND : 'mycommand' -> ... -DEV_INLINESTATS : {this.isDevVersion()}? 'inlinestats' -> pushMode(EXPRESSION_MODE); -DEV_LOOKUP : {this.isDevVersion()}? 'lookup_🐔' -> pushMode(LOOKUP_MODE); -DEV_METRICS : {this.isDevVersion()}? 'metrics' -> pushMode(METRICS_MODE); +DEV_CHANGE_POINT : {this.isDevVersion()}? 'change_point' -> pushMode(CHANGE_POINT_MODE); +DEV_INLINESTATS : {this.isDevVersion()}? 'inlinestats' -> pushMode(EXPRESSION_MODE); +DEV_LOOKUP : {this.isDevVersion()}? 'lookup_🐔' -> pushMode(LOOKUP_MODE); +DEV_METRICS : {this.isDevVersion()}? 'metrics' -> pushMode(METRICS_MODE); // list of all JOIN commands -DEV_JOIN : {this.isDevVersion()}? 'join' -> pushMode(JOIN_MODE); -DEV_JOIN_FULL : {this.isDevVersion()}? 
'full' -> pushMode(JOIN_MODE); -DEV_JOIN_LEFT : {this.isDevVersion()}? 'left' -> pushMode(JOIN_MODE); -DEV_JOIN_RIGHT : {this.isDevVersion()}? 'right' -> pushMode(JOIN_MODE); -DEV_JOIN_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(JOIN_MODE); +DEV_JOIN_FULL : {this.isDevVersion()}? 'full' -> pushMode(JOIN_MODE); +DEV_JOIN_LEFT : {this.isDevVersion()}? 'left' -> pushMode(JOIN_MODE); +DEV_JOIN_RIGHT : {this.isDevVersion()}? 'right' -> pushMode(JOIN_MODE); // @@ -315,8 +315,8 @@ mode PROJECT_MODE; PROJECT_PIPE : PIPE -> type(PIPE), popMode; PROJECT_DOT: DOT -> type(DOT); PROJECT_COMMA : COMMA -> type(COMMA); -PROJECT_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); -PROJECT_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +PROJECT_PARAM : PARAM -> type(PARAM); +PROJECT_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); fragment UNQUOTED_ID_BODY_WITH_PATTERN : (LETTER | DIGIT | UNDERSCORE | ASTERISK) @@ -350,8 +350,8 @@ RENAME_PIPE : PIPE -> type(PIPE), popMode; RENAME_ASSIGN : ASSIGN -> type(ASSIGN); RENAME_COMMA : COMMA -> type(COMMA); RENAME_DOT: DOT -> type(DOT); -RENAME_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); -RENAME_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +RENAME_PARAM : PARAM -> type(PARAM); +RENAME_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); AS : 'as'; @@ -423,8 +423,8 @@ ENRICH_FIELD_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) ; -ENRICH_FIELD_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); -ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +ENRICH_FIELD_PARAM : PARAM -> type(PARAM); +ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); ENRICH_FIELD_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) @@ -441,8 +441,8 @@ ENRICH_FIELD_WS mode MVEXPAND_MODE; MVEXPAND_PIPE : PIPE -> type(PIPE), popMode; MVEXPAND_DOT: DOT -> type(DOT); -MVEXPAND_PARAM : {this.isDevVersion()}? PARAM -> type(PARAM); -MVEXPAND_NAMED_OR_POSITIONAL_PARAM : {this.isDevVersion()}? 
NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); +MVEXPAND_PARAM : PARAM -> type(PARAM); +MVEXPAND_NAMED_OR_POSITIONAL_PARAM : NAMED_OR_POSITIONAL_PARAM -> type(NAMED_OR_POSITIONAL_PARAM); MVEXPAND_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) @@ -556,7 +556,7 @@ LOOKUP_FIELD_WS // mode JOIN_MODE; JOIN_PIPE : PIPE -> type(PIPE), popMode; -JOIN_JOIN : DEV_JOIN -> type(DEV_JOIN); +JOIN : 'join'; JOIN_AS : AS -> type(AS); JOIN_ON : ON -> type(ON), popMode, pushMode(EXPRESSION_MODE); USING : 'USING' -> popMode, pushMode(EXPRESSION_MODE); @@ -639,3 +639,19 @@ CLOSING_METRICS_BY CLOSING_METRICS_PIPE : PIPE -> type(PIPE), popMode ; + +/// +/// CHANGE_POINT command +/// +mode CHANGE_POINT_MODE; + +CHANGE_POINT_PIPE : PIPE -> type(PIPE), popMode; +CHANGE_POINT_ON : ON -> type(ON); +CHANGE_POINT_AS : AS -> type(AS); +CHANGE_POINT_DOT: DOT -> type(DOT); +CHANGE_POINT_COMMA: COMMA -> type(COMMA); +CHANGE_POINT_QUOTED_IDENTIFIER: QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER); +CHANGE_POINT_UNQUOTED_IDENTIFIER: UNQUOTED_IDENTIFIER -> type(UNQUOTED_IDENTIFIER); +CHANGE_POINT_LINE_COMMENT: LINE_COMMENT -> channel(HIDDEN); +CHANGE_POINT_MULTILINE_COMMENT: MULTILINE_COMMENT -> channel(HIDDEN); +CHANGE_POINT_WS: WS -> channel(HIDDEN); diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 218884913960f..7ab99c293bc93 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -14,14 +14,14 @@ SHOW=13 SORT=14 STATS=15 WHERE=16 -DEV_INLINESTATS=17 -DEV_LOOKUP=18 -DEV_METRICS=19 -DEV_JOIN=20 -DEV_JOIN_FULL=21 -DEV_JOIN_LEFT=22 -DEV_JOIN_RIGHT=23 -DEV_JOIN_LOOKUP=24 +JOIN_LOOKUP=17 +DEV_CHANGE_POINT=18 +DEV_INLINESTATS=19 +DEV_LOOKUP=20 +DEV_METRICS=21 +DEV_JOIN_FULL=22 +DEV_JOIN_LEFT=23 +DEV_JOIN_RIGHT=24 UNKNOWN_CMD=25 LINE_COMMENT=26 MULTILINE_COMMENT=27 @@ -118,16 +118,20 @@ LOOKUP_WS=117 LOOKUP_FIELD_LINE_COMMENT=118 LOOKUP_FIELD_MULTILINE_COMMENT=119 LOOKUP_FIELD_WS=120 -USING=121 -JOIN_LINE_COMMENT=122 -JOIN_MULTILINE_COMMENT=123 -JOIN_WS=124 -METRICS_LINE_COMMENT=125 -METRICS_MULTILINE_COMMENT=126 -METRICS_WS=127 -CLOSING_METRICS_LINE_COMMENT=128 -CLOSING_METRICS_MULTILINE_COMMENT=129 -CLOSING_METRICS_WS=130 +JOIN=121 +USING=122 +JOIN_LINE_COMMENT=123 +JOIN_MULTILINE_COMMENT=124 +JOIN_WS=125 +METRICS_LINE_COMMENT=126 +METRICS_MULTILINE_COMMENT=127 +METRICS_WS=128 +CLOSING_METRICS_LINE_COMMENT=129 +CLOSING_METRICS_MULTILINE_COMMENT=130 +CLOSING_METRICS_WS=131 +CHANGE_POINT_LINE_COMMENT=132 +CHANGE_POINT_MULTILINE_COMMENT=133 +CHANGE_POINT_WS=134 'dissect'=1 'drop'=2 'enrich'=3 @@ -144,6 +148,7 @@ CLOSING_METRICS_WS=130 'sort'=14 'stats'=15 'where'=16 +'lookup'=17 '|'=29 'by'=33 'and'=34 @@ -189,4 +194,5 @@ CLOSING_METRICS_WS=130 'on'=95 'with'=96 'info'=107 -'USING'=121 +'join'=121 +'USING'=122 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index c66da879a5709..3e30dd0cb4a04 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -51,10 +51,11 @@ processingCommand | grokCommand | enrichCommand | mvExpandCommand + | joinCommand // in development | {this.isDevVersion()}? inlinestatsCommand | {this.isDevVersion()}? lookupCommand - | {this.isDevVersion()}? joinCommand + | {this.isDevVersion()}? 
changePointCommand ; whereCommand @@ -143,6 +144,7 @@ indexPattern clusterString : UNQUOTED_SOURCE + | QUOTED_STRING ; indexString @@ -193,7 +195,7 @@ identifier identifierPattern : ID_PATTERN - | {this.isDevVersion()}? parameter + | parameter ; constant @@ -216,7 +218,7 @@ parameter identifierOrParameter : identifier - | {this.isDevVersion()}? parameter + | parameter ; limitCommand @@ -324,11 +326,11 @@ inlinestatsCommand ; joinCommand - : type=(DEV_JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT)? DEV_JOIN joinTarget joinCondition + : type=(JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT) JOIN joinTarget joinCondition ; joinTarget - : index=indexPattern (AS alias=identifier)? + : index=indexPattern ; joinCondition @@ -338,3 +340,7 @@ joinCondition joinPredicate : valueExpression ; + +changePointCommand + : DEV_CHANGE_POINT value=qualifiedName (ON key=qualifiedName)? (AS targetType=qualifiedName COMMA targetPvalue=qualifiedName)? + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 218884913960f..7ab99c293bc93 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -14,14 +14,14 @@ SHOW=13 SORT=14 STATS=15 WHERE=16 -DEV_INLINESTATS=17 -DEV_LOOKUP=18 -DEV_METRICS=19 -DEV_JOIN=20 -DEV_JOIN_FULL=21 -DEV_JOIN_LEFT=22 -DEV_JOIN_RIGHT=23 -DEV_JOIN_LOOKUP=24 +JOIN_LOOKUP=17 +DEV_CHANGE_POINT=18 +DEV_INLINESTATS=19 +DEV_LOOKUP=20 +DEV_METRICS=21 +DEV_JOIN_FULL=22 +DEV_JOIN_LEFT=23 +DEV_JOIN_RIGHT=24 UNKNOWN_CMD=25 LINE_COMMENT=26 MULTILINE_COMMENT=27 @@ -118,16 +118,20 @@ LOOKUP_WS=117 LOOKUP_FIELD_LINE_COMMENT=118 LOOKUP_FIELD_MULTILINE_COMMENT=119 LOOKUP_FIELD_WS=120 -USING=121 -JOIN_LINE_COMMENT=122 -JOIN_MULTILINE_COMMENT=123 -JOIN_WS=124 -METRICS_LINE_COMMENT=125 -METRICS_MULTILINE_COMMENT=126 -METRICS_WS=127 -CLOSING_METRICS_LINE_COMMENT=128 -CLOSING_METRICS_MULTILINE_COMMENT=129 -CLOSING_METRICS_WS=130 +JOIN=121 +USING=122 +JOIN_LINE_COMMENT=123 +JOIN_MULTILINE_COMMENT=124 +JOIN_WS=125 +METRICS_LINE_COMMENT=126 +METRICS_MULTILINE_COMMENT=127 +METRICS_WS=128 +CLOSING_METRICS_LINE_COMMENT=129 +CLOSING_METRICS_MULTILINE_COMMENT=130 +CLOSING_METRICS_WS=131 +CHANGE_POINT_LINE_COMMENT=132 +CHANGE_POINT_MULTILINE_COMMENT=133 +CHANGE_POINT_WS=134 'dissect'=1 'drop'=2 'enrich'=3 @@ -144,6 +148,7 @@ CLOSING_METRICS_WS=130 'sort'=14 'stats'=15 'where'=16 +'lookup'=17 '|'=29 'by'=33 'and'=34 @@ -189,4 +194,5 @@ CLOSING_METRICS_WS=130 'on'=95 'with'=96 'info'=107 -'USING'=121 +'join'=121 +'USING'=122 diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index b25f81a79b7ce..255c162df3495 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Not}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index 27345b384375e..581ae82afbd21 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestBooleanEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index c8b8c3ac501ec..9926644551faf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index e62b0f9877cdc..a0129f7762379 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreatestDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index 8c05f00fb1e0d..7b2d3e76a027b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestIntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index 78237c8d389bd..94c6bbd934751 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index 82fc13cffbe7e..8bf830d030f63 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
  */
 public final class LeastBooleanEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java
index b37408e434148..fe06aa9b5f32e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class LeastBytesRefEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java
index bf501d53211e6..2c9c45e363d63 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class LeastDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java
index 803f23d994bbe..e14d83dafb951 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class LeastIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java
index 7408dd1165a01..da2e98b59220f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class LeastLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java
index 9ba95999f052b..c4a22cc06900d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java
@@ -22,7 +22,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link FromBase64}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class FromBase64Evaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java
index 2ed07e440a301..3fafd237030db 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java
@@ -23,7 +23,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBase64}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ToBase64Evaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java
index c4264fb78be92..d5df80376c484 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToBooleanFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToBooleanFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java
index 43ac58d1f0fc4..a4f17f9892b82 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToBooleanFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToBooleanFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java
index c8b2814a3f6da..5f62c70b04972 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToBooleanFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToBooleanFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java
index 8859bfce25ba1..9e12947199948 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToBooleanFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToBooleanFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java
index 2f4037ff3b116..40436df749e04 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToBooleanFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToBooleanFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java
index 7c47e39dfba19..68d755000902d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToCartesianPoint}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToCartesianPointFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToCartesianPointFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java
index 6ae079e153e0b..fcbe066acda25 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToCartesianShape}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToCartesianShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToCartesianShapeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java
index e00e7e044ae12..2b7262c19128e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDateNanosFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDateNanosFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java
index 23b30e669241b..49f894998312d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDateNanosFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDateNanosFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java
index cc52208ce5a25..a5084102ffe5a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDateNanosFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDateNanosFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java
index c5a20ac298da7..3bc9ecfcfaf4b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDateNanosFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDateNanosFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java
index 92b629657b95b..56b8decbe7e4e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDatetime}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDatetimeFromDateNanosEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDatetimeFromDateNanosEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java
index 3e074dba3d456..46bba56031163 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDatetime}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDatetimeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDatetimeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java
index 11bf9ffed0fbd..a5d0e8af694b5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDegrees}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDegreesEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDegreesEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java
index 60433ea5efae7..250147a255ec2 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDoubleFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDoubleFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java
index 1e3c48f472ad2..7b2ff9f39d56e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDoubleFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDoubleFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java
index 6e959a28459aa..e19f9c4cb4432 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDoubleFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDoubleFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java
index 6613fc1dd6b94..4ca9427149250 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDoubleFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDoubleFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java
index 9badb00fc472c..6015e631093a6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToDoubleFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToDoubleFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java
index ad33737f3da11..4444256cf6b2c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToGeoPoint}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToGeoPointFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToGeoPointFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java
index db59fd3a16da8..ede98457d21dc 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToGeoShape}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToGeoShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToGeoShapeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java
index 7a2b2a016d60f..20808c3493285 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToIP}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIPFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToIPFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
index 9bd1304024ad6..72837d5c7d6aa 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToIntegerFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
index 5057037993f60..c45f332f0d22e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToIntegerFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
index b2e891a6e65d5..b829a506dc5a0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToIntegerFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
index d50c18501e37f..1a0abf5ed6300 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToIntegerFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
index 31fadc9f28845..433ad4815dc06 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToIntegerFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToIntegerFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
index 668bedfa4440e..d74861f4ef11d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
index cb1c10558f10e..a63c54184fb88 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
index 74be177061f7a..95623d0ef2672 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
index 1d58a05c7d970..283d97606e9a4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
index af911e5b787ac..134ea9698c47f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToLongFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToLongFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
index 6aed22da1b015..b1a480878a9c2 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToRadians}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToRadiansEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToRadiansEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
index 47af1b25c88e8..d9d1cecd2af93 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java
index d42c945c0cee6..aeaf940d51ff0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromCartesianPointEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromCartesianPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java
index 93901e1c4486c..2d4c04e6afcdd 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromCartesianShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromCartesianShapeEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java
index 37f13ea340a26..189607a077d74 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromDateNanosEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromDateNanosEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
index e179f92665a7c..a7f2d537e1801 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
index 7815b33845394..29091a037dada 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java
index 42b3c37fed892..5dfdbf37257e0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromGeoPointEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromGeoPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java
index a8c1b8e241ba4..a207b4dbf1875 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromGeoShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromGeoShapeEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
index d51ae78956c21..a9663c2a4fdf5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromIPEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromIPEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
index cfff78cf3b550..604061865dcd4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
index f4e0046f93f4b..c444c5949a4c5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
index 57275460a1813..6c3789ac182a4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
index 816963dd73536..183cfaba27e42 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToStringFromVersionEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToStringFromVersionEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
index 3b7dd65b68f2d..74d41a0489987 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToUnsignedLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
index 1a6b9ee26557d..6f0e1a676920e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToUnsignedLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
index 56c3c0cecc222..f1c65e0e5f1a7 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
@@ -18,7 +18,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToUnsignedLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
index 323661261ce56..5b16dc8bc5b98 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
  */
 public final class ToUnsignedLongFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator {
   public ToUnsignedLongFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
index 0f3096c4824da..b13be58bfcbc8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead.
*/ public final class ToUnsignedLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToUnsignedLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java index fecd2b62e53ab..b9ad6ded04f1c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToVersion}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToVersionFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToVersionFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java deleted file mode 100644 index 0ad09ee55ca1f..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.InvalidArgumentException; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. 
- */ -public final class DateDiffConstantEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final DateDiff.Part datePartFieldUnit; - - private final EvalOperator.ExpressionEvaluator startTimestamp; - - private final EvalOperator.ExpressionEvaluator endTimestamp; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DateDiffConstantEvaluator(Source source, DateDiff.Part datePartFieldUnit, - EvalOperator.ExpressionEvaluator startTimestamp, - EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { - this.source = source; - this.datePartFieldUnit = datePartFieldUnit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { - try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { - LongVector startTimestampVector = startTimestampBlock.asVector(); - if (startTimestampVector == null) { - return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); - } - LongVector endTimestampVector = endTimestampBlock.asVector(); - if (endTimestampVector == null) { - return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); - } - return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); - } - } - } - - public IntBlock eval(int positionCount, LongBlock startTimestampBlock, - LongBlock endTimestampBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (startTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (startTimestampBlock.getValueCount(p) != 1) { - if (startTimestampBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (endTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (endTimestampBlock.getValueCount(p) != 1) { - if (endTimestampBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendInt(DateDiff.process(this.datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public IntBlock eval(int positionCount, LongVector startTimestampVector, - LongVector endTimestampVector) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendInt(DateDiff.process(this.datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + 
endTimestamp + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(startTimestamp, endTimestamp); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final DateDiff.Part datePartFieldUnit; - - private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; - - private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; - - public Factory(Source source, DateDiff.Part datePartFieldUnit, - EvalOperator.ExpressionEvaluator.Factory startTimestamp, - EvalOperator.ExpressionEvaluator.Factory endTimestamp) { - this.source = source; - this.datePartFieldUnit = datePartFieldUnit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - } - - @Override - public DateDiffConstantEvaluator get(DriverContext context) { - return new DateDiffConstantEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); - } - - @Override - public String toString() { - return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java index 0ff047f9bd819..eee8c756930ab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java index 880531ca53707..5a0a6051abebb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffConstantMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java index 99f7d1cb2e247..dc28b97fef9aa 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java index 842930a040ed0..477fb22548d2e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffConstantNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java deleted file mode 100644 index 82fb55e97f1f2..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.InvalidArgumentException; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. - */ -public final class DateDiffEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final EvalOperator.ExpressionEvaluator unit; - - private final EvalOperator.ExpressionEvaluator startTimestamp; - - private final EvalOperator.ExpressionEvaluator endTimestamp; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DateDiffEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, - EvalOperator.ExpressionEvaluator startTimestamp, - EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { - this.source = source; - this.unit = unit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { - try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { - try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { - BytesRefVector unitVector = unitBlock.asVector(); - if (unitVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - LongVector startTimestampVector = startTimestampBlock.asVector(); - if (startTimestampVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - LongVector endTimestampVector = endTimestampBlock.asVector(); - if (endTimestampVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); - } - } - } - } - - public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, - LongBlock endTimestampBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - BytesRef unitScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - if (unitBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (unitBlock.getValueCount(p) != 1) { - if (unitBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (startTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (startTimestampBlock.getValueCount(p) != 1) { - if 
(startTimestampBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (endTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (endTimestampBlock.getValueCount(p) != 1) { - if (endTimestampBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendInt(DateDiff.process(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public IntBlock eval(int positionCount, BytesRefVector unitVector, - LongVector startTimestampVector, LongVector endTimestampVector) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - BytesRef unitScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendInt(DateDiff.process(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory unit; - - private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; - - private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, - EvalOperator.ExpressionEvaluator.Factory startTimestamp, - EvalOperator.ExpressionEvaluator.Factory endTimestamp) { - this.source = source; - this.unit = unit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - } - - @Override - public DateDiffEvaluator get(DriverContext context) { - return new DateDiffEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); - } - - @Override - public String toString() { - return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java index a464d0c5cafc7..bf938f135b6fa 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java index 4586e2cb720fd..2e4f71d8636b4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java index 95a54c3a24ec5..1fb8e2c744cd3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java index a7694647aec54..f08424a09d1b4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java deleted file mode 100644 index 37e900245a877..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import java.time.ZoneId; -import java.time.temporal.ChronoField; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. - */ -public final class DateExtractConstantEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final EvalOperator.ExpressionEvaluator value; - - private final ChronoField chronoField; - - private final ZoneId zone; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DateExtractConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator value, - ChronoField chronoField, ZoneId zone, DriverContext driverContext) { - this.source = source; - this.value = value; - this.chronoField = chronoField; - this.zone = zone; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (LongBlock valueBlock = (LongBlock) value.eval(page)) { - LongVector valueVector = valueBlock.asVector(); - if (valueVector == null) { - return eval(page.getPositionCount(), valueBlock); - } - return eval(page.getPositionCount(), valueVector).asBlock(); - } - } - - public LongBlock eval(int positionCount, LongBlock valueBlock) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (valueBlock.getValueCount(p) != 1) { - if (valueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - result.appendLong(DateExtract.process(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), this.chronoField, this.zone)); - } - return result.build(); - } - } - - public LongVector eval(int positionCount, LongVector valueVector) { - try(LongVector.FixedBuilder result = 
driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - result.appendLong(p, DateExtract.process(valueVector.getLong(p), this.chronoField, this.zone)); - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateExtractConstantEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(value); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory value; - - private final ChronoField chronoField; - - private final ZoneId zone; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, - ChronoField chronoField, ZoneId zone) { - this.source = source; - this.value = value; - this.chronoField = chronoField; - this.zone = zone; - } - - @Override - public DateExtractConstantEvaluator get(DriverContext context) { - return new DateExtractConstantEvaluator(source, value.get(context), chronoField, zone, context); - } - - @Override - public String toString() { - return "DateExtractConstantEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java index 11da518a01ce1..fe283a95f3c2e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateExtractConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java index bbd0a59c87ceb..35bca67388d78 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateExtractConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java deleted file mode 100644 index 6d56fd1c0d6a2..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import java.time.ZoneId; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. 
- */ -public final class DateExtractEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final EvalOperator.ExpressionEvaluator value; - - private final EvalOperator.ExpressionEvaluator chronoField; - - private final ZoneId zone; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DateExtractEvaluator(Source source, EvalOperator.ExpressionEvaluator value, - EvalOperator.ExpressionEvaluator chronoField, ZoneId zone, DriverContext driverContext) { - this.source = source; - this.value = value; - this.chronoField = chronoField; - this.zone = zone; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (LongBlock valueBlock = (LongBlock) value.eval(page)) { - try (BytesRefBlock chronoFieldBlock = (BytesRefBlock) chronoField.eval(page)) { - LongVector valueVector = valueBlock.asVector(); - if (valueVector == null) { - return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); - } - BytesRefVector chronoFieldVector = chronoFieldBlock.asVector(); - if (chronoFieldVector == null) { - return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); - } - return eval(page.getPositionCount(), valueVector, chronoFieldVector); - } - } - } - - public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chronoFieldBlock) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - BytesRef chronoFieldScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (valueBlock.getValueCount(p) != 1) { - if (valueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (chronoFieldBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (chronoFieldBlock.getValueCount(p) != 1) { - if (chronoFieldBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendLong(DateExtract.process(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), chronoFieldBlock.getBytesRef(chronoFieldBlock.getFirstValueIndex(p), chronoFieldScratch), this.zone)); - } catch (IllegalArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public LongBlock eval(int positionCount, LongVector valueVector, - BytesRefVector chronoFieldVector) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - BytesRef chronoFieldScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendLong(DateExtract.process(valueVector.getLong(p), chronoFieldVector.getBytesRef(p, chronoFieldScratch), this.zone)); - } catch (IllegalArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateExtractEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(value, chronoField); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - 
driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory value; - - private final EvalOperator.ExpressionEvaluator.Factory chronoField; - - private final ZoneId zone; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, - EvalOperator.ExpressionEvaluator.Factory chronoField, ZoneId zone) { - this.source = source; - this.value = value; - this.chronoField = chronoField; - this.zone = zone; - } - - @Override - public DateExtractEvaluator get(DriverContext context) { - return new DateExtractEvaluator(source, value.get(context), chronoField.get(context), zone, context); - } - - @Override - public String toString() { - return "DateExtractEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java index edc0b2cb0f0ce..dcb8a543f5c35 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateExtractMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java index 97a04f0d06a74..6b961447df830 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateExtractNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java index 2f41a7440bb06..77aa06913c565 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateFormatMillisConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java index 29da191dbe781..428b932df3978 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateFormatMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java index 1488833227dcb..e1a5dd3272900 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateFormatNanosConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java index a94d522014813..8d27a1aaeede2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateFormatNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index 0ddc731827894..4f792b640f560 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateParse}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateParseConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index 6c432855e38fb..752cc72971fe8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateParse}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateParseEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java index 2d34fc613bc74..1b21c2fc872d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateTrunc}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateTruncDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java index b9e49dd7e795c..96a6c192f53cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateTrunc}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateTruncDatetimeEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index 3eca39f980347..b45856d6012ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -16,7 +16,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Now}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NowEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java index 0120af54299e3..f65c74af6be09 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link CIDRMatch}. - * This class is generated. 
Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java index 53a367aff7cd6..fcc084a7bf240 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IpPrefix}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class IpPrefixEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java index 69697d81b8bcd..8cda84da6e192 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AbsDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java index 44007fcb9c6f4..eeb7f46d2224d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AbsIntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java index 2bb17f9f2512d..92946a8691ee9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. - * This class is generated. Do not edit it. 
+ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AbsLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java index 17d8c34a63731..8f5ab21f30b1a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Acos}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AcosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java index 267ba167572ae..0e0ac4b93d11f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Asin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AsinEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java index 2f1fdabc4097f..09d0b1a7fd35c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Atan2}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class Atan2Evaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java index 2e4d4c80e6a9a..0cd8adda340e5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Atan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class AtanEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java index d99d6d918a215..e43daae68ed66 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CastIntToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java index 585b0f392bee9..f1d850fa935f5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CastIntToLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java index ff5ffcb6710cd..69e2a69a59027 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
  */
 public final class CastIntToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
index 9d6851cde0510..e82ed233839f1 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
index b72ea03cef25d..ba78e37cf5ee3 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastLongToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
index d7ab56113ebfc..458a74ad704cc 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CastUnsignedLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java
index 66727d2ba0db7..923ab07f61ce0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cbrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CbrtDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java
index dd811c2ef7c5d..63f41e08a65f1 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cbrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CbrtIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java
index d931106c65b0a..a94ea6dab1446 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cbrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CbrtLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java
index 13e5878ec524a..a959e01f44a1f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cbrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CbrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java
index 02617726145dc..32233fbc24d04 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Ceil}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CeilDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java
index 8718eb606c209..5c854d91e9aa8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cos}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CosEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java
index 409432a83da00..8baec8554bb9a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cosh}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class CoshEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java
index 4d7362b8bc8ea..6b7fa6df9798e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ExpDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java
index 482d44b1087ac..9a46ca17081a0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ExpIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java
index bbd17fe57c184..38dcd454cfb38 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ExpLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java
index 9fd90dca32f20..57f4b53ce0ba1 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class ExpUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java
index df9e533b94309..41fd18c464367 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Floor}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class FloorDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java
index 22094f7e623e6..fed6a8becea4b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hypot}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class HypotEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java
index 75a1f57520b39..ca6f38e573cdf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log10}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Log10DoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java
index d702185235cce..bb0963750e4ac 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log10}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Log10IntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java
index c13c477e4f689..88f518cbe2654 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log10}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Log10LongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java
index 8c955f499c9cf..ab2ebdd4f2ec8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log10}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class Log10UnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java
index c6b4009e6a779..2cdbb295126d9 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class LogConstantEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java
index 583cc06ba7dba..50917b21add4d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class LogEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java
index d1c9f91463922..9bf553632a98e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class PowEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java
index 347549bb78cca..cebb54ee59cc5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java
index bfabc34721c67..d81d10e1519b9 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundDoubleNoDecimalsEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java
index 220dd7a547cc9..034bad3fdc1a3 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java
index a2b7b51d42e0f..b72465cd14b8a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java
index 9cc233b8aff0c..70cc9986f2d9d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java
index e80559397464f..9f73c895b6f42 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SignumDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java
index 410c818fcf926..e6a270c3e344d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SignumIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java
index b5406bab5ee39..35569ecd80476 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SignumLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java
index 269ea507bfd05..677a8ec1fe6b5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SignumUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java
index b4a4a1b1a2a41..51514290e8254 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sin}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SinEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java
index ccce05ee8f7cf..99a6afb3b1843 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sinh}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SinhEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java
index db8cb73222062..30fa92073cc29 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SqrtDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java
index fc791b22aae8e..f2e8c3c14bc2b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SqrtIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java
index 1e656dbfd7a3d..040ddae13ce5f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SqrtLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java
index f5dc994c3ea83..21d026cb44c07 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SqrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java
index 1dcc611410a60..978d202c7f3ce 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Tan}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class TanEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java
index 860a4f5b0e60d..a7b594d130ba4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Tanh}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class TanhEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java
index d4fab518a0e5d..c126bd7bef196 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendBooleanEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java
index 959449310ce92..3afd3534b92f6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendBytesRefEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java
index d2fb9ce2926db..315150a20e354 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java
index dd5a491281c45..0291e8c07d9ff 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java
index 6f6050e7f245b..c23d036550fc8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java
index d87444746d2c6..b0a99ab33320d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAvg}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvAvgDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java
index 63e6b4eb12106..abc2157d30d03 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAvg}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvAvgIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java
index d699070747b49..b323f92b3b02f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAvg}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvAvgLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
index 5c63508fa3560..f12634f721c94 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAvg}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvAvgUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
index bf946aab347d2..72cc92114d9a5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
index e0cb6ca4c289b..d0bcfda7a4209 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
index 584319cc1ab82..d38ec51990ac0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
index 1e9c50d135559..7cb6e53326b7a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
index 7e118a1eb9eb8..3a34c55940248 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
index 75b49f2c3e8ee..08022d6580ebf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
index fcca356b38576..29c5e19aee827 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
index b0cf7bf59900b..b4f5c8d147f03 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
index 5c2af9218308d..d43e11571102f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
index 37b95378f1f5b..897f7e513aebb 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
index 44b4432edbf6f..2b7decc4c5537 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMaxBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
index 4e2180f2ec467..ed90337cb1947 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMaxBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
index 4a1be1673bb7c..9fcce8e6c6538 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
*/ public final class MvMaxDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMaxDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java index b0ed499efd84f..5b03f65e27374 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMaxIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMaxIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java index 24397b64c9ccc..2c9e89a5d3c2a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMaxLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMaxLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java index 7cefde819dedc..a94f92f203e9a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedianAbsoluteDeviation}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
*/ public final class MvMedianAbsoluteDeviationDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMedianAbsoluteDeviationDoubleEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java index 76013ca1115db..4fb12ff939a31 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedianAbsoluteDeviation}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMedianAbsoluteDeviationIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMedianAbsoluteDeviationIntEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java index e7883d92708b7..92c87dd6df912 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedianAbsoluteDeviation}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMedianAbsoluteDeviationLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMedianAbsoluteDeviationLongEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java index ef8781e1dc048..657e98c0e4d01 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedianAbsoluteDeviation}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
*/ public final class MvMedianAbsoluteDeviationUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMedianAbsoluteDeviationUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java index e3b539d8210aa..14ca0c301159f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedian}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMedianDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMedianDoubleEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java index d07dc41e1d04b..0f3aa297ae521 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedian}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMedianIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMedianIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java index f1cd87aefd3d0..1af845514baf9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedian}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
*/ public final class MvMedianLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMedianLongEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java index 031280b767b41..edc68673d3f4c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedian}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMedianUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMedianUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java index ea8b04cc5c4a5..3a3f87a518f20 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMinBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMinBooleanEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java index 31d41ff61e196..a8258c86a3f42 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java @@ -15,7 +15,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
*/ public final class MvMinBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMinBytesRefEvaluator(EvalOperator.ExpressionEvaluator field, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java index 5390350751ee7..14656910b7c7b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMinDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMinDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java index 918b049780905..36c3682dc3c0a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvMinIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMinIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java index 37a6709d46d4d..0bcfdf0036e52 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java @@ -14,7 +14,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
*/ public final class MvMinLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvMinLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java index 7ff79b0a0708b..11864b18a65d4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvPSeriesWeightedSum}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvPSeriesWeightedSumDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java index 7f7fdd42a237b..014e9230ce4ed 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvPercentile}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvPercentileDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java index ed55fe6f556a2..63f16bc6f7466 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvPercentile}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvPercentileIntegerEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java index f57de7c35d824..c5d2232f52e22 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvPercentile}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvPercentileLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java index ae04f0916c471..a0d8274a1dead 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvSliceBooleanEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java index a366b4ae765e1..84c97343c7b47 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvSliceBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java index 1607bcf078706..f71f2ecf9fdb8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvSliceDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java index 69bcc2f8998f4..fa4b6ffa2130b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvSliceIntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java index ad5a55a506214..7aa76eb53952e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MvSliceLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java index b49a92404ecd1..b275415b2dd77 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java @@ -15,7 +15,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSum}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvSumDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { public MvSumDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java index 8f27c4f472ba6..f22773bfb1540 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java @@ -16,7 +16,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSum}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvSumIntEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java index 8e4d183279e76..ae009b97852cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java @@ -16,7 +16,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSum}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. 
*/ public final class MvSumLongEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java index e17b5934271d6..93566b531e06c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java @@ -16,7 +16,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSum}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code MvEvaluatorImplementer} instead. */ public final class MvSumUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java index 9f204abbe0b43..557ba45f02cbe 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvZip}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MvZipEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java index 6d22335fb91e1..d99e7086ee895 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java index 593bfdec01325..956df2deb42f0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialContainsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java index 79639505283ab..6375ba99122e0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialContainsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java index 3d91a4323ba4c..27c1c608faf6f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java index 0a4c1e5c69bff..de384995136b8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialContainsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java index 523546ffa5a38..373d4829f46ed 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialContainsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java index 328565fd089c1..83a927517b0db 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java index da46839694c49..87e58c8320ea2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialContainsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java index 5560d1d90b6aa..61302b49d2564 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java index 4f6197a3cde4b..c09daa0f3f8d3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java index 45b9a1f2251ef..ac8c63a11ebbe 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java index 0a18ad86cf8bb..77fae3ea04b23 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java index f379e6502e9f7..acba535905292 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java index 0411ca8e61dfb..a600b69f1ec34 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java index f24c8991e0ba4..c3f535e9b2dad 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java index ae9c3d2c4d323..b1d849749af60 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java index c9d1493c4e10b..f85521e790f93 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java index 330e06a00f481..3575f6a4130ce 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java index 6b5167d136cf5..02ad8b8e95c5f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialIntersectsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java index c96e912b64924..8d277443653ae 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java index 9cdd7e345f8cc..861bc6a7d8f91 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java index 50497429eac41..5b428fec29a5b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialIntersectsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java index 92d2bd55021b2..47b5e68d03bef 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java index 06033360dd6cf..fdf6c21d4a05e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java index 55e86b398a9a9..f342bbdcdd2ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialWithinCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java index 0d9e7d8c460aa..032e20003c788 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java index 011ba3df96dae..3df427c4e03f4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java index e07f5203a45ca..ebbef2086182b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialWithinCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java index 82285639768da..eacce4c73d714 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java index 33bd70b76ae99..06666a4cfe8b0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java index 9335e0d93e0ab..7fdba8dfecf1d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialWithinGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java index 665f4c94722d4..54488302c7487 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java index 4917b71464dce..f1ebad92bea1a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java index 03c8ba1a04ab6..3af1fa1f990b8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StDistanceCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java index 1085f71e95b73..08d882b0b2cf2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java index c8554a3041c89..b29915ff22c7e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java index 8f37d3157fac6..fc042e0f31c13 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StDistanceGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java index a7664987739e2..7e68261503800 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java index 06e44f996daf5..44e7b49ded915 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java index 31e20d9f42197..3bf5b1499ebe7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StDistanceGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java index 3d6dc7277080a..67530172987ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StEnvelope}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StEnvelopeFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StEnvelopeFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java index c61e825c0ee71..63ac0b27bd46d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StEnvelope}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StEnvelopeFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StEnvelopeFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java index d396529f532ed..9fdffdb4a047c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StX}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
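From StEnvelope onward the pointer changes to ConvertEvaluatorImplementer, and the hunks themselves show why: these classes extend AbstractConvertFunction.AbstractEvaluator rather than implementing EvalOperator.ExpressionEvaluator directly, reusing the unary conversion machinery. The generator input uses a different annotation; roughly, as a sketch (the annotation is in org.elasticsearch.compute.ann, while the method name and body are assumed for illustration):

    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.compute.ann.ConvertEvaluator;

    // Sketch: @ConvertEvaluator drives ConvertEvaluatorImplementer, and
    // extraName selects the generated class name, so "FromWKB" on StX
    // yields StXFromWKBEvaluator.
    public class StX {
        @ConvertEvaluator(extraName = "FromWKB")
        static double fromWellKnownBinary(BytesRef wkb) {
            // Illustrative body: the real code decodes the X coordinate
            // from the Well-Known Binary encoding of a point.
            return 0.0;
        }
    }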
*/ public final class StXFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java index 0d51ef709c217..3e5e70648704e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StXMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXMaxFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXMaxFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java index 3707bf421d550..bf0d72af3e254 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StXMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXMaxFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXMaxFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java index 699402ad68dee..c1126b4826056 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StXMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StXMinFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXMinFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java index 6a8c041595c1c..feb4610134cea 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StXMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXMinFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXMinFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java index 4e6e3a2ccd75a..765f2d20f0862 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StY}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java index e8b50099f38f6..6953433fa83bb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StYMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StYMaxFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYMaxFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java index 00e75f862a86c..9163932dd5a33 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StYMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYMaxFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYMaxFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java index cab66683261aa..94cd8169a8dc6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StYMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYMinFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYMinFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java index 8bae9d369fbb4..751fc84930dfe 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StYMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StYMinFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYMinFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java index 1352e5650bbfe..acf9613b29e7a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link AutomataMatch}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AutomataMatchEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java index 6564a2f3ef167..b7fc1d5f84ad4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link BitLength}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class BitLengthEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java index 1b0bff92d7d04..93c264a2e5b87 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ByteLength}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ByteLengthEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java index 02d1b1c86ea32..735bddc9918dd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ChangeCase}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ChangeCaseEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index e0cff017c14fd..9d22936ba7d02 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Concat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ConcatEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index d3d6e02bd9d73..f9b96479e87a4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link EndsWith}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EndsWithEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java index 34cff73018634..6eac2084410c8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hash}. - * This class is generated. Do not edit it. 
+ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class HashConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java index 8b01cc0330142..aee3d333fd517 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hash}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class HashEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index 7925787425d6e..ce46bad9159f5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LTrim}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LTrimEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index 9adcfbbdd9f39..443d41a7c9ff2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Left}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index 799f422414060..cbfcc4f81221c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Length}. - * This class is generated. Do not edit it. + * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class LengthEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java index 3ca7db9e5685e..afbd759f4bbb8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LocateEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java index 378252a4bbea9..5c015d2a04f6f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index 6fdeeddd3ab94..9b4a5a4165b42 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RTrim}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RTrimEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java index e679842ed47a1..c0799f7187322 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Repeat}. 
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RepeatConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java index 58e0aeb6af318..169df1f7faaae 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Repeat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RepeatEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java index 02a495285e6c9..a5aa37a0db56e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Replace}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ReplaceConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java index 8c58a76cc481e..7a7a947453d0a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Replace}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ReplaceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java index 408c16a9c4f7e..5dbcc7c38d90b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Reverse}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ReverseEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index a296096a13fb3..18c4087b23cb2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Right}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RightEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java index 14228522d9419..80167de794eec 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Space}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpaceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index 8b80b4ec06189..512222880b630 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Split}. - * This class is generated. Do not edit it. + * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index 54a1d6863cd84..d5bc8e7dcfd6d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Split}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SplitVariableEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index b175ea0b0d17e..347bd8c0747f6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StartsWith}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StartsWithEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java index 2ceb2230fb8f0..4a754daae9453 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Substring}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java index 31268b4557a62..f9d168e21548c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Substring}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubstringNoLengthEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java index 44215c06c9068..b756fd69302e5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Trim}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class TrimEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java index fe80536ea5d0d..a484dd87d0829 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class AddDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index 3cd91fdeb7ea2..2493924276af1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AddDatetimesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index 61ef4215b3e29..cd052b38c7791 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AddDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java index 3616711127ce6..680b70089b105 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
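The arithmetic operators close out the run with one generated evaluator per type specialization (AddInts, AddLongs, AddDoubles, AddUnsignedLongs, and the Div/Mod/Mul equivalents). Each specialization comes from its own annotated method on the operator, where extraName names the generated class; sketched below from the Add operator, with the caveat that the exact signature and warnExceptions syntax are recalled rather than verified against this revision:

    import org.elasticsearch.compute.ann.Evaluator;

    // Sketch: one annotated method per numeric type; this one would produce
    // AddIntsEvaluator. warnExceptions tells the generator to catch the
    // listed exceptions per row and record a warning plus a null result
    // instead of failing the whole query.
    public class Add {
        @Evaluator(extraName = "Ints", warnExceptions = { ArithmeticException.class })
        static int processInts(int lhs, int rhs) {
            // Math.addExact throws ArithmeticException on int overflow.
            return Math.addExact(lhs, rhs);
        }
    }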
*/ public final class AddIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java index fd046049980fe..b309702ccae6a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AddLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java index d89a0c83acd80..9267d93104541 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AddUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java index fb2fc81b25230..e8d2d440dc475 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DivDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java index fc3e587596881..c86acaaf8c05b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DivIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java index 49f15a6c90a4e..40c0df781f9ad 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DivLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java index 2307b4984f491..84547b719e3cb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DivUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java index b9a3e1419a124..1d48d6ba595b8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ModDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java index 38aaab26eccda..ed5fec274e62c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ModIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java index ec6cdaa93b1b0..f061968a07167 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ModLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java index e85291598ba53..b22af43813552 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ModUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java index 65ea4fe3e0a1d..7e11f0828b5af 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MulDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java index cfc30966deae3..dbb4157798e4e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MulIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java index 8e5a12b9ea1be..00433c86570c4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MulLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java index b4babd6b93176..d3b0b3c6d54be 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MulUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java index 8f573c9f431b5..ad0bbfd4f9bb7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Neg}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NegDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java index 7da1c10802933..bd186e723a86f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Neg}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NegIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java index d259edb076e40..2f2ef86cbaa3c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Neg}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NegLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java index 3b6f4c1046d40..7418d000281f1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java index 0ad395b4e5753..11a3a97416ef9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubDatetimesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index 30e44ed5b72ed..21776d21cadea 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java index 83680e58640f4..8a2f431908406 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java index fbead2000b585..5ff2efe3f6683 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java index 6acb5deb06225..856399ba0e4af 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java index 9403efd709083..70a3f0bd70aab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java index 9b3daa7317677..80e20c27d5485 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java index 73c0ab28f154b..16f83442f79d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsGeometriesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java index 93a776f558e90..b78f45d7fbd1e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java index 578ec20bfd183..e65359e8ee156 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java index 606e8d0a39efd..c8bde6b05afd2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java index b5013c4080507..91cd4614cc9d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java index 3ed1e922608e6..cdcc4c931fb19 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index 0f24db5826999..e3f1649048c9d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java index f7d0e46efd5d3..5014310820b06 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index 970f42f80bdf2..d99a2ff6bda70 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java index 9a5c2b03b2b98..7d202da760601 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java index bdd877c7f866e..c8c337f2af085 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java index d509547eb17ce..54683dd07523a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index 149e5c62a6975..10ec48cbbb8dd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index dfcf205342938..9ae1c86788d86 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index fb1d92c45a75a..e4fba4970409b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index b64c8093e5be5..085e41ec5fc20 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java index 7a0da0a55d0dc..ffb411ca82d42 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java index d4386a64aaf8a..1419308f4ba4a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanOrEqualNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java index eedaf97545380..1199a9d5ab7d1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link InsensitiveEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class InsensitiveEqualsConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java index 432c70dd1ae55..8c7abcfe891d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link InsensitiveEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class InsensitiveEqualsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java index cc6661a3f02c7..7f0b7e8f66b66 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java index 93be674d49725..cd0997a513c85 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java index 69d73a7f134a1..e88a9ae30d00b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java index 86bb587e1af46..5377441b5e8b5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java index 21d7d50af5b1e..dd63a3c364cd3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java index 48593f9d537f3..317a861b16ded 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index 07d1dad29fcad..bcfe416941b33 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index 819878ff7c0ef..83a8b4abc1c4d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index bfc192443b402..13e6b336286aa 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index 7ebf8695839f6..0e01abc93ce82 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java index 06973e71e834a..38d84fbd7a6d4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanOrEqualMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java index 4763629873d02..ee46cb74e10ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index ceb67a59021fa..1d96506ea34cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index 0f0a60fc03f23..11e3c7d1021ac 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java index a745eaffaf27d..d871a0cad6879 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsGeometriesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java index a3c803169b98e..15103562050cb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index 9e28b6d1dfe4b..5eb75c0bcf604 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java index 8114c6cb934af..0ba697142944f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java index 9bede03737a5f..8716cb3fee431 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java index e8e28eec7ee27..15fd009e7046e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index e8c5edc1c8b58..9d550ad328044 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -20,6 +20,8 @@ import java.util.Locale; import java.util.Set; +import static org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin.AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG; + /** * A {@link Set} of "capabilities" supported by the {@link RestEsqlQueryAction} * and {@link RestEsqlAsyncQueryAction} APIs. These are exposed over the @@ -119,12 +121,17 @@ public enum Cap { * Cast string literals to a desired data type for IN predicate and more types for BinaryComparison. */ STRING_LITERAL_AUTO_CASTING_EXTENDED, - /** * Support for metadata fields. */ METADATA_FIELDS, + /** + * Support specifically for *just* the _index METADATA field. Used by CsvTests, since that is the only metadata field currently + * supported. + */ + INDEX_METADATA_FIELD, + /** * Support for timespan units abbreviations */ @@ -668,7 +675,7 @@ public enum Cap { /** * Support simplified syntax for named parameters for field and function names. 
*/ - NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX(Build.current().isSnapshot()), + NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX(), /** * Fix pushdown of LIMIT past MV_EXPAND @@ -694,22 +701,22 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V12(Build.current().isSnapshot()), + JOIN_LOOKUP_V12, /** * LOOKUP JOIN with TEXT fields on the right (right side of the join) (#119473) */ - LOOKUP_JOIN_TEXT(Build.current().isSnapshot()), + LOOKUP_JOIN_TEXT(JOIN_LOOKUP_V12.isEnabled()), /** - * LOOKUP JOIN without MV matching (https://github.com/elastic/elasticsearch/issues/118780) + * LOOKUP JOIN skipping MVs and sending warnings (https://github.com/elastic/elasticsearch/issues/118780) */ - JOIN_LOOKUP_SKIP_MV(JOIN_LOOKUP_V12.isEnabled()), + JOIN_LOOKUP_SKIP_MV_WARNINGS(JOIN_LOOKUP_V12.isEnabled()), /** - * LOOKUP JOIN without MV matching on lookup index key (https://github.com/elastic/elasticsearch/issues/118780) + * Fix pushing down LIMIT past LOOKUP JOIN in case of multiple matching join keys. */ - JOIN_LOOKUP_SKIP_MV_ON_LOOKUP_KEY(JOIN_LOOKUP_V12.isEnabled()), + JOIN_LOOKUP_FIX_LIMIT_PUSHDOWN(JOIN_LOOKUP_V12.isEnabled()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 @@ -774,7 +781,35 @@ public enum Cap { /** * Support match options in match function */ - MATCH_FUNCTION_OPTIONS; + MATCH_FUNCTION_OPTIONS, + + /** + * Support for aggregate_metric_double type + */ + AGGREGATE_METRIC_DOUBLE(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + + /** + * Support for partial subset of metrics in aggregate_metric_double type + */ + AGGREGATE_METRIC_DOUBLE_PARTIAL_SUBMETRICS(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG), + + /** + * Support change point detection "CHANGE_POINT". + */ + CHANGE_POINT(Build.current().isSnapshot()), + + /** + * Fix for https://github.com/elastic/elasticsearch/issues/120817 + * and https://github.com/elastic/elasticsearch/issues/120803 + * Support for queries that have multiple SORTs that cannot become TopN + */ + REMOVE_REDUNDANT_SORT, + + /** + * Fixes a series of issues with inlinestats which had an incomplete implementation after lookup and inlinestats + * were refactored. 
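The constructor changes above follow a common gating pattern: a no-arg constant is enabled in every build, a boolean argument ties a capability to snapshot builds, and a follow-up capability chains off its parent's isEnabled() so the pair flips together. A self-contained sketch of that pattern (placeholder names; Boolean.getBoolean stands in for Build.current().isSnapshot()):

enum CapSketch {
    RELEASED,                                            // enabled in every build
    SNAPSHOT_ONLY(Boolean.getBoolean("build.snapshot")), // still under development
    FOLLOW_ON(SNAPSHOT_ONLY.isEnabled());                // flips together with its parent

    private final boolean enabled;

    CapSketch() {
        this(true);
    }

    CapSketch(boolean enabled) {
        this.enabled = enabled;
    }

    public boolean isEnabled() {
        return enabled;
    }
}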
+ */ + INLINESTATS_V3(EsqlPlugin.INLINESTATS_FEATURE_FLAG); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 0def56c70dc35..a065d0bd5e3a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -148,7 +148,8 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(versionToString(val)); } }; - case NULL -> new PositionToXContent(block) { + // TODO: Add implementation for aggregate_metric_double + case NULL, AGGREGATE_METRIC_DOUBLE -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index d8904288523a7..e77d7b41aaca6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -171,8 +171,7 @@ private static QueryParams parseParams(XContentParser p) throws IOException { String paramName = entry.getKey(); checkParamNameValidity(paramName, errors, loc); - if (EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - && entry.getValue() instanceof Map value) {// parameter specified as a key:value pair + if (entry.getValue() instanceof Map value) {// parameter specified as a key:value pair checkParamValueSize(paramName, value, loc, errors); for (Object keyName : value.keySet()) { classification = getParamClassification(keyName.toString(), errors, loc); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 49fcc167dce0f..710a66fb1d9f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -132,7 +132,7 @@ private static Object valueAt(DataType dataType, Block block, int offset, BytesR case GEO_POINT, GEO_SHAPE, CARTESIAN_POINT, CARTESIAN_SHAPE -> spatialToString( ((BytesRefBlock) block).getBytesRef(offset, scratch) ); - case UNSUPPORTED -> (String) null; + case UNSUPPORTED, AGGREGATE_METRIC_DOUBLE -> (String) null; case SOURCE -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); try { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 4f5ff35b84054..1351b5ce51f44 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -92,7 +92,7 @@ import org.elasticsearch.xpack.esql.rule.Rule; import org.elasticsearch.xpack.esql.rule.RuleExecutor; import org.elasticsearch.xpack.esql.session.Configuration; -import 
org.elasticsearch.xpack.esql.stats.FeatureMetric; +import org.elasticsearch.xpack.esql.telemetry.FeatureMetric; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.time.Duration; @@ -133,7 +133,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isTemporalAmount; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.LIMIT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.maybeParseTemporalAmount; /** @@ -220,7 +220,7 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR plan.metadataFields(), plan.indexMode(), indexResolutionMessage, - plan.commandName() + plan.telemetryLabel() ); } IndexPattern table = plan.indexPattern(); @@ -233,7 +233,7 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR plan.metadataFields(), plan.indexMode(), "invalid [" + table + "] resolution to [" + indexResolution + "]", - plan.commandName() + plan.telemetryLabel() ); } @@ -549,8 +549,7 @@ private LogicalPlan resolveMvExpand(MvExpand p, List childrenOutput) resolved, resolved.resolved() ? new ReferenceAttribute(resolved.source(), resolved.name(), resolved.dataType(), resolved.nullable(), null, false) - : resolved, - p.limit() + : resolved ); } return p; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index b59a112b1adb6..c2663650685eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -32,8 +32,8 @@ import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.stats.FeatureMetric; -import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.FeatureMetric; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import java.util.ArrayList; import java.util.BitSet; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java new file mode 100644 index 0000000000000..9116c18b7a9bc --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.capabilities; + +import java.util.Locale; + +/** + * Interface for plan nodes that need to be accounted for in the telemetry statistics + */ +public interface TelemetryAware { + + /** + * @return the label reported in the telemetry data. Only needs to be overridden if the label doesn't match the class name.
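To make the contract concrete, a hypothetical implementor looks like this (the class and label below are invented for illustration; UnresolvedRelation above is a real consumer via plan.telemetryLabel()):

// Without the override the node would be reported as "LABELEDNODE"; the
// override keeps the user-facing command spelling in the telemetry data.
class LabeledNode implements TelemetryAware {
    @Override
    public String telemetryLabel() {
        return "LOOKUP JOIN";
    }
}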
+ */ + default String telemetryLabel() { + return getClass().getSimpleName().toUpperCase(Locale.ROOT); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index cb2582db2ad33..4542fc196a192 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -177,7 +176,13 @@ public ThreadContext getThreadContext() { /** * Build a list of queries to perform inside the actual lookup. */ - protected abstract QueryList queryList(T request, SearchExecutionContext context, Block inputBlock, DataType inputDataType); + protected abstract QueryList queryList( + T request, + SearchExecutionContext context, + Block inputBlock, + DataType inputDataType, + Warnings warnings + ); /** * Build the response. @@ -207,7 +212,7 @@ protected static QueryList termQueryList( */ public final void lookupAsync(R request, CancellableTask parentTask, ActionListener> outListener) { ClusterState clusterState = clusterService.state(); - GroupShardsIterator shardIterators = clusterService.operationRouting() + List shardIterators = clusterService.operationRouting() .searchShards(clusterState, new String[] { request.index }, Map.of(), "_local"); if (shardIterators.size() != 1) { outListener.onFailure(new EsqlIllegalArgumentException("target index {} has more than one shard", request.index)); @@ -297,13 +302,13 @@ private void doLookup(T request, CancellableTask task, ActionListener } } releasables.add(finishPages); - QueryList queryList = queryList(request, shardContext.executionContext, inputBlock, request.inputDataType); var warnings = Warnings.createWarnings( DriverContext.WarningsMode.COLLECT, request.source.source().getLineNumber(), request.source.source().getColumnNumber(), request.source.text() ); + QueryList queryList = queryList(request, shardContext.executionContext, inputBlock, request.inputDataType, warnings); var queryOperator = new EnrichQuerySourceOperator( driverContext.blockFactory(), EnrichQuerySourceOperator.DEFAULT_MAX_PAGE_SIZE, @@ -327,6 +332,7 @@ private void doLookup(T request, CancellableTask task, ActionListener releasables.add(outputOperator); Driver driver = new Driver( "enrich-lookup:" + request.sessionId, + "enrich", System.currentTimeMillis(), System.nanoTime(), driverContext, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java index 8083d67e5a19d..8ebd67ffd5e9f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java @@ -16,7 +16,6 @@ import org.elasticsearch.compute.operator.AsyncOperator; import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; @@ -38,7 +37,6 @@ public final class EnrichLookupOperator extends AsyncOperator { private final String matchType; private final String matchField; private final List enrichFields; - private final ResponseHeadersCollector responseHeadersCollector; private final Source source; private long totalTerms = 0L; @@ -101,7 +99,7 @@ public EnrichLookupOperator( List enrichFields, Source source ) { - super(driverContext, maxOutstandingRequests); + super(driverContext, enrichLookupService.getThreadContext(), maxOutstandingRequests); this.sessionId = sessionId; this.parentTask = parentTask; this.inputChannel = inputChannel; @@ -112,7 +110,6 @@ public EnrichLookupOperator( this.matchField = matchField; this.enrichFields = enrichFields; this.source = source; - this.responseHeadersCollector = new ResponseHeadersCollector(enrichLookupService.getThreadContext()); } @Override @@ -135,11 +132,7 @@ protected void performAsync(Page inputPage, ActionListener listener) { } return inputPage.appendPage(pages.getFirst()); }; - enrichLookupService.lookupAsync( - request, - parentTask, - ActionListener.runBefore(listener.map(handleResponse), responseHeadersCollector::collect) - ); + enrichLookupService.lookupAsync(request, parentTask, listener.map(handleResponse)); } @Override @@ -171,12 +164,11 @@ public String toString() { protected void doClose() { // TODO: Maybe create a sub-task as the parent task of all the lookup tasks // then cancel it when this operator terminates early (e.g., have enough result). - responseHeadersCollector.finish(); } @Override - protected Operator.Status status(long receivedPages, long completedPages, long totalTimeInMillis) { - return new EnrichLookupOperator.Status(receivedPages, completedPages, totalTimeInMillis, totalTerms); + protected Operator.Status status(long receivedPages, long completedPages, long processNanos) { + return new EnrichLookupOperator.Status(receivedPages, completedPages, processNanos, totalTerms); } public static class Status extends AsyncOperator.Status { @@ -188,8 +180,8 @@ public static class Status extends AsyncOperator.Status { final long totalTerms; - Status(long receivedPages, long completedPages, long totalTimeInMillis, long totalTerms) { - super(receivedPages, completedPages, totalTimeInMillis); + Status(long receivedPages, long completedPages, long processNanos, long totalTerms) { + super(receivedPages, completedPages, processNanos); this.totalTerms = totalTerms; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 480b69ecd8e60..1dc18c090c1dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.compute.operator.lookup.QueryList; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; @@ -98,7 +99,13 @@ 
protected TransportRequest transportRequest(EnrichLookupService.Request request, } @Override - protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) { + protected QueryList queryList( + TransportRequest request, + SearchExecutionContext context, + Block inputBlock, + DataType inputDataType, + Warnings warnings + ) { MappedFieldType fieldType = context.getFieldType(request.matchField); validateTypes(inputDataType, fieldType); return switch (request.matchType) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java index c8e993b7dbf0b..cd571ebb676ac 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java @@ -434,8 +434,8 @@ public void messageReceived(LookupRequest request, TransportChannel channel, Tas } protected Map availablePolicies() { - final EnrichMetadata metadata = clusterService.state().metadata().custom(EnrichMetadata.TYPE); - return metadata == null ? Map.of() : metadata.getPolicies(); + final EnrichMetadata metadata = clusterService.state().metadata().custom(EnrichMetadata.TYPE, EnrichMetadata.EMPTY); + return metadata.getPolicies(); } protected void getRemoteConnection(String cluster, ActionListener listener) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java index e966b1346e28a..1f1361876d645 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java @@ -113,7 +113,7 @@ public LookupFromIndexOperator( List loadFields, Source source ) { - super(driverContext, maxOutstandingRequests); + super(driverContext, lookupService.getThreadContext(), maxOutstandingRequests); this.sessionId = sessionId; this.parentTask = parentTask; this.inputChannel = inputChannel; @@ -217,8 +217,8 @@ protected void doClose() { } @Override - protected Operator.Status status(long receivedPages, long completedPages, long totalTimeInMillis) { - return new LookupFromIndexOperator.Status(receivedPages, completedPages, totalTimeInMillis, totalTerms, emittedPages); + protected Operator.Status status(long receivedPages, long completedPages, long processNanos) { + return new LookupFromIndexOperator.Status(receivedPages, completedPages, processNanos, totalTerms, emittedPages); } public static class Status extends AsyncOperator.Status { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index 131d8ddfa5ccd..62d9733a04581 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.Warnings; import 
org.elasticsearch.compute.operator.lookup.QueryList; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.query.SearchExecutionContext; @@ -76,8 +79,17 @@ protected TransportRequest transportRequest(LookupFromIndexService.Request reque } @Override - protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) { - return termQueryList(context.getFieldType(request.matchField), context, inputBlock, inputDataType).onlySingleValues(); + protected QueryList queryList( + TransportRequest request, + SearchExecutionContext context, + Block inputBlock, + DataType inputDataType, + Warnings warnings + ) { + return termQueryList(context.getFieldType(request.matchField), context, inputBlock, inputDataType).onlySingleValues( + warnings, + "LOOKUP JOIN encountered multi-value" + ); } @Override @@ -140,6 +152,12 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { source = Source.readFrom(planIn); } + // Source.readFrom() requires the query from the Configuration passed to PlanStreamInput. + // As we don't have the Configuration here, and it may be heavy to serialize, we directly pass the Source text. + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_LOOKUP_JOIN_SOURCE_TEXT)) { + String sourceText = in.readString(); + source = new Source(source.source(), sourceText); + } TransportRequest result = new TransportRequest( sessionId, shardId, @@ -167,6 +185,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { source.writeTo(planOut); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_LOOKUP_JOIN_SOURCE_TEXT)) { + out.writeString(source.text()); + } } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 94913581f696d..611516fc55342 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -21,15 +21,15 @@ import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.EsqlSession; import org.elasticsearch.xpack.esql.session.IndexResolver; -import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.session.Result; -import org.elasticsearch.xpack.esql.stats.Metrics; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; -import org.elasticsearch.xpack.esql.stats.PlanningMetricsManager; -import org.elasticsearch.xpack.esql.stats.QueryMetric; +import org.elasticsearch.xpack.esql.telemetry.Metrics; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; +import
org.elasticsearch.xpack.esql.telemetry.PlanTelemetryManager; +import org.elasticsearch.xpack.esql.telemetry.QueryMetric; import static org.elasticsearch.action.ActionListener.wrap; @@ -41,7 +41,7 @@ public class PlanExecutor { private final Mapper mapper; private final Metrics metrics; private final Verifier verifier; - private final PlanningMetricsManager planningMetricsManager; + private final PlanTelemetryManager planTelemetryManager; public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XPackLicenseState licenseState) { this.indexResolver = indexResolver; @@ -50,7 +50,7 @@ public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XP this.mapper = new Mapper(); this.metrics = new Metrics(functionRegistry); this.verifier = new Verifier(metrics, licenseState); - this.planningMetricsManager = new PlanningMetricsManager(meterRegistry); + this.planTelemetryManager = new PlanTelemetryManager(meterRegistry); } public void esql( @@ -62,10 +62,10 @@ public void esql( EsqlExecutionInfo executionInfo, IndicesExpressionGrouper indicesExpressionGrouper, EsqlSession.PlanRunner planRunner, - QueryBuilderResolver queryBuilderResolver, + TransportActionServices services, ActionListener listener ) { - final PlanningMetrics planningMetrics = new PlanningMetrics(); + final PlanTelemetry planTelemetry = new PlanTelemetry(functionRegistry); final var session = new EsqlSession( sessionId, cfg, @@ -76,20 +76,20 @@ public void esql( new LogicalPlanOptimizer(new LogicalOptimizerContext(cfg, foldContext)), mapper, verifier, - planningMetrics, + planTelemetry, indicesExpressionGrouper, - queryBuilderResolver + services ); QueryMetric clientId = QueryMetric.fromString("rest"); metrics.total(clientId); ActionListener executeListener = wrap(x -> { - planningMetricsManager.publish(planningMetrics, true); + planTelemetryManager.publish(planTelemetry, true); listener.onResponse(x); }, ex -> { // TODO when we decide if we will differentiate Kibana from REST, this String value will likely come from the request metrics.failed(clientId); - planningMetricsManager.publish(planningMetrics, false); + planTelemetryManager.publish(planTelemetry, false); listener.onFailure(ex); }); // Wrap it in a listener so that if we have any exceptions during execution, the listener picks it up diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index d1622daaa5e33..0535beab3e780 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -224,6 +224,7 @@ public class EsqlFunctionRegistry { // it has with the alias name associated to the FunctionDefinition instance private final Map defs = new LinkedHashMap<>(); private final Map aliases = new HashMap<>(); + private final Map, String> names = new HashMap<>(); private SnapshotFunctionRegistry snapshotRegistry = null; @@ -258,6 +259,12 @@ public boolean functionExists(String functionName) { return defs.containsKey(functionName); } + public String functionName(Class clazz) { + String name = names.get(clazz); + Check.notNull(name, "Cannot find function by class {}", clazz); + return name; + } + public Collection listFunctions() { // It is worth double checking if we need this copy. 
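The new names map gives telemetry a reverse lookup from an expression class to its primary registered name, and the Check.notNull above turns an unregistered class into a hard failure instead of a silent null. A hedged usage sketch (the registry construction and class literal are illustrative):

EsqlFunctionRegistry registry = new EsqlFunctionRegistry();
String name = registry.functionName(Max.class); // "max" -- never an alias; throws if Max was not registered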
These are immutable anyway. return defs.values(); @@ -576,7 +583,7 @@ public record FunctionDescription( String[] returnType, String description, boolean variadic, - boolean isAggregation + FunctionType type ) { /** * The name of every argument. @@ -619,7 +626,7 @@ private static DataType getTargetType(String[] names) { public static FunctionDescription description(FunctionDefinition def) { Constructor constructor = constructorFor(def.clazz()); if (constructor == null) { - return new FunctionDescription(def.name(), List.of(), null, null, false, false); + return new FunctionDescription(def.name(), List.of(), null, null, false, FunctionType.SCALAR); } FunctionInfo functionInfo = functionInfo(def); String functionDescription = functionInfo == null ? "" : functionInfo.description().replace('\n', ' '); @@ -628,7 +635,6 @@ public static FunctionDescription description(FunctionDefinition def) { List args = new ArrayList<>(params.length); boolean variadic = false; - boolean isAggregation = functionInfo != null && functionInfo.isAggregation(); for (int i = 1; i < params.length; i++) { // skipping 1st argument, the source if (Configuration.class.isAssignableFrom(params[i].getType()) == false) { variadic |= List.class.isAssignableFrom(params[i].getType()); @@ -641,7 +647,7 @@ public static FunctionDescription description(FunctionDefinition def) { } } } - return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, isAggregation); + return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, functionInfo.type()); } public static ArgSignature param(Param param) { @@ -758,6 +764,14 @@ void register(FunctionDefinition... functions) { } aliases.put(alias, f.name()); } + Check.isTrue( + names.containsKey(f.clazz()) == false, + "function type [{}] is registered twice with names [{}] and [{}]", + f.clazz(), + names.get(f.clazz()), + f.name() + ); + names.put(f.clazz(), f.name()); } // sort the temporary map by key name and add it to the global map of functions defs.putAll( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDoc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDoc.java deleted file mode 100644 index f6514c2a44ecd..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDoc.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.xpack.esql.expression.function; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Retention(RetentionPolicy.RUNTIME) -@Target({ ElementType.TYPE }) -public @interface FunctionDoc { - enum FunctionType { - AGGREGATE, - DATE_TIME, - MATH, - STRING, - } - - FunctionType type(); - - String description(); - - String synopsis(); - - String[] arguments() default {}; - - String output(); - - String examples(); - -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index f3cdd324769e5..5c5c2f26279a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -66,9 +66,9 @@ String appendix() default ""; /** - * Is this an aggregation (true) or a scalar function (false). + * The position the function can appear in the language. */ - boolean isAggregation() default false; + FunctionType type() default FunctionType.SCALAR; /** * Examples of using this function that are rendered in the docs. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionType.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionType.java new file mode 100644 index 0000000000000..876ac48eacaf4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionType.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +/** + * The position the function can appear in the language. + */ +public enum FunctionType { + /** + * Functions that can appear anywhere. For example, {@code LENGTH} in + * {@code | STATS MAX(LENGTH(string))} and {@code | EVAL l = LENGTH(string)}. + */ + SCALAR, + /** + * Functions that can only appear in the "aggregate" position of a {@code STATS}. + * For example, {@code MAX} in {@code | STATS MAX(LENGTH(string))}. + */ + AGGREGATE, + /** + * Functions that can only appear in the "grouping" position of a {@code STATS}. + * For example, {@code CATEGORIZE} in {@code | STATS MAX(a) BY CATEGORIZE(message)}. 
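On the declaration side, the enum replaces the old isAggregation boolean on @FunctionInfo. A grouping-only function would be annotated roughly as follows (abbreviated and illustrative, not the literal Categorize source):

@FunctionInfo(
    returnType = "keyword",
    description = "Groups text values into categories of similarly formatted messages.",
    type = FunctionType.GROUPING
)
public Categorize(Source source, @Param(name = "field", type = { "keyword", "text" }) Expression field) {
    // constructor body elided
}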
+ */ + GROUPING, +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 82c0f9d24899e..41feee0e63661 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; @@ -34,7 +35,7 @@ public class Avg extends AggregateFunction implements SurrogateExpression { @FunctionInfo( returnType = "double", description = "The average of a numeric field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "avg"), @Example( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index 3a0d616d407a3..1d6a88ddcec3c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.CountAggregatorFunction; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -21,7 +22,9 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; @@ -40,7 +43,7 @@ public class Count extends AggregateFunction implements ToAggregator, SurrogateE @FunctionInfo( returnType = "long", description = "Returns the total number (count) of input values.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "count"), @Example(description = "To count the number of rows, use `COUNT()` or `COUNT(*)`", file = "docs", tag = "countAll"), @@ -71,6 +74,7 @@ public Count( optional = true, name = "field", type = { + "aggregate_metric_double", "boolean", "cartesian_point", "date", @@ -123,8 
+127,8 @@ public DataType dataType() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { - return CountAggregatorFunction.supplier(inputChannels); + public AggregatorFunctionSupplier supplier() { + return CountAggregatorFunction.supplier(); } @Override @@ -141,6 +145,9 @@ protected TypeResolution resolveType() { public Expression surrogate() { var s = source(); var field = field(); + if (field.dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return new Sum(s, FromAggregateMetricDouble.withMetric(source(), field, AggregateMetricDoubleBlockBuilder.Metric.COUNT)); + } if (field.foldable()) { if (field instanceof Literal l) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 3170ae8f132c2..f97ead54c7be9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; @@ -40,7 +41,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.BiFunction; +import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -56,9 +57,9 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument CountDistinct::new ); - private static final Map, Integer, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + private static final Map> SUPPLIERS = Map.ofEntries( // Booleans ignore the precision because there are only two possible values anyway - Map.entry(DataType.BOOLEAN, (inputChannels, precision) -> new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels)), + Map.entry(DataType.BOOLEAN, (precision) -> new CountDistinctBooleanAggregatorFunctionSupplier()), Map.entry(DataType.LONG, CountDistinctLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATETIME, CountDistinctLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATE_NANOS, CountDistinctLongAggregatorFunctionSupplier::new), @@ -101,7 +102,7 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument maximum supported value is 40000, thresholds above this number will have the same effect as a threshold of 40000. The default value is `3000`. """, - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats_count_distinct", tag = "count-distinct"), @Example( @@ -209,7 +210,7 @@ protected TypeResolution resolveType() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); int precision = this.precision == null ? 
DEFAULT_PRECISION @@ -218,7 +219,7 @@ public AggregatorFunctionSupplier supplier(List inputChannels) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); } - return SUPPLIERS.get(type).apply(inputChannels, precision); + return SUPPLIERS.get(type).apply(precision); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java index a67b87c7617c4..bb9ed1780053f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java @@ -19,6 +19,7 @@ import org.elasticsearch.compute.aggregation.FromPartialGroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -110,38 +111,44 @@ public FromPartial withFilter(Expression filter) { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { - final ToAggregator toAggregator = (ToAggregator) function; - if (inputChannels.size() != 1) { - assert false : "from_partial aggregation requires exactly one input channel; got " + inputChannels; - throw new IllegalArgumentException("from_partial aggregation requires exactly one input channel; got " + inputChannels); - } - final int inputChannel = inputChannels.get(0); + public AggregatorFunctionSupplier supplier() { + final AggregatorFunctionSupplier supplier = ((ToAggregator) function).supplier(); return new AggregatorFunctionSupplier() { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return FromPartialAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return FromPartialGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { assert false : "aggregatorFactory() is override"; throw new UnsupportedOperationException(); } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels) { assert false : "groupingAggregatorFactory() is override"; throw new UnsupportedOperationException(); } @Override - public Aggregator.Factory aggregatorFactory(AggregatorMode mode) { - final AggregatorFunctionSupplier supplier; - try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { - var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); - supplier = toAggregator.supplier(intermediateChannels); + public Aggregator.Factory aggregatorFactory(AggregatorMode mode, List channels) { + if (channels.size() != 1) { + assert false : "from_partial aggregation requires exactly one input channel; got " + channels; + throw new IllegalArgumentException("from_partial aggregation requires exactly 
one input channel; got " + channels); } + final int inputChannel = channels.get(0); + var intermediateChannels = IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size()).boxed().toList(); return new Aggregator.Factory() { @Override public Aggregator apply(DriverContext driverContext) { // use groupingAggregator since we can receive intermediate output from a grouping aggregate - final var groupingAggregator = supplier.groupingAggregator(driverContext); + final var groupingAggregator = supplier.groupingAggregator(driverContext, intermediateChannels); return new Aggregator(new FromPartialAggregatorFunction(driverContext, groupingAggregator, inputChannel), mode); } @@ -153,16 +160,17 @@ public String describe() { } @Override - public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode) { - final AggregatorFunctionSupplier supplier; - try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { - var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); - supplier = toAggregator.supplier(intermediateChannels); + public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode, List channels) { + if (channels.size() != 1) { + assert false : "from_partial aggregation requires exactly one input channel; got " + channels; + throw new IllegalArgumentException("from_partial aggregation requires exactly one input channel; got " + channels); } + final int inputChannel = channels.get(0); + var intermediateChannels = IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size()).boxed().toList(); return new GroupingAggregator.Factory() { @Override public GroupingAggregator apply(DriverContext driverContext) { - final GroupingAggregatorFunction aggregator = supplier.groupingAggregator(driverContext); + final GroupingAggregatorFunction aggregator = supplier.groupingAggregator(driverContext, intermediateChannels); return new GroupingAggregator(new FromPartialGroupingAggregatorFunction(aggregator, inputChannel), mode); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index eb0c8abd1080b..be08627a4fd6d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.aggregation.MaxIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxIpAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -26,14 +27,16 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import 
org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.Function; +import java.util.function.Supplier; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -41,7 +44,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Max", Max::new); - private static final Map, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + private static final Map> SUPPLIERS = Map.ofEntries( Map.entry(DataType.BOOLEAN, MaxBooleanAggregatorFunctionSupplier::new), Map.entry(DataType.LONG, MaxLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATETIME, MaxLongAggregatorFunctionSupplier::new), @@ -58,7 +61,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExp @FunctionInfo( returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" }, description = "The maximum value of a field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "max"), @Example( @@ -73,7 +76,19 @@ public Max( Source source, @Param( name = "field", - type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" } + type = { + "aggregate_metric_double", + "boolean", + "double", + "integer", + "long", + "date", + "date_nanos", + "ip", + "keyword", + "text", + "long", + "version" } ) Expression field ) { this(source, field, Literal.TRUE); @@ -111,7 +126,7 @@ public Max replaceChildren(List newChildren) { protected TypeResolution resolveType() { return TypeResolutions.isType( field(), - SUPPLIERS::containsKey, + dt -> SUPPLIERS.containsKey(dt) || dt == DataType.AGGREGATE_METRIC_DOUBLE, sourceText(), DEFAULT, "representable except unsigned_long and spatial types" @@ -120,21 +135,27 @@ protected TypeResolution resolveType() { @Override public DataType dataType() { + if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return DataType.DOUBLE; + } return field().dataType().noText(); } @Override - public final AggregatorFunctionSupplier supplier(List inputChannels) { + public final AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (SUPPLIERS.containsKey(type) == false) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); } - return SUPPLIERS.get(type).apply(inputChannels); + return SUPPLIERS.get(type).get(); } @Override public Expression surrogate() { + if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return new Max(source(), FromAggregateMetricDouble.withMetric(source(), field(), AggregateMetricDoubleBlockBuilder.Metric.MAX)); + } return field().foldable() ? 
new MvMax(source(), field()) : null; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index c47fa612c1c49..41f3ea0efea06 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMedian; @@ -44,7 +45,7 @@ public class Median extends AggregateFunction implements SurrogateExpression { `MEDIAN` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. This means you can get slightly different results using the same data. ====""", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats_percentile", tag = "median"), @Example( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index 42960cafdfd3a..5c7db4e31502a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMedianAbsoluteDeviation; @@ -54,7 +55,7 @@ public class MedianAbsoluteDeviation extends NumericAggregate implements Surroga `MEDIAN_ABSOLUTE_DEVIATION` is also {wikipedia}/Nondeterministic_algorithm[non-deterministic]. This means you can get slightly different results using the same data. 
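Stepping back, the aggregate_metric_double support added to MAX above hinges on surrogate rewriting: before execution the planner asks each SurrogateExpression to replace itself, and MAX over the composite type re-targets the pre-aggregated max sub-metric. Condensed from the Max diff above (imports elided):

@Override
public Expression surrogate() {
    if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) {
        // MAX(agg_metric) becomes MAX over the stored "max" sub-metric, so the
        // per-document pre-aggregates are reused as-is.
        return new Max(source(), FromAggregateMetricDouble.withMetric(source(), field(), AggregateMetricDoubleBlockBuilder.Metric.MAX));
    }
    return field().foldable() ? new MvMax(source(), field()) : null;
}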
====""", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "median_absolute_deviation", tag = "median-absolute-deviation"), @Example( @@ -99,18 +100,18 @@ public MedianAbsoluteDeviation withFilter(Expression filter) { } @Override - protected AggregatorFunctionSupplier longSupplier(List inputChannels) { - return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier longSupplier() { + return new MedianAbsoluteDeviationLongAggregatorFunctionSupplier(); } @Override - protected AggregatorFunctionSupplier intSupplier(List inputChannels) { - return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier intSupplier() { + return new MedianAbsoluteDeviationIntAggregatorFunctionSupplier(); } @Override - protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { - return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier doubleSupplier() { + return new MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 472f0b1ff5cd1..1b1c4ea7b0296 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.aggregation.MinIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinIpAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -26,14 +27,16 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.Function; +import java.util.function.Supplier; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -41,7 +44,7 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Min", Min::new); - private static final Map, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + private static final Map> SUPPLIERS = Map.ofEntries( Map.entry(DataType.BOOLEAN, MinBooleanAggregatorFunctionSupplier::new), Map.entry(DataType.LONG, MinLongAggregatorFunctionSupplier::new), 
Map.entry(DataType.DATETIME, MinLongAggregatorFunctionSupplier::new), @@ -58,7 +61,7 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExp @FunctionInfo( returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" }, description = "The minimum value of a field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "min"), @Example( @@ -73,7 +76,19 @@ public Min( Source source, @Param( name = "field", - type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" } + type = { + "aggregate_metric_double", + "boolean", + "double", + "integer", + "long", + "date", + "date_nanos", + "ip", + "keyword", + "text", + "long", + "version" } ) Expression field ) { this(source, field, Literal.TRUE); @@ -111,7 +126,7 @@ public Min withFilter(Expression filter) { protected TypeResolution resolveType() { return TypeResolutions.isType( field(), - SUPPLIERS::containsKey, + dt -> SUPPLIERS.containsKey(dt) || dt == DataType.AGGREGATE_METRIC_DOUBLE, sourceText(), DEFAULT, "representable except unsigned_long and spatial types" @@ -120,21 +135,27 @@ protected TypeResolution resolveType() { @Override public DataType dataType() { + if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return DataType.DOUBLE; + } return field().dataType().noText(); } @Override - public final AggregatorFunctionSupplier supplier(List inputChannels) { + public final AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (SUPPLIERS.containsKey(type) == false) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); } - return SUPPLIERS.get(type).apply(inputChannels); + return SUPPLIERS.get(type).get(); } @Override public Expression surrogate() { + if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return new Min(source(), FromAggregateMetricDouble.withMetric(source(), field(), AggregateMetricDoubleBlockBuilder.Metric.MIN)); + } return field().foldable() ? 
new MvMin(source(), field()) : null; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java index 5c639c465c649..3289e1aded4ea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java @@ -92,26 +92,26 @@ public DataType dataType() { } @Override - public final AggregatorFunctionSupplier supplier(List<Integer> inputChannels) { + public final AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (supportsDates() && type == DataType.DATETIME) { - return longSupplier(inputChannels); + return longSupplier(); } if (type == DataType.LONG) { - return longSupplier(inputChannels); + return longSupplier(); } if (type == DataType.INTEGER) { - return intSupplier(inputChannels); + return intSupplier(); } if (type == DataType.DOUBLE) { - return doubleSupplier(inputChannels); + return doubleSupplier(); } throw EsqlIllegalArgumentException.illegalDataType(type); } - protected abstract AggregatorFunctionSupplier longSupplier(List<Integer> inputChannels); + protected abstract AggregatorFunctionSupplier longSupplier(); - protected abstract AggregatorFunctionSupplier intSupplier(List<Integer> inputChannels); + protected abstract AggregatorFunctionSupplier intSupplier(); - protected abstract AggregatorFunctionSupplier doubleSupplier(List<Integer> inputChannels); + protected abstract AggregatorFunctionSupplier doubleSupplier(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 8c943c991d501..fb61db603486b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvPercentile; @@ -65,7 +66,7 @@ public class Percentile extends NumericAggregate implements SurrogateExpression This means you can get slightly different results using the same data.
==== """, - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats_percentile", tag = "percentile"), @Example( @@ -156,18 +157,18 @@ protected TypeResolution resolveType() { } @Override - protected AggregatorFunctionSupplier longSupplier(List inputChannels) { - return new PercentileLongAggregatorFunctionSupplier(inputChannels, percentileValue()); + protected AggregatorFunctionSupplier longSupplier() { + return new PercentileLongAggregatorFunctionSupplier(percentileValue()); } @Override - protected AggregatorFunctionSupplier intSupplier(List inputChannels) { - return new PercentileIntAggregatorFunctionSupplier(inputChannels, percentileValue()); + protected AggregatorFunctionSupplier intSupplier() { + return new PercentileIntAggregatorFunctionSupplier(percentileValue()); } @Override - protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { - return new PercentileDoubleAggregatorFunctionSupplier(inputChannels, percentileValue()); + protected AggregatorFunctionSupplier doubleSupplier() { + return new PercentileDoubleAggregatorFunctionSupplier(percentileValue()); } private int percentileValue() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index 85ae65b6c5dc3..ae385da4c86e3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -49,7 +50,7 @@ public class Rate extends AggregateFunction implements OptionalArgument, ToAggre @FunctionInfo( returnType = { "double" }, description = "compute the rate of a counter field. 
Available in METRICS command only", - isAggregation = true + type = FunctionType.AGGREGATE ) public Rate( Source source, @@ -168,16 +169,13 @@ long unitInMillis() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { - if (inputChannels.size() != 2 && inputChannels.size() != 3) { - throw new IllegalArgumentException("rate requires two for raw input or three channels for partial input; got " + inputChannels); - } + public AggregatorFunctionSupplier supplier() { final long unitInMillis = unitInMillis(); final DataType type = field().dataType(); return switch (type) { - case COUNTER_LONG -> new RateLongAggregatorFunctionSupplier(inputChannels, unitInMillis); - case COUNTER_INTEGER -> new RateIntAggregatorFunctionSupplier(inputChannels, unitInMillis); - case COUNTER_DOUBLE -> new RateDoubleAggregatorFunctionSupplier(inputChannels, unitInMillis); + case COUNTER_LONG -> new RateLongAggregatorFunctionSupplier(unitInMillis); + case COUNTER_INTEGER -> new RateIntAggregatorFunctionSupplier(unitInMillis); + case COUNTER_DOUBLE -> new RateDoubleAggregatorFunctionSupplier(unitInMillis); default -> throw EsqlIllegalArgumentException.illegalDataType(type); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java index fad308e38cb26..70f264129a06c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -45,7 +46,7 @@ public class SpatialCentroid extends SpatialAggregateFunction implements ToAggre @FunctionInfo( returnType = { "geo_point", "cartesian_point" }, description = "Calculate the spatial centroid over a field with spatial point geometry type.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "spatial", tag = "st_centroid_agg-airports") ) public SpatialCentroid(Source source, @Param(name = "field", type = { "geo_point", "cartesian_point" }) Expression field) { @@ -98,16 +99,16 @@ public SpatialCentroid replaceChildren(List newChildren) { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); return switch (type) { case DataType.GEO_POINT -> switch (fieldExtractPreference) { - case DOC_VALUES -> new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + case DOC_VALUES -> new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(); + case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(); }; case DataType.CARTESIAN_POINT -> switch (fieldExtractPreference) { - case DOC_VALUES -> new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); - case 
NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + case DOC_VALUES -> new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(); + case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(); }; default -> throw EsqlIllegalArgumentException.illegalDataType(type); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java index 5d56fe1e1169a..419c1a8416c9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -48,7 +49,7 @@ public final class SpatialExtent extends SpatialAggregateFunction implements ToA @FunctionInfo( returnType = { "geo_shape", "cartesian_shape" }, description = "Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "spatial", tag = "st_extent_agg-airports") ) public SpatialExtent( @@ -102,25 +103,25 @@ public SpatialExtent replaceChildren(List newChildren) { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); return switch (type) { case DataType.GEO_POINT -> switch (fieldExtractPreference) { - case DOC_VALUES -> new SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + case DOC_VALUES -> new SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(); + case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(); }; case DataType.CARTESIAN_POINT -> switch (fieldExtractPreference) { - case DOC_VALUES -> new SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + case DOC_VALUES -> new SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(); + case NONE, EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(); }; case DataType.GEO_SHAPE -> switch (fieldExtractPreference) { - case EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE -> new SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier(inputChannels); + case EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier(); + case NONE -> new SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier(); case DOC_VALUES -> throw new EsqlIllegalArgumentException("Illegal field extract preference: " + 
fieldExtractPreference); }; case DataType.CARTESIAN_SHAPE -> switch (fieldExtractPreference) { - case EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier(inputChannels); - case NONE -> new SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier(inputChannels); + case EXTRACT_SPATIAL_BOUNDS -> new SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier(); + case NONE -> new SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier(); case DOC_VALUES -> throw new EsqlIllegalArgumentException("Illegal field extract preference: " + fieldExtractPreference); }; default -> throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java index 189b6a81912cb..19365c3166d13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -37,7 +38,7 @@ public class StdDev extends AggregateFunction implements ToAggregator { @FunctionInfo( returnType = "double", description = "The standard deviation of a numeric field.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "stdev"), @Example( @@ -96,16 +97,16 @@ public StdDev withFilter(Expression filter) { } @Override - public final AggregatorFunctionSupplier supplier(List inputChannels) { + public final AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (type == DataType.LONG) { - return new StdDevLongAggregatorFunctionSupplier(inputChannels); + return new StdDevLongAggregatorFunctionSupplier(); } if (type == DataType.INTEGER) { - return new StdDevIntAggregatorFunctionSupplier(inputChannels); + return new StdDevIntAggregatorFunctionSupplier(); } if (type == DataType.DOUBLE) { - return new StdDevDoubleAggregatorFunctionSupplier(inputChannels); + return new StdDevDoubleAggregatorFunctionSupplier(); } throw EsqlIllegalArgumentException.illegalDataType(type); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 37c2abaae1e4e..f8fe28d85a929 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -12,8 +12,10 @@ import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import 
org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -21,7 +23,9 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; @@ -29,6 +33,9 @@ import java.util.List; import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.AGGREGATE_METRIC_DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; @@ -42,7 +49,7 @@ public class Sum extends NumericAggregate implements SurrogateExpression { @FunctionInfo( returnType = { "long", "double" }, description = "The sum of a numeric expression.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = { @Example(file = "stats", tag = "sum"), @Example( @@ -53,7 +60,7 @@ public class Sum extends NumericAggregate implements SurrogateExpression { tag = "docsStatsSumNestedExpression" ) } ) - public Sum(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { + public Sum(Source source, @Param(name = "number", type = { "aggregate_metric_double", "double", "integer", "long" }) Expression field) { this(source, field, Literal.TRUE); } @@ -92,24 +99,48 @@ public DataType dataType() { } @Override - protected AggregatorFunctionSupplier longSupplier(List inputChannels) { - return new SumLongAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier longSupplier() { + return new SumLongAggregatorFunctionSupplier(); } @Override - protected AggregatorFunctionSupplier intSupplier(List inputChannels) { - return new SumIntAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier intSupplier() { + return new SumIntAggregatorFunctionSupplier(); } @Override - protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { - return new SumDoubleAggregatorFunctionSupplier(inputChannels); + protected AggregatorFunctionSupplier doubleSupplier() { + return new SumDoubleAggregatorFunctionSupplier(); + } + + @Override + protected TypeResolution resolveType() { + if (supportsDates()) { + return TypeResolutions.isType( + this, + e -> e == DataType.DATETIME || e == DataType.AGGREGATE_METRIC_DOUBLE || e.isNumeric() && e != DataType.UNSIGNED_LONG, + sourceText(), + DEFAULT, + "datetime", + "aggregate_metric_double or numeric except unsigned_long or counter types" + ); + } + return isType( + field(), + dt -> dt == DataType.AGGREGATE_METRIC_DOUBLE || dt.isNumeric() && dt != DataType.UNSIGNED_LONG, + sourceText(), + 
DEFAULT, + "aggregate_metric_double or numeric except unsigned_long or counter types" + ); } @Override public Expression surrogate() { var s = source(); var field = field(); + if (field.dataType() == AGGREGATE_METRIC_DOUBLE) { + return new Sum(s, FromAggregateMetricDouble.withMetric(source(), field, AggregateMetricDoubleBlockBuilder.Metric.SUM)); + } // SUM(const) is equivalent to MV_SUM(const)*COUNT(*). return field.foldable() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java index a2856f60e4c51..04dadb5e3bb91 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.aggregation.FromPartialGroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.aggregation.ToPartialAggregatorFunction; import org.elasticsearch.compute.aggregation.ToPartialGroupingAggregatorFunction; import org.elasticsearch.compute.operator.DriverContext; @@ -127,37 +128,41 @@ protected NodeInfo info() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { - final ToAggregator toAggregator = (ToAggregator) function; + public AggregatorFunctionSupplier supplier() { + final AggregatorFunctionSupplier supplier = ((ToAggregator) function).supplier(); return new AggregatorFunctionSupplier() { @Override - public AggregatorFunction aggregator(DriverContext driverContext) { + public List nonGroupingIntermediateStateDesc() { + return ToPartialAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return ToPartialGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext, List channels) { assert false : "aggregatorFactory() is override"; throw new UnsupportedOperationException(); } @Override - public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List channels) { assert false : "groupingAggregatorFactory() is override"; throw new UnsupportedOperationException(); } @Override - public Aggregator.Factory aggregatorFactory(AggregatorMode mode) { - final AggregatorFunctionSupplier supplier; - if (mode.isInputPartial()) { - try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { - var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); - supplier = toAggregator.supplier(intermediateChannels); - } - } else { - supplier = toAggregator.supplier(inputChannels); - } + public Aggregator.Factory aggregatorFactory(AggregatorMode mode, List channels) { + List intermediateChannels = mode.isInputPartial() + ? 
IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size()).boxed().toList() + : channels; return new Aggregator.Factory() { @Override public Aggregator apply(DriverContext driverContext) { - final AggregatorFunction aggregatorFunction = supplier.aggregator(driverContext); - return new Aggregator(new ToPartialAggregatorFunction(aggregatorFunction, inputChannels), mode); + final AggregatorFunction aggregatorFunction = supplier.aggregator(driverContext, intermediateChannels); + return new Aggregator(new ToPartialAggregatorFunction(aggregatorFunction, channels), mode); } @Override @@ -168,21 +173,18 @@ public String describe() { } @Override - public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode) { - final AggregatorFunctionSupplier supplier; - if (mode.isInputPartial()) { - try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { - var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); - supplier = toAggregator.supplier(intermediateChannels); - } - } else { - supplier = toAggregator.supplier(inputChannels); - } + public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode, List channels) { + List intermediateChannels = mode.isInputPartial() + ? IntStream.range(0, supplier.nonGroupingIntermediateStateDesc().size()).boxed().toList() + : channels; return new GroupingAggregator.Factory() { @Override public GroupingAggregator apply(DriverContext driverContext) { - final GroupingAggregatorFunction aggregatorFunction = supplier.groupingAggregator(driverContext); - return new GroupingAggregator(new ToPartialGroupingAggregatorFunction(aggregatorFunction, inputChannels), mode); + final GroupingAggregatorFunction aggregatorFunction = supplier.groupingAggregator( + driverContext, + intermediateChannels + ); + return new GroupingAggregator(new ToPartialGroupingAggregatorFunction(aggregatorFunction, channels), mode); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java index 9be8c94266ee8..f31153d228e74 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -54,7 +55,7 @@ public class Top extends AggregateFunction implements ToAggregator, SurrogateExp @FunctionInfo( returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword" }, description = "Collects the top values for a field. 
Includes repeated values.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "stats_top", tag = "top") ) public Top( @@ -188,25 +189,25 @@ public Top replaceChildren(List newChildren) { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (type == DataType.LONG || type == DataType.DATETIME) { - return new TopLongAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopLongAggregatorFunctionSupplier(limitValue(), orderValue()); } if (type == DataType.INTEGER) { - return new TopIntAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopIntAggregatorFunctionSupplier(limitValue(), orderValue()); } if (type == DataType.DOUBLE) { - return new TopDoubleAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopDoubleAggregatorFunctionSupplier(limitValue(), orderValue()); } if (type == DataType.BOOLEAN) { - return new TopBooleanAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopBooleanAggregatorFunctionSupplier(limitValue(), orderValue()); } if (type == DataType.IP) { - return new TopIpAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopIpAggregatorFunctionSupplier(limitValue(), orderValue()); } if (DataType.isString(type)) { - return new TopBytesRefAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopBytesRefAggregatorFunctionSupplier(limitValue(), orderValue()); } throw EsqlIllegalArgumentException.illegalDataType(type); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 5260b3e8fa279..4dbe0e93b5017 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -24,13 +24,14 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.Function; +import java.util.function.Supplier; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -38,7 +39,7 @@ public class Values extends AggregateFunction implements ToAggregator { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Values", Values::new); - private static final Map, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries( + private static final Map> SUPPLIERS = Map.ofEntries( Map.entry(DataType.INTEGER, ValuesIntAggregatorFunctionSupplier::new), Map.entry(DataType.LONG, ValuesLongAggregatorFunctionSupplier::new), Map.entry(DataType.DATETIME, ValuesLongAggregatorFunctionSupplier::new), @@ -66,7 +67,7 @@ public class Values extends AggregateFunction implements ToAggregator { collects too many values it 
will fail the query with a <>. ====""", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "string", tag = "values-grouped") ) public Values( @@ -124,12 +125,12 @@ protected TypeResolution resolveType() { } @Override - public AggregatorFunctionSupplier supplier(List inputChannels) { + public AggregatorFunctionSupplier supplier() { DataType type = field().dataType(); if (SUPPLIERS.containsKey(type) == false) { // If the type checking did its job, this should never happen throw EsqlIllegalArgumentException.illegalDataType(type); } - return SUPPLIERS.get(type).apply(inputChannels); + return SUPPLIERS.get(type).get(); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java index bab65653ba576..c58bc997527b0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; @@ -48,7 +49,7 @@ public class WeightedAvg extends AggregateFunction implements SurrogateExpressio @FunctionInfo( returnType = "double", description = "The weighted average of a numeric expression.", - isAggregation = true, + type = FunctionType.AGGREGATE, examples = @Example(file = "stats", tag = "weighted-avg") ) public WeightedAvg( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java index 9f08401a42dd1..4115386882207 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java @@ -85,10 +85,6 @@ * To introduce your aggregation to the engine: *
 * <ul>
- *     <li>
- *         Add it to {@code org.elasticsearch.xpack.esql.planner.AggregateMapper}.
- *         Check all usages of other aggregations there, and replicate the logic.
- *     </li>
 *     <li>
 *         Implement serialization for your aggregation by implementing
 *         {@link org.elasticsearch.common.io.stream.NamedWriteable#getWriteableName},
 *         {@link org.elasticsearch.common.io.stream.NamedWriteable#writeTo},
@@ -97,7 +93,7 @@
 *         {@link org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateWritables#getNamedWriteables}.
 *     </li>
 *     <li>
- *         Do the same with {@link org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry}.
+ *         Add it to {@link org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry}.
 *     </li>
 * </ul>
@@ -105,8 +101,84 @@
 *
 * <h3>Creating aggregators for your function</h3>
 *
- * Aggregators contain the core logic of your aggregation. That is, how to combine values, what to store, how to process data, etc.
+ * Aggregators contain the core logic of how to combine values, what to store, and how to process data.
+ * Currently, we rely on code generation (per aggregation, per type) to implement such functionality.
+ * This approach was picked for performance reasons (namely, to avoid virtual method calls and boxing).
+ * As a result, we cannot rely on interface implementations or generics.
+ *
+ * In order to implement the aggregation logic, create your class (typically named "${FunctionName}${Type}Aggregator").
+ * It must be placed in `org.elasticsearch.compute.aggregation` in order to be picked up by code generation.
+ * Annotate it with {@link org.elasticsearch.compute.ann.Aggregator} and {@link org.elasticsearch.compute.ann.GroupingAggregator}.
+ * The first one is responsible for aggregating an entire data set, while the second one is responsible for grouping within buckets.
+ *
+ * Before you start implementing it, please note that:
+ * <ul>
+ *     <li>All methods must be public static</li>
+ *     <li>
+ *         {@code init/initSingle/initGrouping} could have optional {@link org.elasticsearch.common.util.BigArrays} or
+ *         {@link org.elasticsearch.compute.operator.DriverContext} arguments that are going to be injected automatically.
+ *         It is also possible to declare any number of arbitrary arguments that must be provided via the generated supplier.
+ *     </li>
+ *     <li>
+ *         The {@code combine, combineStates, combineIntermediate, evaluateFinal} methods (see below) can be generated automatically
+ *         when both the input type I and the mutable accumulator states (AggregatorState and GroupingAggregatorState) are
+ *         primitive (DOUBLE, INT).
+ *     </li>
+ *     <li>
+ *         Code generation expects at least one IntermediateState field that is going to be used to keep
+ *         the serialized state of the aggregation (i.e. the AggregatorState and GroupingAggregatorState).
+ *         It must be defined even if you rely on the autogenerated implementation for primitive types.
+ *     </li>
+ * </ul>
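As a concrete reference for the notes above, this is the primitive-state shape, adapted from the {@code MaxLongAggregator} example in the {@link org.elasticsearch.compute.ann.Aggregator} JavaDoc; everything beyond these two methods (suppliers, intermediate-state plumbing) is produced by code generation:

```java
// Adapted from the MaxLongAggregator example in the @Aggregator JavaDoc.
import org.elasticsearch.compute.ann.Aggregator;
import org.elasticsearch.compute.ann.GroupingAggregator;
import org.elasticsearch.compute.ann.IntermediateState;

@Aggregator({ @IntermediateState(name = "max", type = "LONG"), @IntermediateState(name = "seen", type = "BOOLEAN") })
@GroupingAggregator
class MaxLongAggregator {
    // Initial accumulator value used for each group.
    public static long init() {
        return Long.MIN_VALUE;
    }

    // Folds one input value into the accumulator; code generation wires this
    // into both the single and the grouping aggregator functions.
    public static long combine(long current, long v) {
        return Math.max(current, v);
    }
}
```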

+ *
+ * Aggregation expects:
+ * <ul>
+ *     <li>
+ *         type AggregatorState (a mutable state used to accumulate the result of the aggregation) to be public, not inner, and to
+ *         implement {@link org.elasticsearch.compute.aggregation.AggregatorState}
+ *     </li>
+ *     <li>type I (the input to your aggregation function), usually a primitive type or {@link org.apache.lucene.util.BytesRef}</li>
+ *     <li>{@code AggregatorState init()} or {@code AggregatorState initSingle()} returns an empty, initialized aggregation state</li>
+ *     <li>
+ *         {@code void combine(AggregatorState state, I input)} or {@code AggregatorState combine(AggregatorState state, I input)}
+ *         adds an input entry to the aggregation state
+ *     </li>
+ *     <li>
+ *         {@code void combineIntermediate(AggregatorState state, intermediate states)} adds a serialized aggregation state
+ *         to the current aggregation state (used to combine results across different nodes)
+ *     </li>
+ *     <li>
+ *         {@code Block evaluateFinal(AggregatorState state, DriverContext)} converts the inner state of the aggregation to the
+ *         result column
+ *     </li>
+ * </ul>
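For an object (non-primitive) state, the generated code calls these static methods directly. A sketch of the expected shapes follows; MyState is a hypothetical placeholder for an {@code AggregatorState} implementation (not part of this change), and only the signatures mirror the contract listed above:

```java
// Hypothetical skeleton of a hand-written, object-state aggregator.
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.operator.DriverContext;

class MyFunctionDoubleAggregator {
    // Returns an empty, initialized aggregation state.
    public static MyState initSingle() {
        return new MyState();
    }

    // Adds one input entry to the aggregation state.
    public static void combine(MyState state, double input) {
        state.add(input);
    }

    // Merges a serialized intermediate state into the current state; one
    // argument per declared IntermediateState field.
    public static void combineIntermediate(MyState state, double value, boolean seen) {
        if (seen) {
            state.add(value);
        }
    }

    // Converts the inner state into the result column.
    public static Block evaluateFinal(MyState state, DriverContext driverContext) {
        return state.toBlock(driverContext.blockFactory());
    }
}
```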

+ *
+ * Grouping aggregation expects:
+ * <ul>
+ *     <li>
+ *         type GroupingAggregatorState (a mutable state used to accumulate the result of the grouping aggregation) to be public,
+ *         not inner, and to implement {@link org.elasticsearch.compute.aggregation.GroupingAggregatorState}
+ *     </li>
+ *     <li>type I (the input to your aggregation function), usually a primitive type or {@link org.apache.lucene.util.BytesRef}</li>
+ *     <li>
+ *         {@code GroupingAggregatorState init()} or {@code GroupingAggregatorState initGrouping()} returns an empty, initialized
+ *         grouping aggregation state
+ *     </li>
+ *     <li>
+ *         {@code void combine(GroupingAggregatorState state, int groupId, I input)} adds an input entry to the corresponding
+ *         group (bucket) of the grouping aggregation state
+ *     </li>
+ *     <li>
+ *         {@code void combineStates(GroupingAggregatorState targetState, int targetGroupId, GS otherState, int otherGroupId)}
+ *         merges another grouped aggregation state into the first one
+ *     </li>
+ *     <li>
+ *         {@code void combineIntermediate(GroupingAggregatorState current, int groupId, intermediate states)} adds a serialized
+ *         aggregation state to the current grouped aggregation state (used to combine results across different nodes)
+ *     </li>
+ *     <li>
+ *         {@code Block evaluateFinal(GroupingAggregatorState state, IntVector selected, DriverContext)} converts the inner state
+ *         of the grouping aggregation to the result column
+ *     </li>
+ * </ul>
 *
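The grouping half follows the same pattern, with a {@code groupId} routing each value to its bucket. Again a hedged sketch, with MyGroupingState standing in for an imagined {@code GroupingAggregatorState} implementation:

```java
// Hypothetical skeleton of the grouping-side contract described above.
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.compute.operator.DriverContext;

class MyFunctionDoubleGroupingAggregator {
    // Empty grouping state; BigArrays is injected automatically when declared.
    public static MyGroupingState initGrouping(BigArrays bigArrays) {
        return new MyGroupingState(bigArrays);
    }

    // Adds an input entry to the corresponding group (bucket).
    public static void combine(MyGroupingState state, int groupId, double input) {
        state.add(groupId, input);
    }

    // Merges one bucket of another grouped state into this one.
    public static void combineStates(MyGroupingState target, int targetGroupId, MyGroupingState other, int otherGroupId) {
        target.merge(targetGroupId, other, otherGroupId);
    }

    // Merges serialized per-group state arriving from other nodes.
    public static void combineIntermediate(MyGroupingState current, int groupId, double value, boolean seen) {
        if (seen) {
            current.add(groupId, value);
        }
    }

    // Converts the inner state to the result column, emitting only the selected groups.
    public static Block evaluateFinal(MyGroupingState state, IntVector selected, DriverContext driverContext) {
        return state.toBlock(selected, driverContext.blockFactory());
    }
}
```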
 * <ol>
 *     <li>
 *         Copy an existing aggregator to use as a base. You'll usually make one per type. Check other classes to see the naming pattern.
@@ -117,31 +189,8 @@
 *     </li>
 *     <li>
- *         The methods in the aggregator will define how it will work:
- *         <ul>
- *             <li>
- *                 Adding the `type init()` method will autogenerate the code to manage the state, using your returned value
- *                 as the initial value for each group.
- *             </li>
- *             <li>
- *                 Adding the `type initSingle()` or `type initGrouping()` methods will use the state object you return there instead.
- *                 You will also have to provide `evaluateIntermediate()` and `evaluateFinal()` methods this way.
- *             </li>
- *         </ul>
- *         Depending on the way you use, adapt your `combine*()` methods to receive one or other type as their first parameters.
- *     </li>
- *     <li>
- *         If it's also a {@link org.elasticsearch.compute.ann.GroupingAggregator}, you should provide the same methods as commented before:
- *         <ul>
- *             <li>
- *                 Add an `initGrouping()`, unless you're using the `init()` method
- *             </li>
- *             <li>
- *                 Add all the other methods, with the state parameter of the type of your `initGrouping()`.
- *             </li>
- *         </ul>
+ *         Implement the methods according to the list above (empty stubs are fine to start with).
+ *         Also check the {@link org.elasticsearch.compute.ann.Aggregator} JavaDoc, as it documents how the generated code uses
+ *         these methods.
 *     </li>
 *     <li>
 *         Make a test for your aggregator.
@@ -152,16 +201,8 @@
 *     </li>
 *     <li>
- *         Check the Javadoc of the {@link org.elasticsearch.compute.ann.Aggregator}
- *         and {@link org.elasticsearch.compute.ann.GroupingAggregator} annotations.
- *         Add/Modify them on your aggregator.
- *     </li>
- *     <li>
- *         The {@link org.elasticsearch.compute.ann.Aggregator} JavaDoc explains the static methods you should add.
- *     </li>
- *     <li>
- *         After implementing the required methods (Even if they have a dummy implementation),
- *         run the CsvTests to generate some extra required classes.
+ *         Code generation is triggered when running the tests.
+ *         Run the CsvTests to generate the code. The generated code should include:
 *

    * One of them will be the {@code AggregatorFunctionSupplier} for your aggregator. * Find it by its name ({@code AggregatorFunctionSupplier}), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 32a350ac7351e..cb0306775a711 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; -import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator; import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator.ShardConfig; import org.elasticsearch.compute.operator.EvalOperator; @@ -110,11 +110,7 @@ public Expression query() { */ public Object queryAsObject() { Object queryAsObject = query().fold(FoldContext.small() /* TODO remove me */); - if (queryAsObject instanceof BytesRef bytesRef) { - return bytesRef.utf8ToString(); - } - - return queryAsObject; + return BytesRefs.toString(queryAsObject); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index 3223e96da7136..3b3df2e376c9d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; +import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -38,6 +39,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.querydsl.query.MatchQuery; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -48,6 +50,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.BiConsumer; import static java.util.Map.entry; import static 
org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -88,7 +91,7 @@ /** * Full text function that performs a {@link org.elasticsearch.xpack.esql.querydsl.query.MatchQuery} . */ -public class Match extends FullTextFunction implements OptionalArgument, PostOptimizationVerificationAware { +public class Match extends FullTextFunction implements OptionalArgument, PostAnalysisPlanVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Match", Match::readFrom); public static final Set FIELD_DATA_TYPES = Set.of( @@ -251,7 +254,7 @@ public Match( valueHint = { "none", "all" }, description = "Number of beginning characters left unchanged for fuzzy matching." ) }, - description = "Match additional options as <>." + description = "(Optional) Match additional options as <>." + " See <> for more information.", optional = true ) Expression options @@ -429,23 +432,23 @@ public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { } @Override - public void postOptimizationVerification(Failures failures) { - Expression fieldExpression = field(); - // Field may be converted to other data type (field_name :: data_type), so we need to check the original field - if (fieldExpression instanceof AbstractConvertFunction convertFunction) { - fieldExpression = convertFunction.field(); - } - if (fieldExpression instanceof FieldAttribute == false) { - failures.add( - Failure.fail( - field, - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - functionName(), - functionType(), - field.sourceText() - ) - ); - } + public BiConsumer postAnalysisPlanVerification() { + return (plan, failures) -> { + super.postAnalysisPlanVerification().accept(plan, failures); + plan.forEachExpression(Match.class, m -> { + if (m.fieldAsFieldAttribute() == null) { + failures.add( + Failure.fail( + m.field(), + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", + functionName(), + functionType(), + m.field().sourceText() + ) + ); + } + }); + }; } @Override @@ -476,22 +479,24 @@ public Object queryAsObject() { @Override protected Query translate(TranslatorHandler handler) { + var fieldAttribute = fieldAsFieldAttribute(); + Check.notNull(fieldAttribute, "Match must have a field attribute as the first argument"); + String fieldName = fieldAttribute.name(); + if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { + // If we have multiple field types, we allow the query to be done, but getting the underlying field name + fieldName = multiTypeEsField.getName(); + } + // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided + return new MatchQuery(source(), fieldName, queryAsObject(), matchQueryOptions()); + } + + private FieldAttribute fieldAsFieldAttribute() { Expression fieldExpression = field; // Field may be converted to other data type (field_name :: data_type), so we need to check the original field if (fieldExpression instanceof AbstractConvertFunction convertFunction) { fieldExpression = convertFunction.field(); } - if (fieldExpression instanceof FieldAttribute fieldAttribute) { - String fieldName = fieldAttribute.name(); - if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { - // If we have multiple field types, we allow the query to be done, but getting the underlying field name - fieldName = multiTypeEsField.getName(); - } - // Make query lenient so mixed field types can be queried when a field 
type is incompatible with the value provided - return new MatchQuery(source(), fieldName, queryAsObject(), matchQueryOptions()); - } - - throw new IllegalArgumentException("Match must have a field attribute as the first argument"); + return fieldExpression instanceof FieldAttribute fieldAttribute ? fieldAttribute : null; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryBuilderResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryBuilderResolver.java new file mode 100644 index 0000000000000..14607de433630 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryBuilderResolver.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ResolvedIndices; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.Rewriteable; +import org.elasticsearch.xpack.esql.core.util.Holder; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; +import org.elasticsearch.xpack.esql.session.IndexResolver; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +/** + * Some {@link FullTextFunction} implementations such as {@link org.elasticsearch.xpack.esql.expression.function.fulltext.Match} + * will be translated to a {@link QueryBuilder} that requires a rewrite phase on the coordinator. + * {@link QueryBuilderResolver#resolveQueryBuilders(LogicalPlan, TransportActionServices, ActionListener)} will rewrite the plan by + * replacing {@link FullTextFunction} expressions with new ones that hold rewritten {@link QueryBuilder}s.
+ */ +public final class QueryBuilderResolver { + + private QueryBuilderResolver() {} + + public static void resolveQueryBuilders(LogicalPlan plan, TransportActionServices services, ActionListener listener) { + var hasFullTextFunctions = plan.anyMatch(p -> { + Holder hasFullTextFunction = new Holder<>(false); + p.forEachExpression(FullTextFunction.class, unused -> hasFullTextFunction.set(true)); + return hasFullTextFunction.get(); + }); + if (hasFullTextFunctions) { + Rewriteable.rewriteAndFetch( + new FullTextFunctionsRewritable(plan), + queryRewriteContext(services, indexNames(plan)), + listener.delegateFailureAndWrap((l, r) -> l.onResponse(r.plan)) + ); + } else { + listener.onResponse(plan); + } + } + + private static QueryRewriteContext queryRewriteContext(TransportActionServices services, Set indexNames) { + ResolvedIndices resolvedIndices = ResolvedIndices.resolveWithIndexNamesAndOptions( + indexNames.toArray(String[]::new), + IndexResolver.FIELD_CAPS_INDICES_OPTIONS, + services.clusterService().state(), + services.indexNameExpressionResolver(), + services.transportService().getRemoteClusterService(), + System.currentTimeMillis() + ); + + return services.searchService().getRewriteContext(System::currentTimeMillis, resolvedIndices, null); + } + + private static Set indexNames(LogicalPlan plan) { + Set indexNames = new HashSet<>(); + plan.forEachDown(EsRelation.class, esRelation -> indexNames.addAll(esRelation.concreteIndices())); + return indexNames; + } + + private record FullTextFunctionsRewritable(LogicalPlan plan) implements Rewriteable { + @Override + public FullTextFunctionsRewritable rewrite(QueryRewriteContext ctx) throws IOException { + Holder exceptionHolder = new Holder<>(); + Holder updated = new Holder<>(false); + LogicalPlan newPlan = plan.transformExpressionsDown(FullTextFunction.class, f -> { + QueryBuilder builder = f.queryBuilder(), initial = builder; + builder = builder == null ? f.asQuery(TranslatorHandler.TRANSLATOR_HANDLER).asBuilder() : builder; + try { + builder = builder.rewrite(ctx); + } catch (IOException e) { + exceptionHolder.setIfAbsent(e); + } + var rewritten = builder != initial; + updated.set(updated.get() || rewritten); + return rewritten ? f.replaceQueryBuilder(builder) : f; + }); + if (exceptionHolder.get() != null) { + throw exceptionHolder.get(); + } + return updated.get() ? 
new FullTextFunctionsRewritable(newPlan) : this; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java index 4db1c38694757..1da28b3069675 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -26,10 +26,12 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; import java.util.List; +import java.util.function.BiConsumer; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -39,7 +41,7 @@ /** * Full text function that performs a {@link TermQuery} . */ -public class Term extends FullTextFunction implements PostOptimizationVerificationAware { +public class Term extends FullTextFunction implements PostAnalysisPlanVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Term", Term::readFrom); @@ -104,18 +106,23 @@ private TypeResolution resolveField() { } @Override - public void postOptimizationVerification(Failures failures) { - if (field instanceof FieldAttribute == false) { - failures.add( - Failure.fail( - field, - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - functionName(), - functionType(), - field.sourceText() - ) - ); - } + public BiConsumer postAnalysisPlanVerification() { + return (plan, failures) -> { + super.postAnalysisPlanVerification().accept(plan, failures); + plan.forEachExpression(Term.class, t -> { + if (t.field() instanceof FieldAttribute == false) { // TODO: is a conversion possible, similar to Match's case? 
+ failures.add( + Failure.fail( + t.field(), + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", + t.functionName(), + t.functionType(), + t.field().sourceText() + ) + ); + } + }); + }; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 90b35e469ddce..ac54e3d6484d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.TwoOptionalArguments; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; @@ -175,7 +176,8 @@ Sometimes you need to change the start value of each bucket by a given duration inserting a negative offset of `1 hour` to buckets of `1 year` looks like this:""", file = "bucket", tag = "bucketWithOffset" - ) } + ) }, + type = FunctionType.GROUPING ) public Bucket( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index 570ce7a96dd68..6cad20b4e28ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -60,7 +61,8 @@ public class Categorize extends GroupingFunction { tag = "docsCategorize", description = "This example categorizes server logs messages into categories and aggregates their counts. 
" ) }, - preview = true + preview = true, + type = FunctionType.GROUPING ) public Categorize( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java index 0d3bacbd47605..90152d546097c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; @@ -67,6 +68,7 @@ public static List getNamedWriteables() { entries.add(Concat.ENTRY); entries.add(E.ENTRY); entries.add(EndsWith.ENTRY); + entries.add(FromAggregateMetricDouble.ENTRY); entries.add(Greatest.ENTRY); entries.add(Hash.ENTRY); entries.add(Hypot.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java new file mode 100644 index 0000000000000..f1bde9f57b671 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java @@ -0,0 +1,171 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.CompositeBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; + +public class FromAggregateMetricDouble extends EsqlScalarFunction { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "FromAggregateMetricDouble", + FromAggregateMetricDouble::new + ); + + private final Expression field; + private final Expression subfieldIndex; + + @FunctionInfo(returnType = { "long", "double" }, description = "Convert aggregate double metric to a block of a single subfield.") + public FromAggregateMetricDouble( + Source source, + @Param( + name = "aggregate_metric_double", + type = { "aggregate_metric_double" }, + description = "Aggregate double metric to convert." 
+ ) Expression field, + @Param(name = "subfieldIndex", type = "int", description = "Index of subfield") Expression subfieldIndex + ) { + super(source, List.of(field, subfieldIndex)); + this.field = field; + this.subfieldIndex = subfieldIndex; + } + + public static FromAggregateMetricDouble withMetric(Source source, Expression field, AggregateMetricDoubleBlockBuilder.Metric metric) { + return new FromAggregateMetricDouble(source, field, new Literal(source, metric.getIndex(), INTEGER)); + } + + private FromAggregateMetricDouble(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(field); + out.writeNamedWriteable(subfieldIndex); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public DataType dataType() { + if (subfieldIndex.foldable() == false) { + throw new EsqlIllegalArgumentException("Received a non-foldable value for subfield index"); + } + var folded = subfieldIndex.fold(FoldContext.small()); + if (folded == null) { + return NULL; + } + var subfield = ((Number) folded).intValue(); + if (subfield == AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex()) { + return INTEGER; + } + return DOUBLE; + } + + @Override + public Expression replaceChildren(List<Expression> newChildren) { + return new FromAggregateMetricDouble(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo<? extends Expression> info() { + return NodeInfo.create(this, FromAggregateMetricDouble::new, field, subfieldIndex); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + return isType(field, dt -> dt == DataType.AGGREGATE_METRIC_DOUBLE, sourceText(), DEFAULT, "aggregate_metric_double only"); + } + + @Override + public boolean foldable() { + return Expressions.foldable(children()); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + var fieldEvaluator = toEvaluator.apply(field); + return new EvalOperator.ExpressionEvaluator.Factory() { + + @Override + public String toString() { + return "FromAggregateMetricDoubleEvaluator[" + "field=" + fieldEvaluator + ",subfieldIndex=" + subfieldIndex + "]"; + } + + @Override + public EvalOperator.ExpressionEvaluator get(DriverContext context) { + final EvalOperator.ExpressionEvaluator eval = fieldEvaluator.get(context); + + return new EvalOperator.ExpressionEvaluator() { + @Override + public Block eval(Page page) { + Block block = eval.eval(page); + if (block.areAllValuesNull()) { + return block; + } + try { + CompositeBlock compositeBlock = (CompositeBlock) block; + Block resultBlock = compositeBlock.getBlock(((Number) subfieldIndex.fold(FoldContext.small())).intValue()); + resultBlock.incRef(); + return resultBlock; + } finally { + block.close(); + } + } + + @Override + public void close() { + Releasables.closeExpectNoException(eval); + } + + @Override + public String toString() { + return "FromAggregateMetricDoubleEvaluator[field=" + eval + ",subfieldIndex=" + subfieldIndex + "]"; + } + }; + + } + }; + } +}
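The evaluator above hands one sub-block of a CompositeBlock to the caller while releasing the composite itself: it bumps the child's reference count before closing the parent in a finally block. A minimal, self-contained sketch of that hand-off, using toy stand-in classes rather than the ES compute API:

// All types below are illustrative stand-ins, not the Elasticsearch compute classes.
final class Block {
    private int refs = 1;
    void incRef() { refs++; }
    void decRef() { if (--refs == 0) System.out.println("block released"); }
}

final class CompositeBlock {
    private final Block[] children;
    CompositeBlock(Block... children) { this.children = children; }
    Block getBlock(int i) { return children[i]; }
    void close() { for (Block b : children) b.decRef(); } // releases every child once
}

public class RefCountDemo {
    // Mirrors the evaluator: fetch the subfield, take an extra reference on it,
    // then close the parent so the surviving reference belongs to the caller.
    static Block extractSubfield(CompositeBlock composite, int subfieldIndex) {
        try {
            Block result = composite.getBlock(subfieldIndex);
            result.incRef();
            return result;
        } finally {
            composite.close();
        }
    }

    public static void main(String[] args) {
        Block min = new Block();
        Block max = new Block();
        Block kept = extractSubfield(new CompositeBlock(min, max), 1);
        kept.decRef(); // caller releases "max"; "min" was already released by close()
    }
}

Closing the composite exactly once in the finally block keeps the accounting correct whether or not a sub-block is returned; only the extra incRef survives for the caller.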
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 611c7a456864a..a426a14b0a319 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -210,7 +210,9 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { CoalesceBytesRefEvaluator.toEvaluator(toEvaluator, children()); case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; case UNSUPPORTED, SHORT, BYTE, DATE_PERIOD, OBJECT, DOC_DATA_TYPE, SOURCE, TIME_DURATION, FLOAT, HALF_FLOAT, TSID_DATA_TYPE, - SCALED_FLOAT, PARTIAL_AGG -> throw new UnsupportedOperationException(dataType() + " can't be coalesced"); + SCALED_FLOAT, PARTIAL_AGG, AGGREGATE_METRIC_DOUBLE -> throw new UnsupportedOperationException( + dataType() + " can't be coalesced" + ); }; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 36150083daec0..bc32945d73eb5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.optimizer.rules.logical.AddDefaultTopN; import org.elasticsearch.xpack.esql.optimizer.rules.logical.BooleanFunctionEqualsElimination; import org.elasticsearch.xpack.esql.optimizer.rules.logical.BooleanSimplification; import org.elasticsearch.xpack.esql.optimizer.rules.logical.CombineBinaryComparisons; @@ -32,7 +31,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneEmptyPlans; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneFilters; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneLiteralsInOrderBy; -import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneOrderByBeforeStats; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneRedundantOrderBy; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneRedundantSortClauses; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownAndCombineFilters; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownAndCombineLimits; @@ -116,10 +115,9 @@ protected List<Batch<LogicalPlan>> batches() { protected static List<Batch<LogicalPlan>> rules() { var skip = new Batch<>("Skip Compute", new SkipQueryOnLimitZero()); - var defaultTopN = new Batch<>("Add default TopN", new AddDefaultTopN()); var label = new Batch<>("Set as Optimized", Limiter.ONCE, new SetAsOptimized()); - return asList(substitutions(), operators(), skip, cleanup(), defaultTopN, label); + return asList(substitutions(), operators(), skip, cleanup(), label); } protected static Batch<LogicalPlan> substitutions() { @@ -189,7 +187,7 @@ protected static Batch<LogicalPlan> operators() { new PushDownRegexExtract(), new PushDownEnrich(), new PushDownAndCombineOrderBy(), - new PruneOrderByBeforeStats(), + new PruneRedundantOrderBy(), new PruneRedundantSortClauses() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java index 94248ce2ecd0a..c474c48d6d96b 100644 ---
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java @@ -27,6 +27,9 @@ public Failures verify(LogicalPlan plan) { PlanConsistencyChecker.checkPlan(p, dependencyFailures); if (failures.hasFailures() == false) { + if (p instanceof PostOptimizationVerificationAware pova) { + pova.postOptimizationVerification(failures); + } p.forEachExpression(ex -> { if (ex instanceof PostOptimizationVerificationAware va) { va.postOptimizationVerification(failures); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java deleted file mode 100644 index 02815d45d2896..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.optimizer.rules.logical; - -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; -import org.elasticsearch.xpack.esql.plan.logical.EsRelation; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.OrderBy; -import org.elasticsearch.xpack.esql.plan.logical.TopN; -import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; - -/** - * This adds an explicit TopN node to a plan that only has an OrderBy right before Lucene. - * To date, the only known use case that "needs" this is a query of the form - * from test - * | sort emp_no - * | mv_expand first_name - * | rename first_name AS x - * | where x LIKE "*a*" - * | limit 15 - *
or - * from test - * | sort emp_no - * | mv_expand first_name - * | sort first_name - * | limit 15
    - * PushDownAndCombineLimits rule will copy the "limit 15" after "sort emp_no" if there is no filter on the expanded values - * OR if there is no sort between "limit" and "mv_expand". - * But, since this type of query has such a filter, the "sort emp_no" will have no limit when it reaches the current rule. - */ -public final class AddDefaultTopN extends OptimizerRules.ParameterizedOptimizerRule { - public AddDefaultTopN() { - super(OptimizerRules.TransformDirection.DOWN); - } - - @Override - protected LogicalPlan rule(LogicalPlan plan, LogicalOptimizerContext context) { - if (plan instanceof UnaryPlan unary && unary.child() instanceof OrderBy order && order.child() instanceof EsRelation relation) { - var limit = new Literal(plan.source(), context.configuration().resultTruncationMaxSize(), DataType.INTEGER); - return unary.replaceChild(new TopN(plan.source(), relation, order.order(), limit)); - } - return plan; - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneOrderByBeforeStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneOrderByBeforeStats.java deleted file mode 100644 index 24fb8971487d5..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneOrderByBeforeStats.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.optimizer.rules.logical; - -import org.elasticsearch.xpack.esql.plan.logical.Aggregate; -import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import org.elasticsearch.xpack.esql.plan.logical.Eval; -import org.elasticsearch.xpack.esql.plan.logical.Filter; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.OrderBy; -import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; -import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; - -public final class PruneOrderByBeforeStats extends OptimizerRules.OptimizerRule { - - @Override - protected LogicalPlan rule(Aggregate agg) { - OrderBy order = findPullableOrderBy(agg.child()); - - LogicalPlan p = agg; - if (order != null) { - p = agg.transformDown(OrderBy.class, o -> o == order ? order.child() : o); - } - return p; - } - - private static OrderBy findPullableOrderBy(LogicalPlan plan) { - OrderBy pullable = null; - if (plan instanceof OrderBy o) { - pullable = o; - } else if (plan instanceof Eval - || plan instanceof Filter - || plan instanceof Project - || plan instanceof RegexExtract - || plan instanceof Enrich) { - pullable = findPullableOrderBy(((UnaryPlan) plan).child()); - } - return pullable; - } - -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneRedundantOrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneRedundantOrderBy.java new file mode 100644 index 0000000000000..2495f72864d1c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneRedundantOrderBy.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules.logical; + +import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; +import org.elasticsearch.xpack.esql.plan.logical.SortAgnostic; +import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; + +import java.util.ArrayDeque; +import java.util.Collections; +import java.util.Deque; +import java.util.IdentityHashMap; +import java.util.Set; + +/** + * SORT cannot be executed without a LIMIT, as ES|QL doesn't support unbounded sort (yet). + *
The planner tries to push down LIMIT and transform all the unbounded sorts into a TopN. + * In some cases it's not possible though, eg. + * from test | sort x | lookup join lookup on x | sort y + * or + * from test | sort x | mv_expand x | sort y + * "sort y" will become a TopN due to the addition of the default Limit, but "sort x" will remain unbounded, + * so the query could not be executed. + * In most cases though, the following commands can make the previous SORTs redundant, + * because they will re-sort previously sorted results (eg. if there is another SORT) + * or because the order will be scrambled by another command (eg. a STATS)
+ * This rule finds and prunes redundant SORTs, attempting to make the plan executable. + */ +public class PruneRedundantOrderBy extends OptimizerRules.OptimizerRule<LogicalPlan> { + + @Override + protected LogicalPlan rule(LogicalPlan plan) { + if (plan instanceof OrderBy || plan instanceof TopN || plan instanceof Aggregate) { + Set<LogicalPlan> redundant = findRedundantSort(((UnaryPlan) plan).child()); + if (redundant.isEmpty()) { + return plan; + } + return plan.transformDown(p -> redundant.contains(p) ? ((UnaryPlan) p).child() : p); + } else { + return plan; + } + } + + /** + * Breadth-first traversal to find redundant SORTs in the children tree. + * Returns an identity set (we need to compare and prune the exact instances) + */ + private Set<LogicalPlan> findRedundantSort(LogicalPlan plan) { + Set<LogicalPlan> result = Collections.newSetFromMap(new IdentityHashMap<>()); + + Deque<LogicalPlan> toCheck = new ArrayDeque<>(); + toCheck.push(plan); + + while (true) { + if (toCheck.isEmpty()) { + return result; + } + LogicalPlan p = toCheck.pop(); + if (p instanceof OrderBy ob) { + result.add(ob); + toCheck.push(ob.child()); + } else if (p instanceof SortAgnostic) { + for (LogicalPlan child : p.children()) { + toCheck.push(child); + } + } + } + } +}
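PruneRedundantOrderBy collects the sorts it wants to drop in an identity-based set: two OrderBy nodes can compare equals() yet only the exact instances discovered during the walk may be pruned. A small runnable sketch of that pattern, with a toy Node record standing in for the plan classes (and, unlike the real rule, descending through every child rather than only sort-agnostic ones):

import java.util.ArrayDeque;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Set;

// Toy plan node; `name` and `children` are illustrative, not the ES|QL classes.
record Node(String name, List<Node> children) {}

public class IdentitySetDemo {
    // Iterative walk that records every "sort" node it sees,
    // keyed by reference identity rather than equals().
    static Set<Node> findSorts(Node root) {
        Set<Node> result = Collections.newSetFromMap(new IdentityHashMap<>());
        Deque<Node> toCheck = new ArrayDeque<>();
        toCheck.push(root);
        while (!toCheck.isEmpty()) {
            Node p = toCheck.pop();
            if (p.name().equals("sort")) {
                result.add(p);
            }
            p.children().forEach(toCheck::push);
        }
        return result;
    }

    public static void main(String[] args) {
        Node a = new Node("sort", List.of());
        Node b = new Node("sort", List.of()); // equals(a) but a distinct instance
        Node root = new Node("stats", List.of(a, b));
        Set<Node> sorts = findSorts(root);
        System.out.println(sorts.size());                // 2: both instances tracked
        System.out.println(new HashSet<>(sorts).size()); // 1: equals() would conflate them
    }
}

An equals()-based set would treat the two structurally identical sorts as one entry, so transformDown could prune a node the rule never inspected; the identity set avoids that.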
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java index 969a6bb713eca..dca4dfbd533df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; -import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -21,6 +20,9 @@ import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import java.util.ArrayList; +import java.util.List; + public final class PushDownAndCombineLimits extends OptimizerRules.ParameterizedOptimizerRule<Limit, LogicalOptimizerContext> { public PushDownAndCombineLimits() { @@ -31,27 +33,18 @@ public LogicalPlan rule(Limit limit, LogicalOptimizerContext ctx) { if (limit.child() instanceof Limit childLimit) { var limitSource = limit.limit(); - var l1 = (int) limitSource.fold(ctx.foldCtx()); - var l2 = (int) childLimit.limit().fold(ctx.foldCtx()); - return new Limit(limit.source(), Literal.of(limitSource, Math.min(l1, l2)), childLimit.child()); + var parentLimitValue = (int) limitSource.fold(ctx.foldCtx()); + var childLimitValue = (int) childLimit.limit().fold(ctx.foldCtx()); + // We want to preserve the duplicated() value of the smaller limit, so we'll use replaceChild. + return parentLimitValue < childLimitValue ? limit.replaceChild(childLimit.child()) : childLimit; } else if (limit.child() instanceof UnaryPlan unary) { if (unary instanceof Eval || unary instanceof Project || unary instanceof RegexExtract || unary instanceof Enrich) { return unary.replaceChild(limit.replaceChild(unary.child())); - } else if (unary instanceof MvExpand mvx) { + } else if (unary instanceof MvExpand) { // MV_EXPAND can increase the number of rows, so we cannot just push the limit down // (we also have to preserve the LIMIT afterwards) - // - // To avoid infinite loops, ie. - // | MV_EXPAND | LIMIT -> | LIMIT | MV_EXPAND | LIMIT -> ... | MV_EXPAND | LIMIT - // we add an inner limit to MvExpand and just push down the existing limit, ie. - // | MV_EXPAND | LIMIT N -> | LIMIT N | MV_EXPAND (with limit N) - var limitSource = limit.limit(); - var limitVal = (int) limitSource.fold(ctx.foldCtx()); - Integer mvxLimit = mvx.limit(); - if (mvxLimit == null || mvxLimit > limitVal) { - mvx = new MvExpand(mvx.source(), mvx.child(), mvx.target(), mvx.expanded(), limitVal); - } - return mvx.replaceChild(limit.replaceChild(mvx.child())); + // To avoid repeating this infinitely, we have to set duplicated = true. + return duplicateLimitAsFirstGrandchild(limit); } // check if there's a 'visible' descendant limit lower than the current one // and if so, align the current limit since it adds no value @@ -62,17 +55,15 @@ public LogicalPlan rule(Limit limit, LogicalOptimizerContext ctx) { var l1 = (int) limit.limit().fold(ctx.foldCtx()); var l2 = (int) descendantLimit.limit().fold(ctx.foldCtx()); if (l2 <= l1) { - return new Limit(limit.source(), Literal.of(limit.limit(), l2), limit.child()); + return limit.withLimit(descendantLimit.limit()); } } } - } else if (limit.child() instanceof Join join) { - if (join.config().type() == JoinTypes.LEFT) { - // NOTE! This is only correct because our LEFT JOINs preserve the number of rows from the left hand side. - // This deviates from SQL semantics. In SQL, multiple matches on the right hand side lead to multiple rows in the output. - // For us, multiple matches on the right hand side are collected into multi-values. - return join.replaceChildren(limit.replaceChild(join.left()), join.right()); - } + } else if (limit.child() instanceof Join join && join.config().type() == JoinTypes.LEFT) { + // Left joins increase the number of rows if any join key has multiple matches from the right hand side. + // Therefore, we cannot simply push down the limit - but we can add another limit before the join. + // To avoid repeating this infinitely, we have to set duplicated = true. + return duplicateLimitAsFirstGrandchild(limit); } return limit; } @@ -100,4 +91,27 @@ private static Limit descendantLimit(UnaryPlan unary) { } return null; } + + /** + * Duplicate the limit past its child if it wasn't duplicated yet. The duplicate is placed on top of its leftmost grandchild. + * Idempotent. (Sets {@link Limit#duplicated()} to {@code true} on the limit that remains at the top.)
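The MV_EXPAND and LEFT JOIN branches above both duplicate the limit below the child and rely on the duplicated flag, consumed by the duplicateLimitAsFirstGrandchild helper whose body follows just below, to keep the rule from matching its own output. A toy sketch of why such a marker makes the rewrite idempotent; the types here are illustrative stand-ins, not the ES|QL classes:

// Toy plan types; illustrative only.
sealed interface Plan permits Limit, MvExpand, Row {}
record Limit(int n, boolean duplicated, Plan child) implements Plan {}
record MvExpand(Plan child) implements Plan {}
record Row() implements Plan {}

public class DuplicatedLimitDemo {
    // LIMIT over MV_EXPAND: copy the limit below, mark the top copy as duplicated.
    // The guard on `duplicated` is what stops the rule from firing again on its
    // own output (LIMIT / MV_EXPAND / LIMIT would otherwise match forever).
    static Plan rule(Plan p) {
        if (p instanceof Limit l && l.child() instanceof MvExpand mv && l.duplicated() == false) {
            return new Limit(l.n(), true, new MvExpand(new Limit(l.n(), false, mv.child())));
        }
        return p;
    }

    public static void main(String[] args) {
        Plan plan = new Limit(10, false, new MvExpand(new Row()));
        Plan once = rule(plan);
        System.out.println(once);                    // Limit[n=10, duplicated=true, child=MvExpand[...]]
        System.out.println(once.equals(rule(once))); // true: a second application is a no-op
    }
}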
+ */ + private static Limit duplicateLimitAsFirstGrandchild(Limit limit) { + if (limit.duplicated()) { + return limit; + } + + List grandChildren = limit.child().children(); + LogicalPlan firstGrandChild = grandChildren.getFirst(); + LogicalPlan newFirstGrandChild = limit.replaceChild(firstGrandChild); + + List newGrandChildren = new ArrayList<>(); + newGrandChildren.add(newFirstGrandChild); + for (int i = 1; i < grandChildren.size(); i++) { + newGrandChildren.add(grandChildren.get(i)); + } + + LogicalPlan newChild = limit.child().replaceChildren(newGrandChildren); + return limit.replaceChild(newChild).withDuplicated(true); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java index 3b39e6a9d1fdb..ec23783fe1a2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.parser; public class AstBuilder extends LogicalPlanBuilder { - public AstBuilder(QueryParams params) { - super(params); + public AstBuilder(ParsingContext context) { + super(context); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 1d050bd91e66c..9afa5dcbb095e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -16,7 +16,7 @@ null 'sort' 'stats' 'where' -null +'lookup' null null null @@ -120,6 +120,7 @@ null null null null +'join' 'USING' null null @@ -130,6 +131,9 @@ null null null null +null +null +null token symbolic names: null @@ -149,14 +153,14 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP +DEV_CHANGE_POINT DEV_INLINESTATS DEV_LOOKUP DEV_METRICS -DEV_JOIN DEV_JOIN_FULL DEV_JOIN_LEFT DEV_JOIN_RIGHT -DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -253,6 +257,7 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +JOIN USING JOIN_LINE_COMMENT JOIN_MULTILINE_COMMENT @@ -263,6 +268,9 @@ METRICS_WS CLOSING_METRICS_LINE_COMMENT CLOSING_METRICS_MULTILINE_COMMENT CLOSING_METRICS_WS +CHANGE_POINT_LINE_COMMENT +CHANGE_POINT_MULTILINE_COMMENT +CHANGE_POINT_WS rule names: DISSECT @@ -281,14 +289,14 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP +DEV_CHANGE_POINT DEV_INLINESTATS DEV_LOOKUP DEV_METRICS -DEV_JOIN DEV_JOIN_FULL DEV_JOIN_LEFT DEV_JOIN_RIGHT -DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -456,7 +464,7 @@ LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS JOIN_PIPE -JOIN_JOIN +JOIN JOIN_AS JOIN_ON USING @@ -483,6 +491,16 @@ CLOSING_METRICS_QUOTED_IDENTIFIER CLOSING_METRICS_UNQUOTED_IDENTIFIER CLOSING_METRICS_BY CLOSING_METRICS_PIPE +CHANGE_POINT_PIPE +CHANGE_POINT_ON +CHANGE_POINT_AS +CHANGE_POINT_DOT +CHANGE_POINT_COMMA +CHANGE_POINT_QUOTED_IDENTIFIER +CHANGE_POINT_UNQUOTED_IDENTIFIER +CHANGE_POINT_LINE_COMMENT +CHANGE_POINT_MULTILINE_COMMENT +CHANGE_POINT_WS channel names: DEFAULT_TOKEN_CHANNEL @@ -505,6 +523,7 @@ LOOKUP_FIELD_MODE JOIN_MODE METRICS_MODE CLOSING_METRICS_MODE +CHANGE_POINT_MODE atn: -[4, 0, 130, 1627, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 
0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 
1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 664, 8, 24, 11, 24, 12, 24, 665, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 674, 8, 25, 10, 25, 12, 25, 677, 9, 25, 1, 25, 3, 25, 680, 8, 25, 1, 25, 3, 25, 683, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 692, 8, 26, 10, 26, 12, 26, 695, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 703, 8, 27, 11, 27, 12, 27, 704, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 724, 8, 33, 1, 33, 4, 33, 727, 8, 33, 11, 33, 12, 33, 728, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 738, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 745, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 750, 8, 39, 10, 39, 12, 39, 753, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 761, 8, 39, 10, 39, 12, 39, 764, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 771, 8, 39, 1, 39, 3, 39, 774, 8, 39, 3, 39, 776, 8, 39, 1, 40, 4, 40, 779, 8, 40, 11, 40, 12, 40, 780, 1, 41, 4, 41, 784, 8, 41, 11, 41, 12, 41, 785, 1, 41, 1, 41, 5, 41, 790, 8, 41, 10, 41, 12, 41, 793, 9, 41, 1, 41, 1, 41, 4, 41, 797, 8, 41, 11, 41, 12, 41, 798, 1, 41, 4, 41, 802, 8, 41, 11, 41, 12, 41, 803, 1, 41, 1, 41, 5, 41, 808, 8, 41, 10, 41, 12, 41, 811, 9, 41, 3, 41, 813, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 819, 8, 41, 11, 41, 12, 41, 820, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 957, 8, 81, 1, 81, 5, 81, 960, 8, 81, 10, 81, 12, 81, 963, 9, 81, 1, 81, 1, 81, 4, 81, 967, 8, 81, 11, 81, 12, 81, 968, 3, 81, 971, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 
83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 985, 8, 84, 10, 84, 12, 84, 988, 9, 84, 1, 84, 1, 84, 3, 84, 992, 8, 84, 1, 84, 4, 84, 995, 8, 84, 11, 84, 12, 84, 996, 3, 84, 999, 8, 84, 1, 85, 1, 85, 4, 85, 1003, 8, 85, 11, 85, 12, 85, 1004, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1082, 8, 102, 1, 103, 4, 103, 1085, 8, 103, 11, 103, 12, 103, 1086, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1136, 8, 114, 1, 115, 1, 115, 3, 115, 1140, 8, 115, 1, 115, 5, 115, 1143, 8, 115, 10, 115, 12, 115, 1146, 9, 115, 1, 115, 1, 115, 3, 115, 1150, 8, 115, 1, 115, 4, 115, 1153, 8, 115, 11, 115, 12, 115, 1154, 3, 115, 1157, 8, 115, 1, 116, 1, 116, 4, 116, 1161, 8, 116, 11, 116, 12, 116, 1162, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1248, 8, 136, 11, 136, 12, 136, 1249, 1, 136, 1, 136, 3, 136, 1254, 8, 136, 1, 136, 4, 136, 1257, 8, 136, 11, 136, 12, 136, 1258, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1404, 8, 169, 11, 169, 12, 169, 1405, 1, 170, 1, 170, 1, 170, 
1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 217, 2, 693, 762, 0, 218, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 70, 176, 0, 178, 71, 180, 72, 182, 73, 184, 74, 186, 0, 188, 75, 190, 76, 192, 77, 194, 78, 196, 0, 198, 0, 200, 79, 202, 80, 204, 81, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 82, 220, 0, 222, 83, 224, 0, 226, 0, 228, 84, 230, 85, 232, 86, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 0, 248, 87, 250, 88, 252, 89, 254, 90, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 95, 284, 96, 286, 0, 288, 97, 290, 0, 292, 98, 294, 99, 296, 100, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 0, 316, 101, 318, 102, 320, 103, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 0, 334, 104, 336, 105, 338, 106, 340, 0, 342, 107, 344, 108, 346, 109, 348, 110, 350, 0, 352, 0, 354, 111, 356, 112, 358, 113, 360, 114, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 115, 378, 116, 380, 117, 382, 0, 384, 0, 386, 0, 388, 0, 390, 118, 392, 119, 394, 120, 396, 0, 398, 0, 400, 0, 402, 0, 404, 121, 406, 0, 408, 0, 410, 0, 412, 0, 414, 0, 416, 122, 418, 123, 420, 124, 422, 0, 424, 0, 426, 0, 428, 125, 430, 126, 432, 127, 434, 0, 436, 0, 438, 
128, 440, 129, 442, 130, 444, 0, 446, 0, 448, 0, 450, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1654, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 1, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 2, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 3, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 242, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 4, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 5, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 
282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 8, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 9, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 10, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 11, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 12, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 13, 416, 1, 0, 0, 0, 13, 418, 1, 0, 0, 0, 13, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 14, 428, 1, 0, 0, 0, 14, 430, 1, 0, 0, 0, 14, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 15, 446, 1, 0, 0, 0, 15, 448, 1, 0, 0, 0, 15, 450, 1, 0, 0, 0, 16, 452, 1, 0, 0, 0, 18, 462, 1, 0, 0, 0, 20, 469, 1, 0, 0, 0, 22, 478, 1, 0, 0, 0, 24, 485, 1, 0, 0, 0, 26, 495, 1, 0, 0, 0, 28, 502, 1, 0, 0, 0, 30, 509, 1, 0, 0, 0, 32, 516, 1, 0, 0, 0, 34, 524, 1, 0, 0, 0, 36, 536, 1, 0, 0, 0, 38, 545, 1, 0, 0, 0, 40, 551, 1, 0, 0, 0, 42, 558, 1, 0, 0, 0, 44, 565, 1, 0, 0, 0, 46, 573, 1, 0, 0, 0, 48, 581, 1, 0, 0, 0, 50, 596, 1, 0, 0, 0, 52, 608, 1, 0, 0, 0, 54, 619, 1, 0, 0, 0, 56, 627, 1, 0, 0, 0, 58, 635, 1, 0, 0, 0, 60, 643, 1, 0, 0, 0, 62, 652, 1, 0, 0, 0, 64, 663, 1, 0, 0, 0, 66, 669, 1, 0, 0, 0, 68, 686, 1, 0, 0, 0, 70, 702, 1, 0, 0, 0, 72, 708, 1, 0, 0, 0, 74, 712, 1, 0, 0, 0, 76, 714, 1, 0, 0, 0, 78, 716, 1, 0, 0, 0, 80, 719, 1, 0, 0, 0, 82, 721, 1, 0, 0, 0, 84, 730, 1, 0, 0, 0, 86, 732, 1, 0, 0, 0, 88, 737, 1, 0, 0, 0, 90, 739, 1, 0, 0, 0, 92, 744, 1, 0, 0, 0, 94, 775, 1, 0, 0, 0, 96, 778, 1, 0, 0, 0, 98, 824, 1, 0, 0, 0, 100, 826, 1, 0, 0, 0, 102, 829, 1, 0, 0, 0, 104, 833, 1, 0, 0, 0, 106, 837, 1, 0, 0, 0, 108, 839, 1, 0, 0, 0, 110, 842, 1, 0, 0, 0, 112, 844, 1, 0, 0, 0, 114, 846, 1, 0, 0, 0, 116, 851, 1, 0, 0, 0, 118, 853, 1, 0, 0, 0, 120, 859, 1, 0, 0, 0, 122, 865, 1, 0, 0, 0, 124, 868, 1, 0, 0, 0, 126, 871, 1, 0, 0, 0, 128, 876, 1, 0, 0, 0, 130, 881, 1, 0, 0, 0, 132, 883, 1, 0, 0, 0, 134, 887, 1, 0, 0, 0, 136, 892, 1, 0, 0, 0, 138, 898, 1, 0, 0, 0, 140, 901, 1, 0, 0, 0, 142, 903, 1, 0, 0, 0, 144, 909, 1, 0, 0, 0, 146, 911, 1, 0, 0, 0, 148, 916, 1, 0, 0, 0, 150, 919, 1, 0, 0, 0, 152, 922, 1, 0, 0, 0, 154, 925, 1, 0, 0, 0, 156, 927, 1, 0, 0, 0, 158, 930, 1, 0, 0, 0, 160, 932, 1, 0, 0, 0, 162, 935, 1, 0, 0, 0, 164, 937, 1, 0, 0, 0, 166, 939, 1, 0, 0, 0, 168, 941, 1, 0, 0, 0, 170, 943, 1, 0, 0, 0, 172, 945, 1, 0, 0, 0, 174, 947, 1, 0, 0, 0, 176, 949, 1, 0, 0, 0, 178, 970, 1, 0, 0, 0, 180, 972, 1, 0, 0, 0, 182, 977, 1, 0, 0, 0, 184, 998, 1, 0, 0, 
0, 186, 1000, 1, 0, 0, 0, 188, 1008, 1, 0, 0, 0, 190, 1010, 1, 0, 0, 0, 192, 1014, 1, 0, 0, 0, 194, 1018, 1, 0, 0, 0, 196, 1022, 1, 0, 0, 0, 198, 1027, 1, 0, 0, 0, 200, 1032, 1, 0, 0, 0, 202, 1036, 1, 0, 0, 0, 204, 1040, 1, 0, 0, 0, 206, 1044, 1, 0, 0, 0, 208, 1049, 1, 0, 0, 0, 210, 1053, 1, 0, 0, 0, 212, 1057, 1, 0, 0, 0, 214, 1061, 1, 0, 0, 0, 216, 1065, 1, 0, 0, 0, 218, 1069, 1, 0, 0, 0, 220, 1081, 1, 0, 0, 0, 222, 1084, 1, 0, 0, 0, 224, 1088, 1, 0, 0, 0, 226, 1092, 1, 0, 0, 0, 228, 1096, 1, 0, 0, 0, 230, 1100, 1, 0, 0, 0, 232, 1104, 1, 0, 0, 0, 234, 1108, 1, 0, 0, 0, 236, 1113, 1, 0, 0, 0, 238, 1117, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1126, 1, 0, 0, 0, 244, 1135, 1, 0, 0, 0, 246, 1156, 1, 0, 0, 0, 248, 1160, 1, 0, 0, 0, 250, 1164, 1, 0, 0, 0, 252, 1168, 1, 0, 0, 0, 254, 1172, 1, 0, 0, 0, 256, 1176, 1, 0, 0, 0, 258, 1181, 1, 0, 0, 0, 260, 1185, 1, 0, 0, 0, 262, 1189, 1, 0, 0, 0, 264, 1193, 1, 0, 0, 0, 266, 1198, 1, 0, 0, 0, 268, 1203, 1, 0, 0, 0, 270, 1206, 1, 0, 0, 0, 272, 1210, 1, 0, 0, 0, 274, 1214, 1, 0, 0, 0, 276, 1218, 1, 0, 0, 0, 278, 1222, 1, 0, 0, 0, 280, 1227, 1, 0, 0, 0, 282, 1232, 1, 0, 0, 0, 284, 1237, 1, 0, 0, 0, 286, 1244, 1, 0, 0, 0, 288, 1253, 1, 0, 0, 0, 290, 1260, 1, 0, 0, 0, 292, 1264, 1, 0, 0, 0, 294, 1268, 1, 0, 0, 0, 296, 1272, 1, 0, 0, 0, 298, 1276, 1, 0, 0, 0, 300, 1282, 1, 0, 0, 0, 302, 1286, 1, 0, 0, 0, 304, 1290, 1, 0, 0, 0, 306, 1294, 1, 0, 0, 0, 308, 1298, 1, 0, 0, 0, 310, 1302, 1, 0, 0, 0, 312, 1306, 1, 0, 0, 0, 314, 1311, 1, 0, 0, 0, 316, 1316, 1, 0, 0, 0, 318, 1320, 1, 0, 0, 0, 320, 1324, 1, 0, 0, 0, 322, 1328, 1, 0, 0, 0, 324, 1333, 1, 0, 0, 0, 326, 1337, 1, 0, 0, 0, 328, 1342, 1, 0, 0, 0, 330, 1347, 1, 0, 0, 0, 332, 1351, 1, 0, 0, 0, 334, 1355, 1, 0, 0, 0, 336, 1359, 1, 0, 0, 0, 338, 1363, 1, 0, 0, 0, 340, 1367, 1, 0, 0, 0, 342, 1372, 1, 0, 0, 0, 344, 1377, 1, 0, 0, 0, 346, 1381, 1, 0, 0, 0, 348, 1385, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1394, 1, 0, 0, 0, 354, 1403, 1, 0, 0, 0, 356, 1407, 1, 0, 0, 0, 358, 1411, 1, 0, 0, 0, 360, 1415, 1, 0, 0, 0, 362, 1419, 1, 0, 0, 0, 364, 1424, 1, 0, 0, 0, 366, 1428, 1, 0, 0, 0, 368, 1432, 1, 0, 0, 0, 370, 1436, 1, 0, 0, 0, 372, 1441, 1, 0, 0, 0, 374, 1445, 1, 0, 0, 0, 376, 1449, 1, 0, 0, 0, 378, 1453, 1, 0, 0, 0, 380, 1457, 1, 0, 0, 0, 382, 1461, 1, 0, 0, 0, 384, 1467, 1, 0, 0, 0, 386, 1471, 1, 0, 0, 0, 388, 1475, 1, 0, 0, 0, 390, 1479, 1, 0, 0, 0, 392, 1483, 1, 0, 0, 0, 394, 1487, 1, 0, 0, 0, 396, 1491, 1, 0, 0, 0, 398, 1496, 1, 0, 0, 0, 400, 1500, 1, 0, 0, 0, 402, 1504, 1, 0, 0, 0, 404, 1510, 1, 0, 0, 0, 406, 1519, 1, 0, 0, 0, 408, 1523, 1, 0, 0, 0, 410, 1527, 1, 0, 0, 0, 412, 1531, 1, 0, 0, 0, 414, 1535, 1, 0, 0, 0, 416, 1539, 1, 0, 0, 0, 418, 1543, 1, 0, 0, 0, 420, 1547, 1, 0, 0, 0, 422, 1551, 1, 0, 0, 0, 424, 1556, 1, 0, 0, 0, 426, 1562, 1, 0, 0, 0, 428, 1568, 1, 0, 0, 0, 430, 1572, 1, 0, 0, 0, 432, 1576, 1, 0, 0, 0, 434, 1580, 1, 0, 0, 0, 436, 1586, 1, 0, 0, 0, 438, 1592, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 1600, 1, 0, 0, 0, 444, 1604, 1, 0, 0, 0, 446, 1610, 1, 0, 0, 0, 448, 1616, 1, 0, 0, 0, 450, 1622, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 1, 0, 0, 454, 455, 7, 2, 0, 0, 455, 456, 7, 2, 0, 0, 456, 457, 7, 3, 0, 0, 457, 458, 7, 4, 0, 0, 458, 459, 7, 5, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 6, 0, 0, 0, 461, 17, 1, 0, 0, 0, 462, 463, 7, 0, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 7, 0, 0, 465, 466, 7, 8, 0, 0, 466, 467, 1, 0, 0, 0, 467, 468, 6, 1, 1, 0, 468, 19, 1, 0, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 9, 0, 0, 471, 472, 7, 6, 0, 0, 472, 473, 7, 1, 0, 0, 473, 474, 7, 4, 0, 0, 474, 475, 
7, 10, 0, 0, 475, 476, 1, 0, 0, 0, 476, 477, 6, 2, 2, 0, 477, 21, 1, 0, 0, 0, 478, 479, 7, 3, 0, 0, 479, 480, 7, 11, 0, 0, 480, 481, 7, 12, 0, 0, 481, 482, 7, 13, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 3, 0, 0, 484, 23, 1, 0, 0, 0, 485, 486, 7, 3, 0, 0, 486, 487, 7, 14, 0, 0, 487, 488, 7, 8, 0, 0, 488, 489, 7, 13, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 1, 0, 0, 491, 492, 7, 9, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 4, 3, 0, 494, 25, 1, 0, 0, 0, 495, 496, 7, 15, 0, 0, 496, 497, 7, 6, 0, 0, 497, 498, 7, 7, 0, 0, 498, 499, 7, 16, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 5, 4, 0, 501, 27, 1, 0, 0, 0, 502, 503, 7, 17, 0, 0, 503, 504, 7, 6, 0, 0, 504, 505, 7, 7, 0, 0, 505, 506, 7, 18, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 6, 0, 0, 508, 29, 1, 0, 0, 0, 509, 510, 7, 18, 0, 0, 510, 511, 7, 3, 0, 0, 511, 512, 7, 3, 0, 0, 512, 513, 7, 8, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 7, 1, 0, 515, 31, 1, 0, 0, 0, 516, 517, 7, 13, 0, 0, 517, 518, 7, 1, 0, 0, 518, 519, 7, 16, 0, 0, 519, 520, 7, 1, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 8, 0, 0, 523, 33, 1, 0, 0, 0, 524, 525, 7, 16, 0, 0, 525, 526, 7, 11, 0, 0, 526, 527, 5, 95, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 14, 0, 0, 529, 530, 7, 8, 0, 0, 530, 531, 7, 12, 0, 0, 531, 532, 7, 9, 0, 0, 532, 533, 7, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 9, 5, 0, 535, 35, 1, 0, 0, 0, 536, 537, 7, 6, 0, 0, 537, 538, 7, 3, 0, 0, 538, 539, 7, 9, 0, 0, 539, 540, 7, 12, 0, 0, 540, 541, 7, 16, 0, 0, 541, 542, 7, 3, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 10, 6, 0, 544, 37, 1, 0, 0, 0, 545, 546, 7, 6, 0, 0, 546, 547, 7, 7, 0, 0, 547, 548, 7, 19, 0, 0, 548, 549, 1, 0, 0, 0, 549, 550, 6, 11, 0, 0, 550, 39, 1, 0, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 7, 10, 0, 0, 553, 554, 7, 7, 0, 0, 554, 555, 7, 19, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 6, 12, 7, 0, 557, 41, 1, 0, 0, 0, 558, 559, 7, 2, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 6, 0, 0, 561, 562, 7, 5, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 13, 0, 0, 564, 43, 1, 0, 0, 0, 565, 566, 7, 2, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 12, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 2, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 14, 0, 0, 572, 45, 1, 0, 0, 0, 573, 574, 7, 19, 0, 0, 574, 575, 7, 10, 0, 0, 575, 576, 7, 3, 0, 0, 576, 577, 7, 6, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 15, 0, 0, 580, 47, 1, 0, 0, 0, 581, 582, 4, 16, 0, 0, 582, 583, 7, 1, 0, 0, 583, 584, 7, 9, 0, 0, 584, 585, 7, 13, 0, 0, 585, 586, 7, 1, 0, 0, 586, 587, 7, 9, 0, 0, 587, 588, 7, 3, 0, 0, 588, 589, 7, 2, 0, 0, 589, 590, 7, 5, 0, 0, 590, 591, 7, 12, 0, 0, 591, 592, 7, 5, 0, 0, 592, 593, 7, 2, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 16, 0, 0, 595, 49, 1, 0, 0, 0, 596, 597, 4, 17, 1, 0, 597, 598, 7, 13, 0, 0, 598, 599, 7, 7, 0, 0, 599, 600, 7, 7, 0, 0, 600, 601, 7, 18, 0, 0, 601, 602, 7, 20, 0, 0, 602, 603, 7, 8, 0, 0, 603, 604, 5, 95, 0, 0, 604, 605, 5, 128020, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 6, 17, 8, 0, 607, 51, 1, 0, 0, 0, 608, 609, 4, 18, 2, 0, 609, 610, 7, 16, 0, 0, 610, 611, 7, 3, 0, 0, 611, 612, 7, 5, 0, 0, 612, 613, 7, 6, 0, 0, 613, 614, 7, 1, 0, 0, 614, 615, 7, 4, 0, 0, 615, 616, 7, 2, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 18, 9, 0, 618, 53, 1, 0, 0, 0, 619, 620, 4, 19, 3, 0, 620, 621, 7, 21, 0, 0, 621, 622, 7, 7, 0, 0, 622, 623, 7, 1, 0, 0, 623, 624, 7, 9, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 19, 10, 0, 626, 55, 1, 0, 0, 0, 627, 628, 4, 20, 4, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 20, 0, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 13, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 20, 10, 0, 
634, 57, 1, 0, 0, 0, 635, 636, 4, 21, 5, 0, 636, 637, 7, 13, 0, 0, 637, 638, 7, 3, 0, 0, 638, 639, 7, 15, 0, 0, 639, 640, 7, 5, 0, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 21, 10, 0, 642, 59, 1, 0, 0, 0, 643, 644, 4, 22, 6, 0, 644, 645, 7, 6, 0, 0, 645, 646, 7, 1, 0, 0, 646, 647, 7, 17, 0, 0, 647, 648, 7, 10, 0, 0, 648, 649, 7, 5, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 22, 10, 0, 651, 61, 1, 0, 0, 0, 652, 653, 4, 23, 7, 0, 653, 654, 7, 13, 0, 0, 654, 655, 7, 7, 0, 0, 655, 656, 7, 7, 0, 0, 656, 657, 7, 18, 0, 0, 657, 658, 7, 20, 0, 0, 658, 659, 7, 8, 0, 0, 659, 660, 1, 0, 0, 0, 660, 661, 6, 23, 10, 0, 661, 63, 1, 0, 0, 0, 662, 664, 8, 22, 0, 0, 663, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 668, 6, 24, 0, 0, 668, 65, 1, 0, 0, 0, 669, 670, 5, 47, 0, 0, 670, 671, 5, 47, 0, 0, 671, 675, 1, 0, 0, 0, 672, 674, 8, 23, 0, 0, 673, 672, 1, 0, 0, 0, 674, 677, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 679, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 678, 680, 5, 13, 0, 0, 679, 678, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 10, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 685, 6, 25, 11, 0, 685, 67, 1, 0, 0, 0, 686, 687, 5, 47, 0, 0, 687, 688, 5, 42, 0, 0, 688, 693, 1, 0, 0, 0, 689, 692, 3, 68, 26, 0, 690, 692, 9, 0, 0, 0, 691, 689, 1, 0, 0, 0, 691, 690, 1, 0, 0, 0, 692, 695, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 694, 696, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 696, 697, 5, 42, 0, 0, 697, 698, 5, 47, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 6, 26, 11, 0, 700, 69, 1, 0, 0, 0, 701, 703, 7, 24, 0, 0, 702, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 6, 27, 11, 0, 707, 71, 1, 0, 0, 0, 708, 709, 5, 124, 0, 0, 709, 710, 1, 0, 0, 0, 710, 711, 6, 28, 12, 0, 711, 73, 1, 0, 0, 0, 712, 713, 7, 25, 0, 0, 713, 75, 1, 0, 0, 0, 714, 715, 7, 26, 0, 0, 715, 77, 1, 0, 0, 0, 716, 717, 5, 92, 0, 0, 717, 718, 7, 27, 0, 0, 718, 79, 1, 0, 0, 0, 719, 720, 8, 28, 0, 0, 720, 81, 1, 0, 0, 0, 721, 723, 7, 3, 0, 0, 722, 724, 7, 29, 0, 0, 723, 722, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 726, 1, 0, 0, 0, 725, 727, 3, 74, 29, 0, 726, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 83, 1, 0, 0, 0, 730, 731, 5, 64, 0, 0, 731, 85, 1, 0, 0, 0, 732, 733, 5, 96, 0, 0, 733, 87, 1, 0, 0, 0, 734, 738, 8, 30, 0, 0, 735, 736, 5, 96, 0, 0, 736, 738, 5, 96, 0, 0, 737, 734, 1, 0, 0, 0, 737, 735, 1, 0, 0, 0, 738, 89, 1, 0, 0, 0, 739, 740, 5, 95, 0, 0, 740, 91, 1, 0, 0, 0, 741, 745, 3, 76, 30, 0, 742, 745, 3, 74, 29, 0, 743, 745, 3, 90, 37, 0, 744, 741, 1, 0, 0, 0, 744, 742, 1, 0, 0, 0, 744, 743, 1, 0, 0, 0, 745, 93, 1, 0, 0, 0, 746, 751, 5, 34, 0, 0, 747, 750, 3, 78, 31, 0, 748, 750, 3, 80, 32, 0, 749, 747, 1, 0, 0, 0, 749, 748, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 776, 5, 34, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 762, 1, 0, 0, 0, 759, 761, 8, 23, 0, 0, 760, 759, 1, 0, 0, 0, 761, 764, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 763, 765, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 765, 766, 5, 34, 0, 0, 766, 767, 5, 34, 0, 0, 767, 768, 5, 34, 0, 0, 768, 770, 1, 0, 0, 0, 769, 771, 5, 34, 0, 0, 770, 769, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 773, 1, 0, 0, 0, 772, 774, 5, 34, 0, 0, 773, 772, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 776, 1, 0, 0, 0, 775, 746, 1, 0, 0, 0, 
775, 755, 1, 0, 0, 0, 776, 95, 1, 0, 0, 0, 777, 779, 3, 74, 29, 0, 778, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 97, 1, 0, 0, 0, 782, 784, 3, 74, 29, 0, 783, 782, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 791, 3, 116, 50, 0, 788, 790, 3, 74, 29, 0, 789, 788, 1, 0, 0, 0, 790, 793, 1, 0, 0, 0, 791, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 825, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 794, 796, 3, 116, 50, 0, 795, 797, 3, 74, 29, 0, 796, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 825, 1, 0, 0, 0, 800, 802, 3, 74, 29, 0, 801, 800, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 812, 1, 0, 0, 0, 805, 809, 3, 116, 50, 0, 806, 808, 3, 74, 29, 0, 807, 806, 1, 0, 0, 0, 808, 811, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 813, 1, 0, 0, 0, 811, 809, 1, 0, 0, 0, 812, 805, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 3, 82, 33, 0, 815, 825, 1, 0, 0, 0, 816, 818, 3, 116, 50, 0, 817, 819, 3, 74, 29, 0, 818, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 818, 1, 0, 0, 0, 820, 821, 1, 0, 0, 0, 821, 822, 1, 0, 0, 0, 822, 823, 3, 82, 33, 0, 823, 825, 1, 0, 0, 0, 824, 783, 1, 0, 0, 0, 824, 794, 1, 0, 0, 0, 824, 801, 1, 0, 0, 0, 824, 816, 1, 0, 0, 0, 825, 99, 1, 0, 0, 0, 826, 827, 7, 31, 0, 0, 827, 828, 7, 32, 0, 0, 828, 101, 1, 0, 0, 0, 829, 830, 7, 12, 0, 0, 830, 831, 7, 9, 0, 0, 831, 832, 7, 0, 0, 0, 832, 103, 1, 0, 0, 0, 833, 834, 7, 12, 0, 0, 834, 835, 7, 2, 0, 0, 835, 836, 7, 4, 0, 0, 836, 105, 1, 0, 0, 0, 837, 838, 5, 61, 0, 0, 838, 107, 1, 0, 0, 0, 839, 840, 5, 58, 0, 0, 840, 841, 5, 58, 0, 0, 841, 109, 1, 0, 0, 0, 842, 843, 5, 58, 0, 0, 843, 111, 1, 0, 0, 0, 844, 845, 5, 44, 0, 0, 845, 113, 1, 0, 0, 0, 846, 847, 7, 0, 0, 0, 847, 848, 7, 3, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 4, 0, 0, 850, 115, 1, 0, 0, 0, 851, 852, 5, 46, 0, 0, 852, 117, 1, 0, 0, 0, 853, 854, 7, 15, 0, 0, 854, 855, 7, 12, 0, 0, 855, 856, 7, 13, 0, 0, 856, 857, 7, 2, 0, 0, 857, 858, 7, 3, 0, 0, 858, 119, 1, 0, 0, 0, 859, 860, 7, 15, 0, 0, 860, 861, 7, 1, 0, 0, 861, 862, 7, 6, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 5, 0, 0, 864, 121, 1, 0, 0, 0, 865, 866, 7, 1, 0, 0, 866, 867, 7, 9, 0, 0, 867, 123, 1, 0, 0, 0, 868, 869, 7, 1, 0, 0, 869, 870, 7, 2, 0, 0, 870, 125, 1, 0, 0, 0, 871, 872, 7, 13, 0, 0, 872, 873, 7, 12, 0, 0, 873, 874, 7, 2, 0, 0, 874, 875, 7, 5, 0, 0, 875, 127, 1, 0, 0, 0, 876, 877, 7, 13, 0, 0, 877, 878, 7, 1, 0, 0, 878, 879, 7, 18, 0, 0, 879, 880, 7, 3, 0, 0, 880, 129, 1, 0, 0, 0, 881, 882, 5, 40, 0, 0, 882, 131, 1, 0, 0, 0, 883, 884, 7, 9, 0, 0, 884, 885, 7, 7, 0, 0, 885, 886, 7, 5, 0, 0, 886, 133, 1, 0, 0, 0, 887, 888, 7, 9, 0, 0, 888, 889, 7, 20, 0, 0, 889, 890, 7, 13, 0, 0, 890, 891, 7, 13, 0, 0, 891, 135, 1, 0, 0, 0, 892, 893, 7, 9, 0, 0, 893, 894, 7, 20, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 13, 0, 0, 896, 897, 7, 2, 0, 0, 897, 137, 1, 0, 0, 0, 898, 899, 7, 7, 0, 0, 899, 900, 7, 6, 0, 0, 900, 139, 1, 0, 0, 0, 901, 902, 5, 63, 0, 0, 902, 141, 1, 0, 0, 0, 903, 904, 7, 6, 0, 0, 904, 905, 7, 13, 0, 0, 905, 906, 7, 1, 0, 0, 906, 907, 7, 18, 0, 0, 907, 908, 7, 3, 0, 0, 908, 143, 1, 0, 0, 0, 909, 910, 5, 41, 0, 0, 910, 145, 1, 0, 0, 0, 911, 912, 7, 5, 0, 0, 912, 913, 7, 6, 0, 0, 913, 914, 7, 20, 0, 0, 914, 915, 7, 3, 0, 0, 915, 147, 1, 0, 0, 0, 916, 917, 5, 61, 0, 0, 917, 918, 5, 61, 0, 0, 918, 149, 1, 0, 0, 0, 919, 920, 5, 61, 0, 0, 920, 921, 5, 126, 0, 0, 921, 151, 1, 0, 0, 0, 922, 923, 5, 33, 
0, 0, 923, 924, 5, 61, 0, 0, 924, 153, 1, 0, 0, 0, 925, 926, 5, 60, 0, 0, 926, 155, 1, 0, 0, 0, 927, 928, 5, 60, 0, 0, 928, 929, 5, 61, 0, 0, 929, 157, 1, 0, 0, 0, 930, 931, 5, 62, 0, 0, 931, 159, 1, 0, 0, 0, 932, 933, 5, 62, 0, 0, 933, 934, 5, 61, 0, 0, 934, 161, 1, 0, 0, 0, 935, 936, 5, 43, 0, 0, 936, 163, 1, 0, 0, 0, 937, 938, 5, 45, 0, 0, 938, 165, 1, 0, 0, 0, 939, 940, 5, 42, 0, 0, 940, 167, 1, 0, 0, 0, 941, 942, 5, 47, 0, 0, 942, 169, 1, 0, 0, 0, 943, 944, 5, 37, 0, 0, 944, 171, 1, 0, 0, 0, 945, 946, 5, 123, 0, 0, 946, 173, 1, 0, 0, 0, 947, 948, 5, 125, 0, 0, 948, 175, 1, 0, 0, 0, 949, 950, 3, 46, 15, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 80, 13, 0, 952, 177, 1, 0, 0, 0, 953, 956, 3, 140, 62, 0, 954, 957, 3, 76, 30, 0, 955, 957, 3, 90, 37, 0, 956, 954, 1, 0, 0, 0, 956, 955, 1, 0, 0, 0, 957, 961, 1, 0, 0, 0, 958, 960, 3, 92, 38, 0, 959, 958, 1, 0, 0, 0, 960, 963, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 971, 1, 0, 0, 0, 963, 961, 1, 0, 0, 0, 964, 966, 3, 140, 62, 0, 965, 967, 3, 74, 29, 0, 966, 965, 1, 0, 0, 0, 967, 968, 1, 0, 0, 0, 968, 966, 1, 0, 0, 0, 968, 969, 1, 0, 0, 0, 969, 971, 1, 0, 0, 0, 970, 953, 1, 0, 0, 0, 970, 964, 1, 0, 0, 0, 971, 179, 1, 0, 0, 0, 972, 973, 5, 91, 0, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 82, 0, 0, 975, 976, 6, 82, 0, 0, 976, 181, 1, 0, 0, 0, 977, 978, 5, 93, 0, 0, 978, 979, 1, 0, 0, 0, 979, 980, 6, 83, 12, 0, 980, 981, 6, 83, 12, 0, 981, 183, 1, 0, 0, 0, 982, 986, 3, 76, 30, 0, 983, 985, 3, 92, 38, 0, 984, 983, 1, 0, 0, 0, 985, 988, 1, 0, 0, 0, 986, 984, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 999, 1, 0, 0, 0, 988, 986, 1, 0, 0, 0, 989, 992, 3, 90, 37, 0, 990, 992, 3, 84, 34, 0, 991, 989, 1, 0, 0, 0, 991, 990, 1, 0, 0, 0, 992, 994, 1, 0, 0, 0, 993, 995, 3, 92, 38, 0, 994, 993, 1, 0, 0, 0, 995, 996, 1, 0, 0, 0, 996, 994, 1, 0, 0, 0, 996, 997, 1, 0, 0, 0, 997, 999, 1, 0, 0, 0, 998, 982, 1, 0, 0, 0, 998, 991, 1, 0, 0, 0, 999, 185, 1, 0, 0, 0, 1000, 1002, 3, 86, 35, 0, 1001, 1003, 3, 88, 36, 0, 1002, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1002, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 3, 86, 35, 0, 1007, 187, 1, 0, 0, 0, 1008, 1009, 3, 186, 85, 0, 1009, 189, 1, 0, 0, 0, 1010, 1011, 3, 66, 25, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 87, 11, 0, 1013, 191, 1, 0, 0, 0, 1014, 1015, 3, 68, 26, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 88, 11, 0, 1017, 193, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 89, 11, 0, 1021, 195, 1, 0, 0, 0, 1022, 1023, 3, 180, 82, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 90, 14, 0, 1025, 1026, 6, 90, 15, 0, 1026, 197, 1, 0, 0, 0, 1027, 1028, 3, 72, 28, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1030, 6, 91, 16, 0, 1030, 1031, 6, 91, 12, 0, 1031, 199, 1, 0, 0, 0, 1032, 1033, 3, 70, 27, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 92, 11, 0, 1035, 201, 1, 0, 0, 0, 1036, 1037, 3, 66, 25, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 93, 11, 0, 1039, 203, 1, 0, 0, 0, 1040, 1041, 3, 68, 26, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 94, 11, 0, 1043, 205, 1, 0, 0, 0, 1044, 1045, 3, 72, 28, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 95, 16, 0, 1047, 1048, 6, 95, 12, 0, 1048, 207, 1, 0, 0, 0, 1049, 1050, 3, 180, 82, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 96, 14, 0, 1052, 209, 1, 0, 0, 0, 1053, 1054, 3, 182, 83, 0, 1054, 1055, 1, 0, 0, 0, 1055, 1056, 6, 97, 17, 0, 1056, 211, 1, 0, 0, 0, 1057, 1058, 3, 110, 47, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 6, 98, 18, 0, 1060, 213, 1, 0, 0, 0, 1061, 1062, 3, 112, 48, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 99, 19, 0, 1064, 
215, 1, 0, 0, 0, 1065, 1066, 3, 106, 45, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 100, 20, 0, 1068, 217, 1, 0, 0, 0, 1069, 1070, 7, 16, 0, 0, 1070, 1071, 7, 3, 0, 0, 1071, 1072, 7, 5, 0, 0, 1072, 1073, 7, 12, 0, 0, 1073, 1074, 7, 0, 0, 0, 1074, 1075, 7, 12, 0, 0, 1075, 1076, 7, 5, 0, 0, 1076, 1077, 7, 12, 0, 0, 1077, 219, 1, 0, 0, 0, 1078, 1082, 8, 33, 0, 0, 1079, 1080, 5, 47, 0, 0, 1080, 1082, 8, 34, 0, 0, 1081, 1078, 1, 0, 0, 0, 1081, 1079, 1, 0, 0, 0, 1082, 221, 1, 0, 0, 0, 1083, 1085, 3, 220, 102, 0, 1084, 1083, 1, 0, 0, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1084, 1, 0, 0, 0, 1086, 1087, 1, 0, 0, 0, 1087, 223, 1, 0, 0, 0, 1088, 1089, 3, 222, 103, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 104, 21, 0, 1091, 225, 1, 0, 0, 0, 1092, 1093, 3, 94, 39, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 105, 22, 0, 1095, 227, 1, 0, 0, 0, 1096, 1097, 3, 66, 25, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 106, 11, 0, 1099, 229, 1, 0, 0, 0, 1100, 1101, 3, 68, 26, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 107, 11, 0, 1103, 231, 1, 0, 0, 0, 1104, 1105, 3, 70, 27, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 108, 11, 0, 1107, 233, 1, 0, 0, 0, 1108, 1109, 3, 72, 28, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 109, 16, 0, 1111, 1112, 6, 109, 12, 0, 1112, 235, 1, 0, 0, 0, 1113, 1114, 3, 116, 50, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 110, 23, 0, 1116, 237, 1, 0, 0, 0, 1117, 1118, 3, 112, 48, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 111, 19, 0, 1120, 239, 1, 0, 0, 0, 1121, 1122, 4, 112, 8, 0, 1122, 1123, 3, 140, 62, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 112, 24, 0, 1125, 241, 1, 0, 0, 0, 1126, 1127, 4, 113, 9, 0, 1127, 1128, 3, 178, 81, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 113, 25, 0, 1130, 243, 1, 0, 0, 0, 1131, 1136, 3, 76, 30, 0, 1132, 1136, 3, 74, 29, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 166, 75, 0, 1135, 1131, 1, 0, 0, 0, 1135, 1132, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 245, 1, 0, 0, 0, 1137, 1140, 3, 76, 30, 0, 1138, 1140, 3, 166, 75, 0, 1139, 1137, 1, 0, 0, 0, 1139, 1138, 1, 0, 0, 0, 1140, 1144, 1, 0, 0, 0, 1141, 1143, 3, 244, 114, 0, 1142, 1141, 1, 0, 0, 0, 1143, 1146, 1, 0, 0, 0, 1144, 1142, 1, 0, 0, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1157, 1, 0, 0, 0, 1146, 1144, 1, 0, 0, 0, 1147, 1150, 3, 90, 37, 0, 1148, 1150, 3, 84, 34, 0, 1149, 1147, 1, 0, 0, 0, 1149, 1148, 1, 0, 0, 0, 1150, 1152, 1, 0, 0, 0, 1151, 1153, 3, 244, 114, 0, 1152, 1151, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1152, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1157, 1, 0, 0, 0, 1156, 1139, 1, 0, 0, 0, 1156, 1149, 1, 0, 0, 0, 1157, 247, 1, 0, 0, 0, 1158, 1161, 3, 246, 115, 0, 1159, 1161, 3, 186, 85, 0, 1160, 1158, 1, 0, 0, 0, 1160, 1159, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 249, 1, 0, 0, 0, 1164, 1165, 3, 66, 25, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 117, 11, 0, 1167, 251, 1, 0, 0, 0, 1168, 1169, 3, 68, 26, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 118, 11, 0, 1171, 253, 1, 0, 0, 0, 1172, 1173, 3, 70, 27, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 119, 11, 0, 1175, 255, 1, 0, 0, 0, 1176, 1177, 3, 72, 28, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 120, 16, 0, 1179, 1180, 6, 120, 12, 0, 1180, 257, 1, 0, 0, 0, 1181, 1182, 3, 106, 45, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 121, 20, 0, 1184, 259, 1, 0, 0, 0, 1185, 1186, 3, 112, 48, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 122, 19, 0, 1188, 261, 1, 0, 0, 0, 1189, 1190, 3, 116, 50, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 123, 23, 0, 1192, 263, 1, 0, 0, 0, 1193, 1194, 4, 124, 10, 0, 1194, 1195, 3, 140, 
62, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 124, 24, 0, 1197, 265, 1, 0, 0, 0, 1198, 1199, 4, 125, 11, 0, 1199, 1200, 3, 178, 81, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 125, 25, 0, 1202, 267, 1, 0, 0, 0, 1203, 1204, 7, 12, 0, 0, 1204, 1205, 7, 2, 0, 0, 1205, 269, 1, 0, 0, 0, 1206, 1207, 3, 248, 116, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 127, 26, 0, 1209, 271, 1, 0, 0, 0, 1210, 1211, 3, 66, 25, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 128, 11, 0, 1213, 273, 1, 0, 0, 0, 1214, 1215, 3, 68, 26, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 129, 11, 0, 1217, 275, 1, 0, 0, 0, 1218, 1219, 3, 70, 27, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 130, 11, 0, 1221, 277, 1, 0, 0, 0, 1222, 1223, 3, 72, 28, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 131, 16, 0, 1225, 1226, 6, 131, 12, 0, 1226, 279, 1, 0, 0, 0, 1227, 1228, 3, 180, 82, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1230, 6, 132, 14, 0, 1230, 1231, 6, 132, 27, 0, 1231, 281, 1, 0, 0, 0, 1232, 1233, 7, 7, 0, 0, 1233, 1234, 7, 9, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 133, 28, 0, 1236, 283, 1, 0, 0, 0, 1237, 1238, 7, 19, 0, 0, 1238, 1239, 7, 1, 0, 0, 1239, 1240, 7, 5, 0, 0, 1240, 1241, 7, 10, 0, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1243, 6, 134, 28, 0, 1243, 285, 1, 0, 0, 0, 1244, 1245, 8, 35, 0, 0, 1245, 287, 1, 0, 0, 0, 1246, 1248, 3, 286, 135, 0, 1247, 1246, 1, 0, 0, 0, 1248, 1249, 1, 0, 0, 0, 1249, 1247, 1, 0, 0, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1252, 3, 110, 47, 0, 1252, 1254, 1, 0, 0, 0, 1253, 1247, 1, 0, 0, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1256, 1, 0, 0, 0, 1255, 1257, 3, 286, 135, 0, 1256, 1255, 1, 0, 0, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1256, 1, 0, 0, 0, 1258, 1259, 1, 0, 0, 0, 1259, 289, 1, 0, 0, 0, 1260, 1261, 3, 288, 136, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 137, 29, 0, 1263, 291, 1, 0, 0, 0, 1264, 1265, 3, 66, 25, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 138, 11, 0, 1267, 293, 1, 0, 0, 0, 1268, 1269, 3, 68, 26, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 139, 11, 0, 1271, 295, 1, 0, 0, 0, 1272, 1273, 3, 70, 27, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 140, 11, 0, 1275, 297, 1, 0, 0, 0, 1276, 1277, 3, 72, 28, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 141, 16, 0, 1279, 1280, 6, 141, 12, 0, 1280, 1281, 6, 141, 12, 0, 1281, 299, 1, 0, 0, 0, 1282, 1283, 3, 106, 45, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 142, 20, 0, 1285, 301, 1, 0, 0, 0, 1286, 1287, 3, 112, 48, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 143, 19, 0, 1289, 303, 1, 0, 0, 0, 1290, 1291, 3, 116, 50, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 144, 23, 0, 1293, 305, 1, 0, 0, 0, 1294, 1295, 3, 284, 134, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 145, 30, 0, 1297, 307, 1, 0, 0, 0, 1298, 1299, 3, 248, 116, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 146, 26, 0, 1301, 309, 1, 0, 0, 0, 1302, 1303, 3, 188, 86, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 147, 31, 0, 1305, 311, 1, 0, 0, 0, 1306, 1307, 4, 148, 12, 0, 1307, 1308, 3, 140, 62, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1310, 6, 148, 24, 0, 1310, 313, 1, 0, 0, 0, 1311, 1312, 4, 149, 13, 0, 1312, 1313, 3, 178, 81, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 149, 25, 0, 1315, 315, 1, 0, 0, 0, 1316, 1317, 3, 66, 25, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 150, 11, 0, 1319, 317, 1, 0, 0, 0, 1320, 1321, 3, 68, 26, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 151, 11, 0, 1323, 319, 1, 0, 0, 0, 1324, 1325, 3, 70, 27, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 152, 11, 0, 1327, 321, 1, 0, 0, 0, 1328, 1329, 3, 72, 28, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 153, 16, 0, 1331, 1332, 6, 153, 12, 0, 1332, 323, 1, 0, 0, 0, 1333, 
1334, 3, 116, 50, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 154, 23, 0, 1336, 325, 1, 0, 0, 0, 1337, 1338, 4, 155, 14, 0, 1338, 1339, 3, 140, 62, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1341, 6, 155, 24, 0, 1341, 327, 1, 0, 0, 0, 1342, 1343, 4, 156, 15, 0, 1343, 1344, 3, 178, 81, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 156, 25, 0, 1346, 329, 1, 0, 0, 0, 1347, 1348, 3, 188, 86, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 157, 31, 0, 1350, 331, 1, 0, 0, 0, 1351, 1352, 3, 184, 84, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 158, 32, 0, 1354, 333, 1, 0, 0, 0, 1355, 1356, 3, 66, 25, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 159, 11, 0, 1358, 335, 1, 0, 0, 0, 1359, 1360, 3, 68, 26, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 160, 11, 0, 1362, 337, 1, 0, 0, 0, 1363, 1364, 3, 70, 27, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 161, 11, 0, 1366, 339, 1, 0, 0, 0, 1367, 1368, 3, 72, 28, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 162, 16, 0, 1370, 1371, 6, 162, 12, 0, 1371, 341, 1, 0, 0, 0, 1372, 1373, 7, 1, 0, 0, 1373, 1374, 7, 9, 0, 0, 1374, 1375, 7, 15, 0, 0, 1375, 1376, 7, 7, 0, 0, 1376, 343, 1, 0, 0, 0, 1377, 1378, 3, 66, 25, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 164, 11, 0, 1380, 345, 1, 0, 0, 0, 1381, 1382, 3, 68, 26, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 165, 11, 0, 1384, 347, 1, 0, 0, 0, 1385, 1386, 3, 70, 27, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 166, 11, 0, 1388, 349, 1, 0, 0, 0, 1389, 1390, 3, 182, 83, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1392, 6, 167, 17, 0, 1392, 1393, 6, 167, 12, 0, 1393, 351, 1, 0, 0, 0, 1394, 1395, 3, 110, 47, 0, 1395, 1396, 1, 0, 0, 0, 1396, 1397, 6, 168, 18, 0, 1397, 353, 1, 0, 0, 0, 1398, 1404, 3, 84, 34, 0, 1399, 1404, 3, 74, 29, 0, 1400, 1404, 3, 116, 50, 0, 1401, 1404, 3, 76, 30, 0, 1402, 1404, 3, 90, 37, 0, 1403, 1398, 1, 0, 0, 0, 1403, 1399, 1, 0, 0, 0, 1403, 1400, 1, 0, 0, 0, 1403, 1401, 1, 0, 0, 0, 1403, 1402, 1, 0, 0, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1403, 1, 0, 0, 0, 1405, 1406, 1, 0, 0, 0, 1406, 355, 1, 0, 0, 0, 1407, 1408, 3, 66, 25, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 170, 11, 0, 1410, 357, 1, 0, 0, 0, 1411, 1412, 3, 68, 26, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 171, 11, 0, 1414, 359, 1, 0, 0, 0, 1415, 1416, 3, 70, 27, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 172, 11, 0, 1418, 361, 1, 0, 0, 0, 1419, 1420, 3, 72, 28, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 173, 16, 0, 1422, 1423, 6, 173, 12, 0, 1423, 363, 1, 0, 0, 0, 1424, 1425, 3, 110, 47, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 174, 18, 0, 1427, 365, 1, 0, 0, 0, 1428, 1429, 3, 112, 48, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 175, 19, 0, 1431, 367, 1, 0, 0, 0, 1432, 1433, 3, 116, 50, 0, 1433, 1434, 1, 0, 0, 0, 1434, 1435, 6, 176, 23, 0, 1435, 369, 1, 0, 0, 0, 1436, 1437, 3, 282, 133, 0, 1437, 1438, 1, 0, 0, 0, 1438, 1439, 6, 177, 33, 0, 1439, 1440, 6, 177, 34, 0, 1440, 371, 1, 0, 0, 0, 1441, 1442, 3, 222, 103, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 178, 21, 0, 1444, 373, 1, 0, 0, 0, 1445, 1446, 3, 94, 39, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 179, 22, 0, 1448, 375, 1, 0, 0, 0, 1449, 1450, 3, 66, 25, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 180, 11, 0, 1452, 377, 1, 0, 0, 0, 1453, 1454, 3, 68, 26, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 181, 11, 0, 1456, 379, 1, 0, 0, 0, 1457, 1458, 3, 70, 27, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 182, 11, 0, 1460, 381, 1, 0, 0, 0, 1461, 1462, 3, 72, 28, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 183, 16, 0, 1464, 1465, 6, 183, 12, 0, 1465, 1466, 6, 183, 12, 0, 1466, 383, 1, 0, 0, 0, 1467, 1468, 3, 112, 48, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 
6, 184, 19, 0, 1470, 385, 1, 0, 0, 0, 1471, 1472, 3, 116, 50, 0, 1472, 1473, 1, 0, 0, 0, 1473, 1474, 6, 185, 23, 0, 1474, 387, 1, 0, 0, 0, 1475, 1476, 3, 248, 116, 0, 1476, 1477, 1, 0, 0, 0, 1477, 1478, 6, 186, 26, 0, 1478, 389, 1, 0, 0, 0, 1479, 1480, 3, 66, 25, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 187, 11, 0, 1482, 391, 1, 0, 0, 0, 1483, 1484, 3, 68, 26, 0, 1484, 1485, 1, 0, 0, 0, 1485, 1486, 6, 188, 11, 0, 1486, 393, 1, 0, 0, 0, 1487, 1488, 3, 70, 27, 0, 1488, 1489, 1, 0, 0, 0, 1489, 1490, 6, 189, 11, 0, 1490, 395, 1, 0, 0, 0, 1491, 1492, 3, 72, 28, 0, 1492, 1493, 1, 0, 0, 0, 1493, 1494, 6, 190, 16, 0, 1494, 1495, 6, 190, 12, 0, 1495, 397, 1, 0, 0, 0, 1496, 1497, 3, 54, 19, 0, 1497, 1498, 1, 0, 0, 0, 1498, 1499, 6, 191, 35, 0, 1499, 399, 1, 0, 0, 0, 1500, 1501, 3, 268, 126, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 36, 0, 1503, 401, 1, 0, 0, 0, 1504, 1505, 3, 282, 133, 0, 1505, 1506, 1, 0, 0, 0, 1506, 1507, 6, 193, 33, 0, 1507, 1508, 6, 193, 12, 0, 1508, 1509, 6, 193, 0, 0, 1509, 403, 1, 0, 0, 0, 1510, 1511, 7, 20, 0, 0, 1511, 1512, 7, 2, 0, 0, 1512, 1513, 7, 1, 0, 0, 1513, 1514, 7, 9, 0, 0, 1514, 1515, 7, 17, 0, 0, 1515, 1516, 1, 0, 0, 0, 1516, 1517, 6, 194, 12, 0, 1517, 1518, 6, 194, 0, 0, 1518, 405, 1, 0, 0, 0, 1519, 1520, 3, 222, 103, 0, 1520, 1521, 1, 0, 0, 0, 1521, 1522, 6, 195, 21, 0, 1522, 407, 1, 0, 0, 0, 1523, 1524, 3, 94, 39, 0, 1524, 1525, 1, 0, 0, 0, 1525, 1526, 6, 196, 22, 0, 1526, 409, 1, 0, 0, 0, 1527, 1528, 3, 110, 47, 0, 1528, 1529, 1, 0, 0, 0, 1529, 1530, 6, 197, 18, 0, 1530, 411, 1, 0, 0, 0, 1531, 1532, 3, 184, 84, 0, 1532, 1533, 1, 0, 0, 0, 1533, 1534, 6, 198, 32, 0, 1534, 413, 1, 0, 0, 0, 1535, 1536, 3, 188, 86, 0, 1536, 1537, 1, 0, 0, 0, 1537, 1538, 6, 199, 31, 0, 1538, 415, 1, 0, 0, 0, 1539, 1540, 3, 66, 25, 0, 1540, 1541, 1, 0, 0, 0, 1541, 1542, 6, 200, 11, 0, 1542, 417, 1, 0, 0, 0, 1543, 1544, 3, 68, 26, 0, 1544, 1545, 1, 0, 0, 0, 1545, 1546, 6, 201, 11, 0, 1546, 419, 1, 0, 0, 0, 1547, 1548, 3, 70, 27, 0, 1548, 1549, 1, 0, 0, 0, 1549, 1550, 6, 202, 11, 0, 1550, 421, 1, 0, 0, 0, 1551, 1552, 3, 72, 28, 0, 1552, 1553, 1, 0, 0, 0, 1553, 1554, 6, 203, 16, 0, 1554, 1555, 6, 203, 12, 0, 1555, 423, 1, 0, 0, 0, 1556, 1557, 3, 222, 103, 0, 1557, 1558, 1, 0, 0, 0, 1558, 1559, 6, 204, 21, 0, 1559, 1560, 6, 204, 12, 0, 1560, 1561, 6, 204, 37, 0, 1561, 425, 1, 0, 0, 0, 1562, 1563, 3, 94, 39, 0, 1563, 1564, 1, 0, 0, 0, 1564, 1565, 6, 205, 22, 0, 1565, 1566, 6, 205, 12, 0, 1566, 1567, 6, 205, 37, 0, 1567, 427, 1, 0, 0, 0, 1568, 1569, 3, 66, 25, 0, 1569, 1570, 1, 0, 0, 0, 1570, 1571, 6, 206, 11, 0, 1571, 429, 1, 0, 0, 0, 1572, 1573, 3, 68, 26, 0, 1573, 1574, 1, 0, 0, 0, 1574, 1575, 6, 207, 11, 0, 1575, 431, 1, 0, 0, 0, 1576, 1577, 3, 70, 27, 0, 1577, 1578, 1, 0, 0, 0, 1578, 1579, 6, 208, 11, 0, 1579, 433, 1, 0, 0, 0, 1580, 1581, 3, 110, 47, 0, 1581, 1582, 1, 0, 0, 0, 1582, 1583, 6, 209, 18, 0, 1583, 1584, 6, 209, 12, 0, 1584, 1585, 6, 209, 9, 0, 1585, 435, 1, 0, 0, 0, 1586, 1587, 3, 112, 48, 0, 1587, 1588, 1, 0, 0, 0, 1588, 1589, 6, 210, 19, 0, 1589, 1590, 6, 210, 12, 0, 1590, 1591, 6, 210, 9, 0, 1591, 437, 1, 0, 0, 0, 1592, 1593, 3, 66, 25, 0, 1593, 1594, 1, 0, 0, 0, 1594, 1595, 6, 211, 11, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 68, 26, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 11, 0, 1599, 441, 1, 0, 0, 0, 1600, 1601, 3, 70, 27, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 6, 213, 11, 0, 1603, 443, 1, 0, 0, 0, 1604, 1605, 3, 188, 86, 0, 1605, 1606, 1, 0, 0, 0, 1606, 1607, 6, 214, 12, 0, 1607, 1608, 6, 214, 0, 0, 1608, 1609, 6, 214, 31, 0, 1609, 445, 1, 0, 0, 0, 
1610, 1611, 3, 184, 84, 0, 1611, 1612, 1, 0, 0, 0, 1612, 1613, 6, 215, 12, 0, 1613, 1614, 6, 215, 0, 0, 1614, 1615, 6, 215, 32, 0, 1615, 447, 1, 0, 0, 0, 1616, 1617, 3, 100, 42, 0, 1617, 1618, 1, 0, 0, 0, 1618, 1619, 6, 216, 12, 0, 1619, 1620, 6, 216, 0, 0, 1620, 1621, 6, 216, 38, 0, 1621, 449, 1, 0, 0, 0, 1622, 1623, 3, 72, 28, 0, 1623, 1624, 1, 0, 0, 0, 1624, 1625, 6, 217, 16, 0, 1625, 1626, 6, 217, 12, 0, 1626, 451, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 665, 675, 679, 682, 691, 693, 704, 723, 728, 737, 744, 749, 751, 762, 770, 773, 775, 780, 785, 791, 798, 803, 809, 812, 820, 824, 956, 961, 968, 970, 986, 991, 996, 998, 1004, 1081, 1086, 1135, 1139, 1144, 1149, 1154, 1156, 1160, 1162, 1249, 1253, 1258, 1403, 1405, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 20, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 134, 1689, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 
154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 2, 218, 7, 218, 2, 219, 7, 219, 2, 220, 7, 220, 2, 221, 7, 221, 2, 222, 7, 222, 2, 223, 7, 223, 2, 224, 7, 224, 2, 225, 7, 225, 2, 226, 7, 226, 2, 227, 7, 227, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 692, 8, 24, 11, 24, 12, 24, 693, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 702, 8, 25, 10, 25, 12, 25, 705, 9, 25, 1, 25, 3, 25, 708, 8, 25, 1, 25, 3, 25, 711, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 720, 8, 26, 10, 26, 12, 26, 723, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 731, 8, 27, 11, 27, 12, 27, 732, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 752, 8, 33, 1, 33, 4, 33, 755, 8, 33, 11, 33, 12, 33, 756, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 766, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 773, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 778, 8, 39, 10, 39, 12, 39, 781, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 789, 8, 39, 10, 39, 12, 39, 792, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 799, 8, 39, 1, 39, 3, 39, 802, 8, 39, 3, 39, 804, 8, 39, 1, 40, 4, 
40, 807, 8, 40, 11, 40, 12, 40, 808, 1, 41, 4, 41, 812, 8, 41, 11, 41, 12, 41, 813, 1, 41, 1, 41, 5, 41, 818, 8, 41, 10, 41, 12, 41, 821, 9, 41, 1, 41, 1, 41, 4, 41, 825, 8, 41, 11, 41, 12, 41, 826, 1, 41, 4, 41, 830, 8, 41, 11, 41, 12, 41, 831, 1, 41, 1, 41, 5, 41, 836, 8, 41, 10, 41, 12, 41, 839, 9, 41, 3, 41, 841, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 847, 8, 41, 11, 41, 12, 41, 848, 1, 41, 1, 41, 3, 41, 853, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 985, 8, 81, 1, 81, 5, 81, 988, 8, 81, 10, 81, 12, 81, 991, 9, 81, 1, 81, 1, 81, 4, 81, 995, 8, 81, 11, 81, 12, 81, 996, 3, 81, 999, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 1013, 8, 84, 10, 84, 12, 84, 1016, 9, 84, 1, 84, 1, 84, 3, 84, 1020, 8, 84, 1, 84, 4, 84, 1023, 8, 84, 11, 84, 12, 84, 1024, 3, 84, 1027, 8, 84, 1, 85, 1, 85, 4, 85, 1031, 8, 85, 11, 85, 12, 85, 1032, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1110, 8, 102, 1, 103, 4, 103, 1113, 8, 103, 11, 103, 12, 103, 1114, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1162, 8, 114, 1, 115, 1, 115, 3, 115, 1166, 8, 115, 1, 115, 5, 115, 1169, 8, 115, 10, 115, 12, 115, 1172, 9, 115, 1, 115, 1, 115, 3, 115, 1176, 8, 115, 1, 115, 4, 115, 1179, 8, 115, 11, 115, 12, 115, 1180, 3, 115, 1183, 8, 115, 1, 116, 1, 116, 4, 116, 1187, 8, 116, 11, 116, 12, 116, 1188, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 
1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1272, 8, 136, 11, 136, 12, 136, 1273, 1, 136, 1, 136, 3, 136, 1278, 8, 136, 1, 136, 4, 136, 1281, 8, 136, 11, 136, 12, 136, 1282, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1424, 8, 169, 11, 169, 12, 169, 1425, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 217, 1, 218, 1, 218, 1, 218, 1, 218, 1, 218, 1, 219, 1, 219, 1, 219, 1, 219, 1, 220, 1, 220, 1, 220, 1, 220, 1, 221, 1, 221, 1, 221, 1, 221, 1, 222, 1, 222, 1, 222, 1, 222, 1, 223, 1, 223, 1, 223, 1, 223, 1, 224, 1, 224, 1, 224, 1, 224, 1, 225, 1, 225, 1, 225, 1, 225, 1, 226, 1, 226, 1, 226, 1, 226, 1, 227, 1, 227, 1, 227, 1, 227, 2, 721, 790, 0, 
228, 17, 1, 19, 2, 21, 3, 23, 4, 25, 5, 27, 6, 29, 7, 31, 8, 33, 9, 35, 10, 37, 11, 39, 12, 41, 13, 43, 14, 45, 15, 47, 16, 49, 17, 51, 18, 53, 19, 55, 20, 57, 21, 59, 22, 61, 23, 63, 24, 65, 25, 67, 26, 69, 27, 71, 28, 73, 29, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 0, 87, 0, 89, 0, 91, 0, 93, 0, 95, 30, 97, 31, 99, 32, 101, 33, 103, 34, 105, 35, 107, 36, 109, 37, 111, 38, 113, 39, 115, 40, 117, 41, 119, 42, 121, 43, 123, 44, 125, 45, 127, 46, 129, 47, 131, 48, 133, 49, 135, 50, 137, 51, 139, 52, 141, 53, 143, 54, 145, 55, 147, 56, 149, 57, 151, 58, 153, 59, 155, 60, 157, 61, 159, 62, 161, 63, 163, 64, 165, 65, 167, 66, 169, 67, 171, 68, 173, 69, 175, 70, 177, 0, 179, 71, 181, 72, 183, 73, 185, 74, 187, 0, 189, 75, 191, 76, 193, 77, 195, 78, 197, 0, 199, 0, 201, 79, 203, 80, 205, 81, 207, 0, 209, 0, 211, 0, 213, 0, 215, 0, 217, 0, 219, 82, 221, 0, 223, 83, 225, 0, 227, 0, 229, 84, 231, 85, 233, 86, 235, 0, 237, 0, 239, 0, 241, 0, 243, 0, 245, 0, 247, 0, 249, 87, 251, 88, 253, 89, 255, 90, 257, 0, 259, 0, 261, 0, 263, 0, 265, 0, 267, 0, 269, 91, 271, 0, 273, 92, 275, 93, 277, 94, 279, 0, 281, 0, 283, 95, 285, 96, 287, 0, 289, 97, 291, 0, 293, 98, 295, 99, 297, 100, 299, 0, 301, 0, 303, 0, 305, 0, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 101, 319, 102, 321, 103, 323, 0, 325, 0, 327, 0, 329, 0, 331, 0, 333, 0, 335, 104, 337, 105, 339, 106, 341, 0, 343, 107, 345, 108, 347, 109, 349, 110, 351, 0, 353, 0, 355, 111, 357, 112, 359, 113, 361, 114, 363, 0, 365, 0, 367, 0, 369, 0, 371, 0, 373, 0, 375, 0, 377, 115, 379, 116, 381, 117, 383, 0, 385, 0, 387, 0, 389, 0, 391, 118, 393, 119, 395, 120, 397, 0, 399, 121, 401, 0, 403, 0, 405, 122, 407, 0, 409, 0, 411, 0, 413, 0, 415, 0, 417, 123, 419, 124, 421, 125, 423, 0, 425, 0, 427, 0, 429, 126, 431, 127, 433, 128, 435, 0, 437, 0, 439, 129, 441, 130, 443, 131, 445, 0, 447, 0, 449, 0, 451, 0, 453, 0, 455, 0, 457, 0, 459, 0, 461, 0, 463, 0, 465, 0, 467, 132, 469, 133, 471, 134, 17, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 2, 0, 74, 74, 106, 106, 1715, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 
65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 1, 73, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 171, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 1, 181, 1, 0, 0, 0, 1, 183, 1, 0, 0, 0, 1, 185, 1, 0, 0, 0, 1, 189, 1, 0, 0, 0, 1, 191, 1, 0, 0, 0, 1, 193, 1, 0, 0, 0, 1, 195, 1, 0, 0, 0, 2, 197, 1, 0, 0, 0, 2, 199, 1, 0, 0, 0, 2, 201, 1, 0, 0, 0, 2, 203, 1, 0, 0, 0, 2, 205, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 3, 219, 1, 0, 0, 0, 3, 223, 1, 0, 0, 0, 3, 225, 1, 0, 0, 0, 3, 227, 1, 0, 0, 0, 3, 229, 1, 0, 0, 0, 3, 231, 1, 0, 0, 0, 3, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 4, 241, 1, 0, 0, 0, 4, 243, 1, 0, 0, 0, 4, 249, 1, 0, 0, 0, 4, 251, 1, 0, 0, 0, 4, 253, 1, 0, 0, 0, 4, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 5, 263, 1, 0, 0, 0, 5, 265, 1, 0, 0, 0, 5, 267, 1, 0, 0, 0, 5, 269, 1, 0, 0, 0, 5, 271, 1, 0, 0, 0, 5, 273, 1, 0, 0, 0, 5, 275, 1, 0, 0, 0, 5, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 6, 283, 1, 0, 0, 0, 6, 285, 1, 0, 0, 0, 6, 289, 1, 0, 0, 0, 6, 291, 1, 0, 0, 0, 6, 293, 1, 0, 0, 0, 6, 295, 1, 0, 0, 0, 6, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 7, 307, 1, 0, 0, 0, 7, 309, 1, 0, 0, 0, 7, 311, 1, 0, 0, 0, 7, 313, 1, 0, 0, 0, 7, 315, 1, 0, 0, 0, 7, 317, 1, 0, 0, 0, 7, 319, 1, 0, 0, 0, 7, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 8, 325, 1, 0, 0, 0, 8, 327, 1, 0, 0, 0, 8, 329, 1, 0, 0, 0, 8, 331, 1, 0, 0, 0, 8, 333, 1, 0, 0, 0, 8, 335, 1, 0, 0, 0, 8, 337, 1, 0, 0, 0, 8, 339, 1, 0, 0, 0, 9, 341, 1, 0, 0, 0, 9, 343, 1, 0, 0, 0, 9, 345, 1, 0, 0, 0, 9, 347, 1, 0, 0, 0, 9, 349, 1, 0, 0, 0, 10, 351, 1, 0, 0, 0, 10, 353, 1, 0, 0, 0, 10, 355, 1, 0, 0, 0, 10, 357, 1, 0, 0, 0, 10, 359, 1, 0, 0, 0, 10, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 11, 367, 1, 0, 0, 0, 11, 369, 1, 0, 0, 0, 11, 371, 1, 0, 0, 0, 11, 373, 1, 0, 0, 0, 11, 375, 1, 0, 0, 0, 11, 377, 1, 0, 0, 0, 11, 379, 1, 0, 0, 0, 11, 381, 1, 0, 0, 0, 12, 383, 1, 0, 0, 0, 12, 385, 1, 0, 0, 0, 12, 387, 1, 0, 0, 0, 12, 389, 1, 0, 0, 0, 12, 391, 1, 0, 0, 0, 12, 393, 1, 0, 0, 0, 12, 395, 1, 0, 0, 0, 13, 397, 1, 0, 0, 0, 13, 399, 1, 0, 0, 0, 13, 401, 1, 0, 0, 0, 13, 403, 1, 0, 0, 0, 13, 405, 1, 0, 0, 0, 13, 407, 1, 0, 0, 0, 13, 409, 1, 0, 0, 0, 13, 411, 1, 0, 0, 0, 13, 413, 1, 0, 0, 0, 13, 415, 1, 0, 0, 0, 13, 417, 1, 0, 0, 0, 13, 419, 1, 0, 0, 0, 13, 421, 1, 0, 0, 0, 14, 423, 1, 0, 0, 0, 14, 425, 1, 0, 0, 0, 14, 427, 1, 0, 0, 0, 14, 429, 1, 0, 0, 0, 14, 431, 1, 0, 0, 0, 14, 433, 1, 0, 0, 0, 15, 435, 1, 0, 0, 0, 15, 437, 1, 0, 0, 0, 15, 439, 1, 0, 0, 0, 15, 441, 1, 0, 0, 0, 15, 443, 1, 0, 0, 0, 15, 445, 1, 0, 0, 
0, 15, 447, 1, 0, 0, 0, 15, 449, 1, 0, 0, 0, 15, 451, 1, 0, 0, 0, 16, 453, 1, 0, 0, 0, 16, 455, 1, 0, 0, 0, 16, 457, 1, 0, 0, 0, 16, 459, 1, 0, 0, 0, 16, 461, 1, 0, 0, 0, 16, 463, 1, 0, 0, 0, 16, 465, 1, 0, 0, 0, 16, 467, 1, 0, 0, 0, 16, 469, 1, 0, 0, 0, 16, 471, 1, 0, 0, 0, 17, 473, 1, 0, 0, 0, 19, 483, 1, 0, 0, 0, 21, 490, 1, 0, 0, 0, 23, 499, 1, 0, 0, 0, 25, 506, 1, 0, 0, 0, 27, 516, 1, 0, 0, 0, 29, 523, 1, 0, 0, 0, 31, 530, 1, 0, 0, 0, 33, 537, 1, 0, 0, 0, 35, 545, 1, 0, 0, 0, 37, 557, 1, 0, 0, 0, 39, 566, 1, 0, 0, 0, 41, 572, 1, 0, 0, 0, 43, 579, 1, 0, 0, 0, 45, 586, 1, 0, 0, 0, 47, 594, 1, 0, 0, 0, 49, 602, 1, 0, 0, 0, 51, 611, 1, 0, 0, 0, 53, 627, 1, 0, 0, 0, 55, 642, 1, 0, 0, 0, 57, 654, 1, 0, 0, 0, 59, 665, 1, 0, 0, 0, 61, 673, 1, 0, 0, 0, 63, 681, 1, 0, 0, 0, 65, 691, 1, 0, 0, 0, 67, 697, 1, 0, 0, 0, 69, 714, 1, 0, 0, 0, 71, 730, 1, 0, 0, 0, 73, 736, 1, 0, 0, 0, 75, 740, 1, 0, 0, 0, 77, 742, 1, 0, 0, 0, 79, 744, 1, 0, 0, 0, 81, 747, 1, 0, 0, 0, 83, 749, 1, 0, 0, 0, 85, 758, 1, 0, 0, 0, 87, 760, 1, 0, 0, 0, 89, 765, 1, 0, 0, 0, 91, 767, 1, 0, 0, 0, 93, 772, 1, 0, 0, 0, 95, 803, 1, 0, 0, 0, 97, 806, 1, 0, 0, 0, 99, 852, 1, 0, 0, 0, 101, 854, 1, 0, 0, 0, 103, 857, 1, 0, 0, 0, 105, 861, 1, 0, 0, 0, 107, 865, 1, 0, 0, 0, 109, 867, 1, 0, 0, 0, 111, 870, 1, 0, 0, 0, 113, 872, 1, 0, 0, 0, 115, 874, 1, 0, 0, 0, 117, 879, 1, 0, 0, 0, 119, 881, 1, 0, 0, 0, 121, 887, 1, 0, 0, 0, 123, 893, 1, 0, 0, 0, 125, 896, 1, 0, 0, 0, 127, 899, 1, 0, 0, 0, 129, 904, 1, 0, 0, 0, 131, 909, 1, 0, 0, 0, 133, 911, 1, 0, 0, 0, 135, 915, 1, 0, 0, 0, 137, 920, 1, 0, 0, 0, 139, 926, 1, 0, 0, 0, 141, 929, 1, 0, 0, 0, 143, 931, 1, 0, 0, 0, 145, 937, 1, 0, 0, 0, 147, 939, 1, 0, 0, 0, 149, 944, 1, 0, 0, 0, 151, 947, 1, 0, 0, 0, 153, 950, 1, 0, 0, 0, 155, 953, 1, 0, 0, 0, 157, 955, 1, 0, 0, 0, 159, 958, 1, 0, 0, 0, 161, 960, 1, 0, 0, 0, 163, 963, 1, 0, 0, 0, 165, 965, 1, 0, 0, 0, 167, 967, 1, 0, 0, 0, 169, 969, 1, 0, 0, 0, 171, 971, 1, 0, 0, 0, 173, 973, 1, 0, 0, 0, 175, 975, 1, 0, 0, 0, 177, 977, 1, 0, 0, 0, 179, 998, 1, 0, 0, 0, 181, 1000, 1, 0, 0, 0, 183, 1005, 1, 0, 0, 0, 185, 1026, 1, 0, 0, 0, 187, 1028, 1, 0, 0, 0, 189, 1036, 1, 0, 0, 0, 191, 1038, 1, 0, 0, 0, 193, 1042, 1, 0, 0, 0, 195, 1046, 1, 0, 0, 0, 197, 1050, 1, 0, 0, 0, 199, 1055, 1, 0, 0, 0, 201, 1060, 1, 0, 0, 0, 203, 1064, 1, 0, 0, 0, 205, 1068, 1, 0, 0, 0, 207, 1072, 1, 0, 0, 0, 209, 1077, 1, 0, 0, 0, 211, 1081, 1, 0, 0, 0, 213, 1085, 1, 0, 0, 0, 215, 1089, 1, 0, 0, 0, 217, 1093, 1, 0, 0, 0, 219, 1097, 1, 0, 0, 0, 221, 1109, 1, 0, 0, 0, 223, 1112, 1, 0, 0, 0, 225, 1116, 1, 0, 0, 0, 227, 1120, 1, 0, 0, 0, 229, 1124, 1, 0, 0, 0, 231, 1128, 1, 0, 0, 0, 233, 1132, 1, 0, 0, 0, 235, 1136, 1, 0, 0, 0, 237, 1141, 1, 0, 0, 0, 239, 1145, 1, 0, 0, 0, 241, 1149, 1, 0, 0, 0, 243, 1153, 1, 0, 0, 0, 245, 1161, 1, 0, 0, 0, 247, 1182, 1, 0, 0, 0, 249, 1186, 1, 0, 0, 0, 251, 1190, 1, 0, 0, 0, 253, 1194, 1, 0, 0, 0, 255, 1198, 1, 0, 0, 0, 257, 1202, 1, 0, 0, 0, 259, 1207, 1, 0, 0, 0, 261, 1211, 1, 0, 0, 0, 263, 1215, 1, 0, 0, 0, 265, 1219, 1, 0, 0, 0, 267, 1223, 1, 0, 0, 0, 269, 1227, 1, 0, 0, 0, 271, 1230, 1, 0, 0, 0, 273, 1234, 1, 0, 0, 0, 275, 1238, 1, 0, 0, 0, 277, 1242, 1, 0, 0, 0, 279, 1246, 1, 0, 0, 0, 281, 1251, 1, 0, 0, 0, 283, 1256, 1, 0, 0, 0, 285, 1261, 1, 0, 0, 0, 287, 1268, 1, 0, 0, 0, 289, 1277, 1, 0, 0, 0, 291, 1284, 1, 0, 0, 0, 293, 1288, 1, 0, 0, 0, 295, 1292, 1, 0, 0, 0, 297, 1296, 1, 0, 0, 0, 299, 1300, 1, 0, 0, 0, 301, 1306, 1, 0, 0, 0, 303, 1310, 1, 0, 0, 0, 305, 1314, 1, 0, 0, 0, 307, 1318, 1, 0, 0, 0, 309, 1322, 1, 0, 0, 0, 311, 1326, 1, 0, 0, 
0, 313, 1330, 1, 0, 0, 0, 315, 1334, 1, 0, 0, 0, 317, 1338, 1, 0, 0, 0, 319, 1342, 1, 0, 0, 0, 321, 1346, 1, 0, 0, 0, 323, 1350, 1, 0, 0, 0, 325, 1355, 1, 0, 0, 0, 327, 1359, 1, 0, 0, 0, 329, 1363, 1, 0, 0, 0, 331, 1367, 1, 0, 0, 0, 333, 1371, 1, 0, 0, 0, 335, 1375, 1, 0, 0, 0, 337, 1379, 1, 0, 0, 0, 339, 1383, 1, 0, 0, 0, 341, 1387, 1, 0, 0, 0, 343, 1392, 1, 0, 0, 0, 345, 1397, 1, 0, 0, 0, 347, 1401, 1, 0, 0, 0, 349, 1405, 1, 0, 0, 0, 351, 1409, 1, 0, 0, 0, 353, 1414, 1, 0, 0, 0, 355, 1423, 1, 0, 0, 0, 357, 1427, 1, 0, 0, 0, 359, 1431, 1, 0, 0, 0, 361, 1435, 1, 0, 0, 0, 363, 1439, 1, 0, 0, 0, 365, 1444, 1, 0, 0, 0, 367, 1448, 1, 0, 0, 0, 369, 1452, 1, 0, 0, 0, 371, 1456, 1, 0, 0, 0, 373, 1461, 1, 0, 0, 0, 375, 1465, 1, 0, 0, 0, 377, 1469, 1, 0, 0, 0, 379, 1473, 1, 0, 0, 0, 381, 1477, 1, 0, 0, 0, 383, 1481, 1, 0, 0, 0, 385, 1487, 1, 0, 0, 0, 387, 1491, 1, 0, 0, 0, 389, 1495, 1, 0, 0, 0, 391, 1499, 1, 0, 0, 0, 393, 1503, 1, 0, 0, 0, 395, 1507, 1, 0, 0, 0, 397, 1511, 1, 0, 0, 0, 399, 1516, 1, 0, 0, 0, 401, 1521, 1, 0, 0, 0, 403, 1525, 1, 0, 0, 0, 405, 1531, 1, 0, 0, 0, 407, 1540, 1, 0, 0, 0, 409, 1544, 1, 0, 0, 0, 411, 1548, 1, 0, 0, 0, 413, 1552, 1, 0, 0, 0, 415, 1556, 1, 0, 0, 0, 417, 1560, 1, 0, 0, 0, 419, 1564, 1, 0, 0, 0, 421, 1568, 1, 0, 0, 0, 423, 1572, 1, 0, 0, 0, 425, 1577, 1, 0, 0, 0, 427, 1583, 1, 0, 0, 0, 429, 1589, 1, 0, 0, 0, 431, 1593, 1, 0, 0, 0, 433, 1597, 1, 0, 0, 0, 435, 1601, 1, 0, 0, 0, 437, 1607, 1, 0, 0, 0, 439, 1613, 1, 0, 0, 0, 441, 1617, 1, 0, 0, 0, 443, 1621, 1, 0, 0, 0, 445, 1625, 1, 0, 0, 0, 447, 1631, 1, 0, 0, 0, 449, 1637, 1, 0, 0, 0, 451, 1643, 1, 0, 0, 0, 453, 1648, 1, 0, 0, 0, 455, 1653, 1, 0, 0, 0, 457, 1657, 1, 0, 0, 0, 459, 1661, 1, 0, 0, 0, 461, 1665, 1, 0, 0, 0, 463, 1669, 1, 0, 0, 0, 465, 1673, 1, 0, 0, 0, 467, 1677, 1, 0, 0, 0, 469, 1681, 1, 0, 0, 0, 471, 1685, 1, 0, 0, 0, 473, 474, 7, 0, 0, 0, 474, 475, 7, 1, 0, 0, 475, 476, 7, 2, 0, 0, 476, 477, 7, 2, 0, 0, 477, 478, 7, 3, 0, 0, 478, 479, 7, 4, 0, 0, 479, 480, 7, 5, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 0, 0, 0, 482, 18, 1, 0, 0, 0, 483, 484, 7, 0, 0, 0, 484, 485, 7, 6, 0, 0, 485, 486, 7, 7, 0, 0, 486, 487, 7, 8, 0, 0, 487, 488, 1, 0, 0, 0, 488, 489, 6, 1, 1, 0, 489, 20, 1, 0, 0, 0, 490, 491, 7, 3, 0, 0, 491, 492, 7, 9, 0, 0, 492, 493, 7, 6, 0, 0, 493, 494, 7, 1, 0, 0, 494, 495, 7, 4, 0, 0, 495, 496, 7, 10, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 2, 2, 0, 498, 22, 1, 0, 0, 0, 499, 500, 7, 3, 0, 0, 500, 501, 7, 11, 0, 0, 501, 502, 7, 12, 0, 0, 502, 503, 7, 13, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 3, 0, 0, 505, 24, 1, 0, 0, 0, 506, 507, 7, 3, 0, 0, 507, 508, 7, 14, 0, 0, 508, 509, 7, 8, 0, 0, 509, 510, 7, 13, 0, 0, 510, 511, 7, 12, 0, 0, 511, 512, 7, 1, 0, 0, 512, 513, 7, 9, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 4, 3, 0, 515, 26, 1, 0, 0, 0, 516, 517, 7, 15, 0, 0, 517, 518, 7, 6, 0, 0, 518, 519, 7, 7, 0, 0, 519, 520, 7, 16, 0, 0, 520, 521, 1, 0, 0, 0, 521, 522, 6, 5, 4, 0, 522, 28, 1, 0, 0, 0, 523, 524, 7, 17, 0, 0, 524, 525, 7, 6, 0, 0, 525, 526, 7, 7, 0, 0, 526, 527, 7, 18, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 6, 0, 0, 529, 30, 1, 0, 0, 0, 530, 531, 7, 18, 0, 0, 531, 532, 7, 3, 0, 0, 532, 533, 7, 3, 0, 0, 533, 534, 7, 8, 0, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 7, 1, 0, 536, 32, 1, 0, 0, 0, 537, 538, 7, 13, 0, 0, 538, 539, 7, 1, 0, 0, 539, 540, 7, 16, 0, 0, 540, 541, 7, 1, 0, 0, 541, 542, 7, 5, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 8, 0, 0, 544, 34, 1, 0, 0, 0, 545, 546, 7, 16, 0, 0, 546, 547, 7, 11, 0, 0, 547, 548, 5, 95, 0, 0, 548, 549, 7, 3, 0, 0, 549, 550, 7, 14, 0, 0, 550, 
551, 7, 8, 0, 0, 551, 552, 7, 12, 0, 0, 552, 553, 7, 9, 0, 0, 553, 554, 7, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 9, 5, 0, 556, 36, 1, 0, 0, 0, 557, 558, 7, 6, 0, 0, 558, 559, 7, 3, 0, 0, 559, 560, 7, 9, 0, 0, 560, 561, 7, 12, 0, 0, 561, 562, 7, 16, 0, 0, 562, 563, 7, 3, 0, 0, 563, 564, 1, 0, 0, 0, 564, 565, 6, 10, 6, 0, 565, 38, 1, 0, 0, 0, 566, 567, 7, 6, 0, 0, 567, 568, 7, 7, 0, 0, 568, 569, 7, 19, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 6, 11, 0, 0, 571, 40, 1, 0, 0, 0, 572, 573, 7, 2, 0, 0, 573, 574, 7, 10, 0, 0, 574, 575, 7, 7, 0, 0, 575, 576, 7, 19, 0, 0, 576, 577, 1, 0, 0, 0, 577, 578, 6, 12, 7, 0, 578, 42, 1, 0, 0, 0, 579, 580, 7, 2, 0, 0, 580, 581, 7, 7, 0, 0, 581, 582, 7, 6, 0, 0, 582, 583, 7, 5, 0, 0, 583, 584, 1, 0, 0, 0, 584, 585, 6, 13, 0, 0, 585, 44, 1, 0, 0, 0, 586, 587, 7, 2, 0, 0, 587, 588, 7, 5, 0, 0, 588, 589, 7, 12, 0, 0, 589, 590, 7, 5, 0, 0, 590, 591, 7, 2, 0, 0, 591, 592, 1, 0, 0, 0, 592, 593, 6, 14, 0, 0, 593, 46, 1, 0, 0, 0, 594, 595, 7, 19, 0, 0, 595, 596, 7, 10, 0, 0, 596, 597, 7, 3, 0, 0, 597, 598, 7, 6, 0, 0, 598, 599, 7, 3, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 6, 15, 0, 0, 601, 48, 1, 0, 0, 0, 602, 603, 7, 13, 0, 0, 603, 604, 7, 7, 0, 0, 604, 605, 7, 7, 0, 0, 605, 606, 7, 18, 0, 0, 606, 607, 7, 20, 0, 0, 607, 608, 7, 8, 0, 0, 608, 609, 1, 0, 0, 0, 609, 610, 6, 16, 8, 0, 610, 50, 1, 0, 0, 0, 611, 612, 4, 17, 0, 0, 612, 613, 7, 4, 0, 0, 613, 614, 7, 10, 0, 0, 614, 615, 7, 12, 0, 0, 615, 616, 7, 9, 0, 0, 616, 617, 7, 17, 0, 0, 617, 618, 7, 3, 0, 0, 618, 619, 5, 95, 0, 0, 619, 620, 7, 8, 0, 0, 620, 621, 7, 7, 0, 0, 621, 622, 7, 1, 0, 0, 622, 623, 7, 9, 0, 0, 623, 624, 7, 5, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 17, 9, 0, 626, 52, 1, 0, 0, 0, 627, 628, 4, 18, 1, 0, 628, 629, 7, 1, 0, 0, 629, 630, 7, 9, 0, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 1, 0, 0, 632, 633, 7, 9, 0, 0, 633, 634, 7, 3, 0, 0, 634, 635, 7, 2, 0, 0, 635, 636, 7, 5, 0, 0, 636, 637, 7, 12, 0, 0, 637, 638, 7, 5, 0, 0, 638, 639, 7, 2, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 6, 18, 0, 0, 641, 54, 1, 0, 0, 0, 642, 643, 4, 19, 2, 0, 643, 644, 7, 13, 0, 0, 644, 645, 7, 7, 0, 0, 645, 646, 7, 7, 0, 0, 646, 647, 7, 18, 0, 0, 647, 648, 7, 20, 0, 0, 648, 649, 7, 8, 0, 0, 649, 650, 5, 95, 0, 0, 650, 651, 5, 128020, 0, 0, 651, 652, 1, 0, 0, 0, 652, 653, 6, 19, 10, 0, 653, 56, 1, 0, 0, 0, 654, 655, 4, 20, 3, 0, 655, 656, 7, 16, 0, 0, 656, 657, 7, 3, 0, 0, 657, 658, 7, 5, 0, 0, 658, 659, 7, 6, 0, 0, 659, 660, 7, 1, 0, 0, 660, 661, 7, 4, 0, 0, 661, 662, 7, 2, 0, 0, 662, 663, 1, 0, 0, 0, 663, 664, 6, 20, 11, 0, 664, 58, 1, 0, 0, 0, 665, 666, 4, 21, 4, 0, 666, 667, 7, 15, 0, 0, 667, 668, 7, 20, 0, 0, 668, 669, 7, 13, 0, 0, 669, 670, 7, 13, 0, 0, 670, 671, 1, 0, 0, 0, 671, 672, 6, 21, 8, 0, 672, 60, 1, 0, 0, 0, 673, 674, 4, 22, 5, 0, 674, 675, 7, 13, 0, 0, 675, 676, 7, 3, 0, 0, 676, 677, 7, 15, 0, 0, 677, 678, 7, 5, 0, 0, 678, 679, 1, 0, 0, 0, 679, 680, 6, 22, 8, 0, 680, 62, 1, 0, 0, 0, 681, 682, 4, 23, 6, 0, 682, 683, 7, 6, 0, 0, 683, 684, 7, 1, 0, 0, 684, 685, 7, 17, 0, 0, 685, 686, 7, 10, 0, 0, 686, 687, 7, 5, 0, 0, 687, 688, 1, 0, 0, 0, 688, 689, 6, 23, 8, 0, 689, 64, 1, 0, 0, 0, 690, 692, 8, 21, 0, 0, 691, 690, 1, 0, 0, 0, 692, 693, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 6, 24, 0, 0, 696, 66, 1, 0, 0, 0, 697, 698, 5, 47, 0, 0, 698, 699, 5, 47, 0, 0, 699, 703, 1, 0, 0, 0, 700, 702, 8, 22, 0, 0, 701, 700, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 706, 708, 5, 13, 0, 0, 707, 706, 1, 
0, 0, 0, 707, 708, 1, 0, 0, 0, 708, 710, 1, 0, 0, 0, 709, 711, 5, 10, 0, 0, 710, 709, 1, 0, 0, 0, 710, 711, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 713, 6, 25, 12, 0, 713, 68, 1, 0, 0, 0, 714, 715, 5, 47, 0, 0, 715, 716, 5, 42, 0, 0, 716, 721, 1, 0, 0, 0, 717, 720, 3, 69, 26, 0, 718, 720, 9, 0, 0, 0, 719, 717, 1, 0, 0, 0, 719, 718, 1, 0, 0, 0, 720, 723, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 722, 724, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 724, 725, 5, 42, 0, 0, 725, 726, 5, 47, 0, 0, 726, 727, 1, 0, 0, 0, 727, 728, 6, 26, 12, 0, 728, 70, 1, 0, 0, 0, 729, 731, 7, 23, 0, 0, 730, 729, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 730, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 735, 6, 27, 12, 0, 735, 72, 1, 0, 0, 0, 736, 737, 5, 124, 0, 0, 737, 738, 1, 0, 0, 0, 738, 739, 6, 28, 13, 0, 739, 74, 1, 0, 0, 0, 740, 741, 7, 24, 0, 0, 741, 76, 1, 0, 0, 0, 742, 743, 7, 25, 0, 0, 743, 78, 1, 0, 0, 0, 744, 745, 5, 92, 0, 0, 745, 746, 7, 26, 0, 0, 746, 80, 1, 0, 0, 0, 747, 748, 8, 27, 0, 0, 748, 82, 1, 0, 0, 0, 749, 751, 7, 3, 0, 0, 750, 752, 7, 28, 0, 0, 751, 750, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 755, 3, 75, 29, 0, 754, 753, 1, 0, 0, 0, 755, 756, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 757, 84, 1, 0, 0, 0, 758, 759, 5, 64, 0, 0, 759, 86, 1, 0, 0, 0, 760, 761, 5, 96, 0, 0, 761, 88, 1, 0, 0, 0, 762, 766, 8, 29, 0, 0, 763, 764, 5, 96, 0, 0, 764, 766, 5, 96, 0, 0, 765, 762, 1, 0, 0, 0, 765, 763, 1, 0, 0, 0, 766, 90, 1, 0, 0, 0, 767, 768, 5, 95, 0, 0, 768, 92, 1, 0, 0, 0, 769, 773, 3, 77, 30, 0, 770, 773, 3, 75, 29, 0, 771, 773, 3, 91, 37, 0, 772, 769, 1, 0, 0, 0, 772, 770, 1, 0, 0, 0, 772, 771, 1, 0, 0, 0, 773, 94, 1, 0, 0, 0, 774, 779, 5, 34, 0, 0, 775, 778, 3, 79, 31, 0, 776, 778, 3, 81, 32, 0, 777, 775, 1, 0, 0, 0, 777, 776, 1, 0, 0, 0, 778, 781, 1, 0, 0, 0, 779, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 782, 1, 0, 0, 0, 781, 779, 1, 0, 0, 0, 782, 804, 5, 34, 0, 0, 783, 784, 5, 34, 0, 0, 784, 785, 5, 34, 0, 0, 785, 786, 5, 34, 0, 0, 786, 790, 1, 0, 0, 0, 787, 789, 8, 22, 0, 0, 788, 787, 1, 0, 0, 0, 789, 792, 1, 0, 0, 0, 790, 791, 1, 0, 0, 0, 790, 788, 1, 0, 0, 0, 791, 793, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 793, 794, 5, 34, 0, 0, 794, 795, 5, 34, 0, 0, 795, 796, 5, 34, 0, 0, 796, 798, 1, 0, 0, 0, 797, 799, 5, 34, 0, 0, 798, 797, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 801, 1, 0, 0, 0, 800, 802, 5, 34, 0, 0, 801, 800, 1, 0, 0, 0, 801, 802, 1, 0, 0, 0, 802, 804, 1, 0, 0, 0, 803, 774, 1, 0, 0, 0, 803, 783, 1, 0, 0, 0, 804, 96, 1, 0, 0, 0, 805, 807, 3, 75, 29, 0, 806, 805, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 98, 1, 0, 0, 0, 810, 812, 3, 75, 29, 0, 811, 810, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 811, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 819, 3, 117, 50, 0, 816, 818, 3, 75, 29, 0, 817, 816, 1, 0, 0, 0, 818, 821, 1, 0, 0, 0, 819, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 853, 1, 0, 0, 0, 821, 819, 1, 0, 0, 0, 822, 824, 3, 117, 50, 0, 823, 825, 3, 75, 29, 0, 824, 823, 1, 0, 0, 0, 825, 826, 1, 0, 0, 0, 826, 824, 1, 0, 0, 0, 826, 827, 1, 0, 0, 0, 827, 853, 1, 0, 0, 0, 828, 830, 3, 75, 29, 0, 829, 828, 1, 0, 0, 0, 830, 831, 1, 0, 0, 0, 831, 829, 1, 0, 0, 0, 831, 832, 1, 0, 0, 0, 832, 840, 1, 0, 0, 0, 833, 837, 3, 117, 50, 0, 834, 836, 3, 75, 29, 0, 835, 834, 1, 0, 0, 0, 836, 839, 1, 0, 0, 0, 837, 835, 1, 0, 0, 0, 837, 838, 1, 0, 0, 0, 838, 841, 1, 0, 0, 0, 839, 837, 1, 0, 0, 0, 840, 833, 1, 0, 0, 0, 840, 841, 1, 0, 0, 0, 841, 842, 1, 0, 0, 0, 842, 843, 3, 83, 33, 0, 843, 853, 
1, 0, 0, 0, 844, 846, 3, 117, 50, 0, 845, 847, 3, 75, 29, 0, 846, 845, 1, 0, 0, 0, 847, 848, 1, 0, 0, 0, 848, 846, 1, 0, 0, 0, 848, 849, 1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 851, 3, 83, 33, 0, 851, 853, 1, 0, 0, 0, 852, 811, 1, 0, 0, 0, 852, 822, 1, 0, 0, 0, 852, 829, 1, 0, 0, 0, 852, 844, 1, 0, 0, 0, 853, 100, 1, 0, 0, 0, 854, 855, 7, 30, 0, 0, 855, 856, 7, 31, 0, 0, 856, 102, 1, 0, 0, 0, 857, 858, 7, 12, 0, 0, 858, 859, 7, 9, 0, 0, 859, 860, 7, 0, 0, 0, 860, 104, 1, 0, 0, 0, 861, 862, 7, 12, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 4, 0, 0, 864, 106, 1, 0, 0, 0, 865, 866, 5, 61, 0, 0, 866, 108, 1, 0, 0, 0, 867, 868, 5, 58, 0, 0, 868, 869, 5, 58, 0, 0, 869, 110, 1, 0, 0, 0, 870, 871, 5, 58, 0, 0, 871, 112, 1, 0, 0, 0, 872, 873, 5, 44, 0, 0, 873, 114, 1, 0, 0, 0, 874, 875, 7, 0, 0, 0, 875, 876, 7, 3, 0, 0, 876, 877, 7, 2, 0, 0, 877, 878, 7, 4, 0, 0, 878, 116, 1, 0, 0, 0, 879, 880, 5, 46, 0, 0, 880, 118, 1, 0, 0, 0, 881, 882, 7, 15, 0, 0, 882, 883, 7, 12, 0, 0, 883, 884, 7, 13, 0, 0, 884, 885, 7, 2, 0, 0, 885, 886, 7, 3, 0, 0, 886, 120, 1, 0, 0, 0, 887, 888, 7, 15, 0, 0, 888, 889, 7, 1, 0, 0, 889, 890, 7, 6, 0, 0, 890, 891, 7, 2, 0, 0, 891, 892, 7, 5, 0, 0, 892, 122, 1, 0, 0, 0, 893, 894, 7, 1, 0, 0, 894, 895, 7, 9, 0, 0, 895, 124, 1, 0, 0, 0, 896, 897, 7, 1, 0, 0, 897, 898, 7, 2, 0, 0, 898, 126, 1, 0, 0, 0, 899, 900, 7, 13, 0, 0, 900, 901, 7, 12, 0, 0, 901, 902, 7, 2, 0, 0, 902, 903, 7, 5, 0, 0, 903, 128, 1, 0, 0, 0, 904, 905, 7, 13, 0, 0, 905, 906, 7, 1, 0, 0, 906, 907, 7, 18, 0, 0, 907, 908, 7, 3, 0, 0, 908, 130, 1, 0, 0, 0, 909, 910, 5, 40, 0, 0, 910, 132, 1, 0, 0, 0, 911, 912, 7, 9, 0, 0, 912, 913, 7, 7, 0, 0, 913, 914, 7, 5, 0, 0, 914, 134, 1, 0, 0, 0, 915, 916, 7, 9, 0, 0, 916, 917, 7, 20, 0, 0, 917, 918, 7, 13, 0, 0, 918, 919, 7, 13, 0, 0, 919, 136, 1, 0, 0, 0, 920, 921, 7, 9, 0, 0, 921, 922, 7, 20, 0, 0, 922, 923, 7, 13, 0, 0, 923, 924, 7, 13, 0, 0, 924, 925, 7, 2, 0, 0, 925, 138, 1, 0, 0, 0, 926, 927, 7, 7, 0, 0, 927, 928, 7, 6, 0, 0, 928, 140, 1, 0, 0, 0, 929, 930, 5, 63, 0, 0, 930, 142, 1, 0, 0, 0, 931, 932, 7, 6, 0, 0, 932, 933, 7, 13, 0, 0, 933, 934, 7, 1, 0, 0, 934, 935, 7, 18, 0, 0, 935, 936, 7, 3, 0, 0, 936, 144, 1, 0, 0, 0, 937, 938, 5, 41, 0, 0, 938, 146, 1, 0, 0, 0, 939, 940, 7, 5, 0, 0, 940, 941, 7, 6, 0, 0, 941, 942, 7, 20, 0, 0, 942, 943, 7, 3, 0, 0, 943, 148, 1, 0, 0, 0, 944, 945, 5, 61, 0, 0, 945, 946, 5, 61, 0, 0, 946, 150, 1, 0, 0, 0, 947, 948, 5, 61, 0, 0, 948, 949, 5, 126, 0, 0, 949, 152, 1, 0, 0, 0, 950, 951, 5, 33, 0, 0, 951, 952, 5, 61, 0, 0, 952, 154, 1, 0, 0, 0, 953, 954, 5, 60, 0, 0, 954, 156, 1, 0, 0, 0, 955, 956, 5, 60, 0, 0, 956, 957, 5, 61, 0, 0, 957, 158, 1, 0, 0, 0, 958, 959, 5, 62, 0, 0, 959, 160, 1, 0, 0, 0, 960, 961, 5, 62, 0, 0, 961, 962, 5, 61, 0, 0, 962, 162, 1, 0, 0, 0, 963, 964, 5, 43, 0, 0, 964, 164, 1, 0, 0, 0, 965, 966, 5, 45, 0, 0, 966, 166, 1, 0, 0, 0, 967, 968, 5, 42, 0, 0, 968, 168, 1, 0, 0, 0, 969, 970, 5, 47, 0, 0, 970, 170, 1, 0, 0, 0, 971, 972, 5, 37, 0, 0, 972, 172, 1, 0, 0, 0, 973, 974, 5, 123, 0, 0, 974, 174, 1, 0, 0, 0, 975, 976, 5, 125, 0, 0, 976, 176, 1, 0, 0, 0, 977, 978, 3, 47, 15, 0, 978, 979, 1, 0, 0, 0, 979, 980, 6, 80, 14, 0, 980, 178, 1, 0, 0, 0, 981, 984, 3, 141, 62, 0, 982, 985, 3, 77, 30, 0, 983, 985, 3, 91, 37, 0, 984, 982, 1, 0, 0, 0, 984, 983, 1, 0, 0, 0, 985, 989, 1, 0, 0, 0, 986, 988, 3, 93, 38, 0, 987, 986, 1, 0, 0, 0, 988, 991, 1, 0, 0, 0, 989, 987, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 999, 1, 0, 0, 0, 991, 989, 1, 0, 0, 0, 992, 994, 3, 141, 62, 0, 993, 995, 3, 75, 29, 0, 994, 993, 1, 0, 0, 0, 995, 996, 
1, 0, 0, 0, 996, 994, 1, 0, 0, 0, 996, 997, 1, 0, 0, 0, 997, 999, 1, 0, 0, 0, 998, 981, 1, 0, 0, 0, 998, 992, 1, 0, 0, 0, 999, 180, 1, 0, 0, 0, 1000, 1001, 5, 91, 0, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 82, 0, 0, 1003, 1004, 6, 82, 0, 0, 1004, 182, 1, 0, 0, 0, 1005, 1006, 5, 93, 0, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 83, 13, 0, 1008, 1009, 6, 83, 13, 0, 1009, 184, 1, 0, 0, 0, 1010, 1014, 3, 77, 30, 0, 1011, 1013, 3, 93, 38, 0, 1012, 1011, 1, 0, 0, 0, 1013, 1016, 1, 0, 0, 0, 1014, 1012, 1, 0, 0, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1027, 1, 0, 0, 0, 1016, 1014, 1, 0, 0, 0, 1017, 1020, 3, 91, 37, 0, 1018, 1020, 3, 85, 34, 0, 1019, 1017, 1, 0, 0, 0, 1019, 1018, 1, 0, 0, 0, 1020, 1022, 1, 0, 0, 0, 1021, 1023, 3, 93, 38, 0, 1022, 1021, 1, 0, 0, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1022, 1, 0, 0, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1027, 1, 0, 0, 0, 1026, 1010, 1, 0, 0, 0, 1026, 1019, 1, 0, 0, 0, 1027, 186, 1, 0, 0, 0, 1028, 1030, 3, 87, 35, 0, 1029, 1031, 3, 89, 36, 0, 1030, 1029, 1, 0, 0, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 3, 87, 35, 0, 1035, 188, 1, 0, 0, 0, 1036, 1037, 3, 187, 85, 0, 1037, 190, 1, 0, 0, 0, 1038, 1039, 3, 67, 25, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 87, 12, 0, 1041, 192, 1, 0, 0, 0, 1042, 1043, 3, 69, 26, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 88, 12, 0, 1045, 194, 1, 0, 0, 0, 1046, 1047, 3, 71, 27, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 89, 12, 0, 1049, 196, 1, 0, 0, 0, 1050, 1051, 3, 181, 82, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 90, 15, 0, 1053, 1054, 6, 90, 16, 0, 1054, 198, 1, 0, 0, 0, 1055, 1056, 3, 73, 28, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 91, 17, 0, 1058, 1059, 6, 91, 13, 0, 1059, 200, 1, 0, 0, 0, 1060, 1061, 3, 71, 27, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 92, 12, 0, 1063, 202, 1, 0, 0, 0, 1064, 1065, 3, 67, 25, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 93, 12, 0, 1067, 204, 1, 0, 0, 0, 1068, 1069, 3, 69, 26, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 94, 12, 0, 1071, 206, 1, 0, 0, 0, 1072, 1073, 3, 73, 28, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 95, 17, 0, 1075, 1076, 6, 95, 13, 0, 1076, 208, 1, 0, 0, 0, 1077, 1078, 3, 181, 82, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 96, 15, 0, 1080, 210, 1, 0, 0, 0, 1081, 1082, 3, 183, 83, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 97, 18, 0, 1084, 212, 1, 0, 0, 0, 1085, 1086, 3, 111, 47, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 98, 19, 0, 1088, 214, 1, 0, 0, 0, 1089, 1090, 3, 113, 48, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 99, 20, 0, 1092, 216, 1, 0, 0, 0, 1093, 1094, 3, 107, 45, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 100, 21, 0, 1096, 218, 1, 0, 0, 0, 1097, 1098, 7, 16, 0, 0, 1098, 1099, 7, 3, 0, 0, 1099, 1100, 7, 5, 0, 0, 1100, 1101, 7, 12, 0, 0, 1101, 1102, 7, 0, 0, 0, 1102, 1103, 7, 12, 0, 0, 1103, 1104, 7, 5, 0, 0, 1104, 1105, 7, 12, 0, 0, 1105, 220, 1, 0, 0, 0, 1106, 1110, 8, 32, 0, 0, 1107, 1108, 5, 47, 0, 0, 1108, 1110, 8, 33, 0, 0, 1109, 1106, 1, 0, 0, 0, 1109, 1107, 1, 0, 0, 0, 1110, 222, 1, 0, 0, 0, 1111, 1113, 3, 221, 102, 0, 1112, 1111, 1, 0, 0, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1112, 1, 0, 0, 0, 1114, 1115, 1, 0, 0, 0, 1115, 224, 1, 0, 0, 0, 1116, 1117, 3, 223, 103, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 104, 22, 0, 1119, 226, 1, 0, 0, 0, 1120, 1121, 3, 95, 39, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 105, 23, 0, 1123, 228, 1, 0, 0, 0, 1124, 1125, 3, 67, 25, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 106, 12, 0, 1127, 230, 1, 0, 0, 0, 1128, 1129, 3, 69, 26, 0, 1129, 1130, 1, 0, 0, 0, 1130, 1131, 6, 107, 
12, 0, 1131, 232, 1, 0, 0, 0, 1132, 1133, 3, 71, 27, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1135, 6, 108, 12, 0, 1135, 234, 1, 0, 0, 0, 1136, 1137, 3, 73, 28, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 109, 17, 0, 1139, 1140, 6, 109, 13, 0, 1140, 236, 1, 0, 0, 0, 1141, 1142, 3, 117, 50, 0, 1142, 1143, 1, 0, 0, 0, 1143, 1144, 6, 110, 24, 0, 1144, 238, 1, 0, 0, 0, 1145, 1146, 3, 113, 48, 0, 1146, 1147, 1, 0, 0, 0, 1147, 1148, 6, 111, 20, 0, 1148, 240, 1, 0, 0, 0, 1149, 1150, 3, 141, 62, 0, 1150, 1151, 1, 0, 0, 0, 1151, 1152, 6, 112, 25, 0, 1152, 242, 1, 0, 0, 0, 1153, 1154, 3, 179, 81, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1156, 6, 113, 26, 0, 1156, 244, 1, 0, 0, 0, 1157, 1162, 3, 77, 30, 0, 1158, 1162, 3, 75, 29, 0, 1159, 1162, 3, 91, 37, 0, 1160, 1162, 3, 167, 75, 0, 1161, 1157, 1, 0, 0, 0, 1161, 1158, 1, 0, 0, 0, 1161, 1159, 1, 0, 0, 0, 1161, 1160, 1, 0, 0, 0, 1162, 246, 1, 0, 0, 0, 1163, 1166, 3, 77, 30, 0, 1164, 1166, 3, 167, 75, 0, 1165, 1163, 1, 0, 0, 0, 1165, 1164, 1, 0, 0, 0, 1166, 1170, 1, 0, 0, 0, 1167, 1169, 3, 245, 114, 0, 1168, 1167, 1, 0, 0, 0, 1169, 1172, 1, 0, 0, 0, 1170, 1168, 1, 0, 0, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1183, 1, 0, 0, 0, 1172, 1170, 1, 0, 0, 0, 1173, 1176, 3, 91, 37, 0, 1174, 1176, 3, 85, 34, 0, 1175, 1173, 1, 0, 0, 0, 1175, 1174, 1, 0, 0, 0, 1176, 1178, 1, 0, 0, 0, 1177, 1179, 3, 245, 114, 0, 1178, 1177, 1, 0, 0, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1178, 1, 0, 0, 0, 1180, 1181, 1, 0, 0, 0, 1181, 1183, 1, 0, 0, 0, 1182, 1165, 1, 0, 0, 0, 1182, 1175, 1, 0, 0, 0, 1183, 248, 1, 0, 0, 0, 1184, 1187, 3, 247, 115, 0, 1185, 1187, 3, 187, 85, 0, 1186, 1184, 1, 0, 0, 0, 1186, 1185, 1, 0, 0, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1186, 1, 0, 0, 0, 1188, 1189, 1, 0, 0, 0, 1189, 250, 1, 0, 0, 0, 1190, 1191, 3, 67, 25, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 117, 12, 0, 1193, 252, 1, 0, 0, 0, 1194, 1195, 3, 69, 26, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 118, 12, 0, 1197, 254, 1, 0, 0, 0, 1198, 1199, 3, 71, 27, 0, 1199, 1200, 1, 0, 0, 0, 1200, 1201, 6, 119, 12, 0, 1201, 256, 1, 0, 0, 0, 1202, 1203, 3, 73, 28, 0, 1203, 1204, 1, 0, 0, 0, 1204, 1205, 6, 120, 17, 0, 1205, 1206, 6, 120, 13, 0, 1206, 258, 1, 0, 0, 0, 1207, 1208, 3, 107, 45, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 121, 21, 0, 1210, 260, 1, 0, 0, 0, 1211, 1212, 3, 113, 48, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 122, 20, 0, 1214, 262, 1, 0, 0, 0, 1215, 1216, 3, 117, 50, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 123, 24, 0, 1218, 264, 1, 0, 0, 0, 1219, 1220, 3, 141, 62, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 124, 25, 0, 1222, 266, 1, 0, 0, 0, 1223, 1224, 3, 179, 81, 0, 1224, 1225, 1, 0, 0, 0, 1225, 1226, 6, 125, 26, 0, 1226, 268, 1, 0, 0, 0, 1227, 1228, 7, 12, 0, 0, 1228, 1229, 7, 2, 0, 0, 1229, 270, 1, 0, 0, 0, 1230, 1231, 3, 249, 116, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1233, 6, 127, 27, 0, 1233, 272, 1, 0, 0, 0, 1234, 1235, 3, 67, 25, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 6, 128, 12, 0, 1237, 274, 1, 0, 0, 0, 1238, 1239, 3, 69, 26, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1241, 6, 129, 12, 0, 1241, 276, 1, 0, 0, 0, 1242, 1243, 3, 71, 27, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 6, 130, 12, 0, 1245, 278, 1, 0, 0, 0, 1246, 1247, 3, 73, 28, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 131, 17, 0, 1249, 1250, 6, 131, 13, 0, 1250, 280, 1, 0, 0, 0, 1251, 1252, 3, 181, 82, 0, 1252, 1253, 1, 0, 0, 0, 1253, 1254, 6, 132, 15, 0, 1254, 1255, 6, 132, 28, 0, 1255, 282, 1, 0, 0, 0, 1256, 1257, 7, 7, 0, 0, 1257, 1258, 7, 9, 0, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 133, 29, 0, 1260, 284, 1, 0, 0, 0, 1261, 1262, 7, 19, 0, 0, 1262, 1263, 7, 1, 0, 0, 
1263, 1264, 7, 5, 0, 0, 1264, 1265, 7, 10, 0, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 134, 29, 0, 1267, 286, 1, 0, 0, 0, 1268, 1269, 8, 34, 0, 0, 1269, 288, 1, 0, 0, 0, 1270, 1272, 3, 287, 135, 0, 1271, 1270, 1, 0, 0, 0, 1272, 1273, 1, 0, 0, 0, 1273, 1271, 1, 0, 0, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 3, 111, 47, 0, 1276, 1278, 1, 0, 0, 0, 1277, 1271, 1, 0, 0, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1280, 1, 0, 0, 0, 1279, 1281, 3, 287, 135, 0, 1280, 1279, 1, 0, 0, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1280, 1, 0, 0, 0, 1282, 1283, 1, 0, 0, 0, 1283, 290, 1, 0, 0, 0, 1284, 1285, 3, 289, 136, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 137, 30, 0, 1287, 292, 1, 0, 0, 0, 1288, 1289, 3, 67, 25, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 138, 12, 0, 1291, 294, 1, 0, 0, 0, 1292, 1293, 3, 69, 26, 0, 1293, 1294, 1, 0, 0, 0, 1294, 1295, 6, 139, 12, 0, 1295, 296, 1, 0, 0, 0, 1296, 1297, 3, 71, 27, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 140, 12, 0, 1299, 298, 1, 0, 0, 0, 1300, 1301, 3, 73, 28, 0, 1301, 1302, 1, 0, 0, 0, 1302, 1303, 6, 141, 17, 0, 1303, 1304, 6, 141, 13, 0, 1304, 1305, 6, 141, 13, 0, 1305, 300, 1, 0, 0, 0, 1306, 1307, 3, 107, 45, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 142, 21, 0, 1309, 302, 1, 0, 0, 0, 1310, 1311, 3, 113, 48, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1313, 6, 143, 20, 0, 1313, 304, 1, 0, 0, 0, 1314, 1315, 3, 117, 50, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 144, 24, 0, 1317, 306, 1, 0, 0, 0, 1318, 1319, 3, 285, 134, 0, 1319, 1320, 1, 0, 0, 0, 1320, 1321, 6, 145, 31, 0, 1321, 308, 1, 0, 0, 0, 1322, 1323, 3, 249, 116, 0, 1323, 1324, 1, 0, 0, 0, 1324, 1325, 6, 146, 27, 0, 1325, 310, 1, 0, 0, 0, 1326, 1327, 3, 189, 86, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 147, 32, 0, 1329, 312, 1, 0, 0, 0, 1330, 1331, 3, 141, 62, 0, 1331, 1332, 1, 0, 0, 0, 1332, 1333, 6, 148, 25, 0, 1333, 314, 1, 0, 0, 0, 1334, 1335, 3, 179, 81, 0, 1335, 1336, 1, 0, 0, 0, 1336, 1337, 6, 149, 26, 0, 1337, 316, 1, 0, 0, 0, 1338, 1339, 3, 67, 25, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1341, 6, 150, 12, 0, 1341, 318, 1, 0, 0, 0, 1342, 1343, 3, 69, 26, 0, 1343, 1344, 1, 0, 0, 0, 1344, 1345, 6, 151, 12, 0, 1345, 320, 1, 0, 0, 0, 1346, 1347, 3, 71, 27, 0, 1347, 1348, 1, 0, 0, 0, 1348, 1349, 6, 152, 12, 0, 1349, 322, 1, 0, 0, 0, 1350, 1351, 3, 73, 28, 0, 1351, 1352, 1, 0, 0, 0, 1352, 1353, 6, 153, 17, 0, 1353, 1354, 6, 153, 13, 0, 1354, 324, 1, 0, 0, 0, 1355, 1356, 3, 117, 50, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 154, 24, 0, 1358, 326, 1, 0, 0, 0, 1359, 1360, 3, 141, 62, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 155, 25, 0, 1362, 328, 1, 0, 0, 0, 1363, 1364, 3, 179, 81, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 156, 26, 0, 1366, 330, 1, 0, 0, 0, 1367, 1368, 3, 189, 86, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 157, 32, 0, 1370, 332, 1, 0, 0, 0, 1371, 1372, 3, 185, 84, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1374, 6, 158, 33, 0, 1374, 334, 1, 0, 0, 0, 1375, 1376, 3, 67, 25, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 159, 12, 0, 1378, 336, 1, 0, 0, 0, 1379, 1380, 3, 69, 26, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 160, 12, 0, 1382, 338, 1, 0, 0, 0, 1383, 1384, 3, 71, 27, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 161, 12, 0, 1386, 340, 1, 0, 0, 0, 1387, 1388, 3, 73, 28, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 162, 17, 0, 1390, 1391, 6, 162, 13, 0, 1391, 342, 1, 0, 0, 0, 1392, 1393, 7, 1, 0, 0, 1393, 1394, 7, 9, 0, 0, 1394, 1395, 7, 15, 0, 0, 1395, 1396, 7, 7, 0, 0, 1396, 344, 1, 0, 0, 0, 1397, 1398, 3, 67, 25, 0, 1398, 1399, 1, 0, 0, 0, 1399, 1400, 6, 164, 12, 0, 1400, 346, 1, 0, 0, 0, 1401, 1402, 3, 69, 
26, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 165, 12, 0, 1404, 348, 1, 0, 0, 0, 1405, 1406, 3, 71, 27, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1408, 6, 166, 12, 0, 1408, 350, 1, 0, 0, 0, 1409, 1410, 3, 183, 83, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1412, 6, 167, 18, 0, 1412, 1413, 6, 167, 13, 0, 1413, 352, 1, 0, 0, 0, 1414, 1415, 3, 111, 47, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 168, 19, 0, 1417, 354, 1, 0, 0, 0, 1418, 1424, 3, 85, 34, 0, 1419, 1424, 3, 75, 29, 0, 1420, 1424, 3, 117, 50, 0, 1421, 1424, 3, 77, 30, 0, 1422, 1424, 3, 91, 37, 0, 1423, 1418, 1, 0, 0, 0, 1423, 1419, 1, 0, 0, 0, 1423, 1420, 1, 0, 0, 0, 1423, 1421, 1, 0, 0, 0, 1423, 1422, 1, 0, 0, 0, 1424, 1425, 1, 0, 0, 0, 1425, 1423, 1, 0, 0, 0, 1425, 1426, 1, 0, 0, 0, 1426, 356, 1, 0, 0, 0, 1427, 1428, 3, 67, 25, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 170, 12, 0, 1430, 358, 1, 0, 0, 0, 1431, 1432, 3, 69, 26, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 171, 12, 0, 1434, 360, 1, 0, 0, 0, 1435, 1436, 3, 71, 27, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 172, 12, 0, 1438, 362, 1, 0, 0, 0, 1439, 1440, 3, 73, 28, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 173, 17, 0, 1442, 1443, 6, 173, 13, 0, 1443, 364, 1, 0, 0, 0, 1444, 1445, 3, 111, 47, 0, 1445, 1446, 1, 0, 0, 0, 1446, 1447, 6, 174, 19, 0, 1447, 366, 1, 0, 0, 0, 1448, 1449, 3, 113, 48, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1451, 6, 175, 20, 0, 1451, 368, 1, 0, 0, 0, 1452, 1453, 3, 117, 50, 0, 1453, 1454, 1, 0, 0, 0, 1454, 1455, 6, 176, 24, 0, 1455, 370, 1, 0, 0, 0, 1456, 1457, 3, 283, 133, 0, 1457, 1458, 1, 0, 0, 0, 1458, 1459, 6, 177, 34, 0, 1459, 1460, 6, 177, 35, 0, 1460, 372, 1, 0, 0, 0, 1461, 1462, 3, 223, 103, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 178, 22, 0, 1464, 374, 1, 0, 0, 0, 1465, 1466, 3, 95, 39, 0, 1466, 1467, 1, 0, 0, 0, 1467, 1468, 6, 179, 23, 0, 1468, 376, 1, 0, 0, 0, 1469, 1470, 3, 67, 25, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 180, 12, 0, 1472, 378, 1, 0, 0, 0, 1473, 1474, 3, 69, 26, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 181, 12, 0, 1476, 380, 1, 0, 0, 0, 1477, 1478, 3, 71, 27, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 182, 12, 0, 1480, 382, 1, 0, 0, 0, 1481, 1482, 3, 73, 28, 0, 1482, 1483, 1, 0, 0, 0, 1483, 1484, 6, 183, 17, 0, 1484, 1485, 6, 183, 13, 0, 1485, 1486, 6, 183, 13, 0, 1486, 384, 1, 0, 0, 0, 1487, 1488, 3, 113, 48, 0, 1488, 1489, 1, 0, 0, 0, 1489, 1490, 6, 184, 20, 0, 1490, 386, 1, 0, 0, 0, 1491, 1492, 3, 117, 50, 0, 1492, 1493, 1, 0, 0, 0, 1493, 1494, 6, 185, 24, 0, 1494, 388, 1, 0, 0, 0, 1495, 1496, 3, 249, 116, 0, 1496, 1497, 1, 0, 0, 0, 1497, 1498, 6, 186, 27, 0, 1498, 390, 1, 0, 0, 0, 1499, 1500, 3, 67, 25, 0, 1500, 1501, 1, 0, 0, 0, 1501, 1502, 6, 187, 12, 0, 1502, 392, 1, 0, 0, 0, 1503, 1504, 3, 69, 26, 0, 1504, 1505, 1, 0, 0, 0, 1505, 1506, 6, 188, 12, 0, 1506, 394, 1, 0, 0, 0, 1507, 1508, 3, 71, 27, 0, 1508, 1509, 1, 0, 0, 0, 1509, 1510, 6, 189, 12, 0, 1510, 396, 1, 0, 0, 0, 1511, 1512, 3, 73, 28, 0, 1512, 1513, 1, 0, 0, 0, 1513, 1514, 6, 190, 17, 0, 1514, 1515, 6, 190, 13, 0, 1515, 398, 1, 0, 0, 0, 1516, 1517, 7, 35, 0, 0, 1517, 1518, 7, 7, 0, 0, 1518, 1519, 7, 1, 0, 0, 1519, 1520, 7, 9, 0, 0, 1520, 400, 1, 0, 0, 0, 1521, 1522, 3, 269, 126, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 192, 36, 0, 1524, 402, 1, 0, 0, 0, 1525, 1526, 3, 283, 133, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 193, 34, 0, 1528, 1529, 6, 193, 13, 0, 1529, 1530, 6, 193, 0, 0, 1530, 404, 1, 0, 0, 0, 1531, 1532, 7, 20, 0, 0, 1532, 1533, 7, 2, 0, 0, 1533, 1534, 7, 1, 0, 0, 1534, 1535, 7, 9, 0, 0, 1535, 1536, 7, 17, 0, 0, 1536, 1537, 1, 0, 0, 0, 1537, 1538, 6, 194, 13, 0, 
1538, 1539, 6, 194, 0, 0, 1539, 406, 1, 0, 0, 0, 1540, 1541, 3, 223, 103, 0, 1541, 1542, 1, 0, 0, 0, 1542, 1543, 6, 195, 22, 0, 1543, 408, 1, 0, 0, 0, 1544, 1545, 3, 95, 39, 0, 1545, 1546, 1, 0, 0, 0, 1546, 1547, 6, 196, 23, 0, 1547, 410, 1, 0, 0, 0, 1548, 1549, 3, 111, 47, 0, 1549, 1550, 1, 0, 0, 0, 1550, 1551, 6, 197, 19, 0, 1551, 412, 1, 0, 0, 0, 1552, 1553, 3, 185, 84, 0, 1553, 1554, 1, 0, 0, 0, 1554, 1555, 6, 198, 33, 0, 1555, 414, 1, 0, 0, 0, 1556, 1557, 3, 189, 86, 0, 1557, 1558, 1, 0, 0, 0, 1558, 1559, 6, 199, 32, 0, 1559, 416, 1, 0, 0, 0, 1560, 1561, 3, 67, 25, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 200, 12, 0, 1563, 418, 1, 0, 0, 0, 1564, 1565, 3, 69, 26, 0, 1565, 1566, 1, 0, 0, 0, 1566, 1567, 6, 201, 12, 0, 1567, 420, 1, 0, 0, 0, 1568, 1569, 3, 71, 27, 0, 1569, 1570, 1, 0, 0, 0, 1570, 1571, 6, 202, 12, 0, 1571, 422, 1, 0, 0, 0, 1572, 1573, 3, 73, 28, 0, 1573, 1574, 1, 0, 0, 0, 1574, 1575, 6, 203, 17, 0, 1575, 1576, 6, 203, 13, 0, 1576, 424, 1, 0, 0, 0, 1577, 1578, 3, 223, 103, 0, 1578, 1579, 1, 0, 0, 0, 1579, 1580, 6, 204, 22, 0, 1580, 1581, 6, 204, 13, 0, 1581, 1582, 6, 204, 37, 0, 1582, 426, 1, 0, 0, 0, 1583, 1584, 3, 95, 39, 0, 1584, 1585, 1, 0, 0, 0, 1585, 1586, 6, 205, 23, 0, 1586, 1587, 6, 205, 13, 0, 1587, 1588, 6, 205, 37, 0, 1588, 428, 1, 0, 0, 0, 1589, 1590, 3, 67, 25, 0, 1590, 1591, 1, 0, 0, 0, 1591, 1592, 6, 206, 12, 0, 1592, 430, 1, 0, 0, 0, 1593, 1594, 3, 69, 26, 0, 1594, 1595, 1, 0, 0, 0, 1595, 1596, 6, 207, 12, 0, 1596, 432, 1, 0, 0, 0, 1597, 1598, 3, 71, 27, 0, 1598, 1599, 1, 0, 0, 0, 1599, 1600, 6, 208, 12, 0, 1600, 434, 1, 0, 0, 0, 1601, 1602, 3, 111, 47, 0, 1602, 1603, 1, 0, 0, 0, 1603, 1604, 6, 209, 19, 0, 1604, 1605, 6, 209, 13, 0, 1605, 1606, 6, 209, 11, 0, 1606, 436, 1, 0, 0, 0, 1607, 1608, 3, 113, 48, 0, 1608, 1609, 1, 0, 0, 0, 1609, 1610, 6, 210, 20, 0, 1610, 1611, 6, 210, 13, 0, 1611, 1612, 6, 210, 11, 0, 1612, 438, 1, 0, 0, 0, 1613, 1614, 3, 67, 25, 0, 1614, 1615, 1, 0, 0, 0, 1615, 1616, 6, 211, 12, 0, 1616, 440, 1, 0, 0, 0, 1617, 1618, 3, 69, 26, 0, 1618, 1619, 1, 0, 0, 0, 1619, 1620, 6, 212, 12, 0, 1620, 442, 1, 0, 0, 0, 1621, 1622, 3, 71, 27, 0, 1622, 1623, 1, 0, 0, 0, 1623, 1624, 6, 213, 12, 0, 1624, 444, 1, 0, 0, 0, 1625, 1626, 3, 189, 86, 0, 1626, 1627, 1, 0, 0, 0, 1627, 1628, 6, 214, 13, 0, 1628, 1629, 6, 214, 0, 0, 1629, 1630, 6, 214, 32, 0, 1630, 446, 1, 0, 0, 0, 1631, 1632, 3, 185, 84, 0, 1632, 1633, 1, 0, 0, 0, 1633, 1634, 6, 215, 13, 0, 1634, 1635, 6, 215, 0, 0, 1635, 1636, 6, 215, 33, 0, 1636, 448, 1, 0, 0, 0, 1637, 1638, 3, 101, 42, 0, 1638, 1639, 1, 0, 0, 0, 1639, 1640, 6, 216, 13, 0, 1640, 1641, 6, 216, 0, 0, 1641, 1642, 6, 216, 38, 0, 1642, 450, 1, 0, 0, 0, 1643, 1644, 3, 73, 28, 0, 1644, 1645, 1, 0, 0, 0, 1645, 1646, 6, 217, 17, 0, 1646, 1647, 6, 217, 13, 0, 1647, 452, 1, 0, 0, 0, 1648, 1649, 3, 73, 28, 0, 1649, 1650, 1, 0, 0, 0, 1650, 1651, 6, 218, 17, 0, 1651, 1652, 6, 218, 13, 0, 1652, 454, 1, 0, 0, 0, 1653, 1654, 3, 283, 133, 0, 1654, 1655, 1, 0, 0, 0, 1655, 1656, 6, 219, 34, 0, 1656, 456, 1, 0, 0, 0, 1657, 1658, 3, 269, 126, 0, 1658, 1659, 1, 0, 0, 0, 1659, 1660, 6, 220, 36, 0, 1660, 458, 1, 0, 0, 0, 1661, 1662, 3, 117, 50, 0, 1662, 1663, 1, 0, 0, 0, 1663, 1664, 6, 221, 24, 0, 1664, 460, 1, 0, 0, 0, 1665, 1666, 3, 113, 48, 0, 1666, 1667, 1, 0, 0, 0, 1667, 1668, 6, 222, 20, 0, 1668, 462, 1, 0, 0, 0, 1669, 1670, 3, 189, 86, 0, 1670, 1671, 1, 0, 0, 0, 1671, 1672, 6, 223, 32, 0, 1672, 464, 1, 0, 0, 0, 1673, 1674, 3, 185, 84, 0, 1674, 1675, 1, 0, 0, 0, 1675, 1676, 6, 224, 33, 0, 1676, 466, 1, 0, 0, 0, 1677, 1678, 3, 67, 25, 0, 
1678, 1679, 1, 0, 0, 0, 1679, 1680, 6, 225, 12, 0, 1680, 468, 1, 0, 0, 0, 1681, 1682, 3, 69, 26, 0, 1682, 1683, 1, 0, 0, 0, 1683, 1684, 6, 226, 12, 0, 1684, 470, 1, 0, 0, 0, 1685, 1686, 3, 71, 27, 0, 1686, 1687, 1, 0, 0, 0, 1687, 1688, 6, 227, 12, 0, 1688, 472, 1, 0, 0, 0, 67, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 693, 703, 707, 710, 719, 721, 732, 751, 756, 765, 772, 777, 779, 790, 798, 801, 803, 808, 813, 819, 826, 831, 837, 840, 848, 852, 984, 989, 996, 998, 1014, 1019, 1024, 1026, 1032, 1109, 1114, 1161, 1165, 1170, 1175, 1180, 1182, 1186, 1188, 1273, 1277, 1282, 1423, 1425, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 13, 0, 5, 16, 0, 5, 11, 0, 5, 14, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 28358a0f614e6..e4f8699993da6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -27,8 +27,8 @@ public class EsqlBaseLexer extends LexerConfig { public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, - DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, + WHERE=16, JOIN_LOOKUP=17, DEV_CHANGE_POINT=18, DEV_INLINESTATS=19, DEV_LOOKUP=20, + DEV_METRICS=21, DEV_JOIN_FULL=22, DEV_JOIN_LEFT=23, DEV_JOIN_RIGHT=24, UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, @@ -48,14 +48,16 @@ public class EsqlBaseLexer extends LexerConfig { SHOW_MULTILINE_COMMENT=109, SHOW_WS=110, SETTING=111, SETTING_LINE_COMMENT=112, SETTTING_MULTILINE_COMMENT=113, SETTING_WS=114, LOOKUP_LINE_COMMENT=115, LOOKUP_MULTILINE_COMMENT=116, LOOKUP_WS=117, LOOKUP_FIELD_LINE_COMMENT=118, - LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, USING=121, JOIN_LINE_COMMENT=122, - JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, METRICS_MULTILINE_COMMENT=126, - METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, CLOSING_METRICS_MULTILINE_COMMENT=129, - CLOSING_METRICS_WS=130; + LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, JOIN=121, USING=122, + JOIN_LINE_COMMENT=123, JOIN_MULTILINE_COMMENT=124, JOIN_WS=125, METRICS_LINE_COMMENT=126, + METRICS_MULTILINE_COMMENT=127, METRICS_WS=128, CLOSING_METRICS_LINE_COMMENT=129, + CLOSING_METRICS_MULTILINE_COMMENT=130, CLOSING_METRICS_WS=131, CHANGE_POINT_LINE_COMMENT=132, + CHANGE_POINT_MULTILINE_COMMENT=133, CHANGE_POINT_WS=134; public static final int EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, - LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, JOIN_MODE=13, METRICS_MODE=14, CLOSING_METRICS_MODE=15; + LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, 
JOIN_MODE=13, METRICS_MODE=14, CLOSING_METRICS_MODE=15, + CHANGE_POINT_MODE=16; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; @@ -64,22 +66,22 @@ public class EsqlBaseLexer extends LexerConfig { "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "JOIN_MODE", "METRICS_MODE", - "CLOSING_METRICS_MODE" + "CLOSING_METRICS_MODE", "CHANGE_POINT_MODE" }; private static String[] makeRuleNames() { return new String[] { "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", - "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", - "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", - "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", - "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", - "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", - "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "JOIN_LOOKUP", "DEV_CHANGE_POINT", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", + "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", + "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", + "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", + "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", + "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", @@ -108,14 +110,17 @@ private static String[] makeRuleNames() { "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", - "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN_JOIN", + "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN", "JOIN_AS", "JOIN_ON", "USING", "JOIN_UNQUOTED_SOURCE", "JOIN_QUOTED_SOURCE", "JOIN_COLON", "JOIN_UNQUOTED_IDENTIFER", "JOIN_QUOTED_IDENTIFIER", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_PIPE", "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", - "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" + "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE", + "CHANGE_POINT_PIPE", "CHANGE_POINT_ON", 
"CHANGE_POINT_AS", "CHANGE_POINT_DOT", + "CHANGE_POINT_COMMA", "CHANGE_POINT_QUOTED_IDENTIFIER", "CHANGE_POINT_UNQUOTED_IDENTIFIER", + "CHANGE_POINT_LINE_COMMENT", "CHANGE_POINT_MULTILINE_COMMENT", "CHANGE_POINT_WS" }; } public static final String[] ruleNames = makeRuleNames(); @@ -124,17 +129,17 @@ private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", - "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", - "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'{'", "'}'", - null, null, "']'", null, null, null, null, null, null, null, null, "'metadata'", - null, null, null, null, null, null, null, null, "'as'", null, null, null, - "'on'", "'with'", null, null, null, null, null, null, null, null, null, - null, "'info'", null, null, null, null, null, null, null, null, null, - null, null, null, null, "'USING'" + "'sort'", "'stats'", "'where'", "'lookup'", null, null, null, null, null, + null, null, null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", + "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", + "'{'", "'}'", null, null, "']'", null, null, null, null, null, null, + null, null, "'metadata'", null, null, null, null, null, null, null, null, + "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, + null, null, null, null, null, "'info'", null, null, null, null, null, + null, null, null, null, null, null, null, null, "'join'", "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -142,13 +147,13 @@ private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", - "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", - "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", - "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "WHERE", "JOIN_LOOKUP", "DEV_CHANGE_POINT", "DEV_INLINESTATS", "DEV_LOOKUP", + "DEV_METRICS", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", + "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", + "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", 
"RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", @@ -163,9 +168,10 @@ private static String[] makeSymbolicNames() { "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", - "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" + "JOIN", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", + "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", + "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS", "CHANGE_POINT_LINE_COMMENT", + "CHANGE_POINT_MULTILINE_COMMENT", "CHANGE_POINT_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -230,63 +236,45 @@ public EsqlBaseLexer(CharStream input) { @Override public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 16: - return DEV_INLINESTATS_sempred((RuleContext)_localctx, predIndex); case 17: - return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); + return DEV_CHANGE_POINT_sempred((RuleContext)_localctx, predIndex); case 18: - return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); + return DEV_INLINESTATS_sempred((RuleContext)_localctx, predIndex); case 19: - return DEV_JOIN_sempred((RuleContext)_localctx, predIndex); + return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 20: - return DEV_JOIN_FULL_sempred((RuleContext)_localctx, predIndex); + return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); case 21: - return DEV_JOIN_LEFT_sempred((RuleContext)_localctx, predIndex); + return DEV_JOIN_FULL_sempred((RuleContext)_localctx, predIndex); case 22: - return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); + return DEV_JOIN_LEFT_sempred((RuleContext)_localctx, predIndex); case 23: - return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex); - case 112: - return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); - case 113: - return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 124: - return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); - case 125: - return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 148: - return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); - case 149: - return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 155: - return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); - case 156: - return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); + return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); } return true; } - private boolean DEV_INLINESTATS_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_CHANGE_POINT_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 0: return this.isDevVersion(); } return true; } - private boolean DEV_LOOKUP_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_INLINESTATS_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 1: return 
this.isDevVersion(); } return true; } - private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_LOOKUP_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 2: return this.isDevVersion(); } return true; } - private boolean DEV_JOIN_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 3: return this.isDevVersion(); @@ -314,276 +302,217 @@ private boolean DEV_JOIN_RIGHT_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 7: - return this.isDevVersion(); - } - return true; - } - private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 8: - return this.isDevVersion(); - } - return true; - } - private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 9: - return this.isDevVersion(); - } - return true; - } - private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 10: - return this.isDevVersion(); - } - return true; - } - private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 11: - return this.isDevVersion(); - } - return true; - } - private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 12: - return this.isDevVersion(); - } - return true; - } - private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 13: - return this.isDevVersion(); - } - return true; - } - private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 14: - return this.isDevVersion(); - } - return true; - } - private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 15: - return this.isDevVersion(); - } - return true; - } public static final String _serializedATN = - "\u0004\u0000\u0082\u065b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\u0004\u0000\u0086\u0699\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ - "\uffff\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ - "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ - "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ - "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ - "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ - "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007"+ - "\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007"+ - "\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007"+ - "\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007"+ - "\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007"+ - "\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007"+ - "\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007"+ 
- "\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007"+ - ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ - "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ - "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ - ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ - "@\u0002A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007"+ - "E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007"+ - "J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007"+ - "O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007"+ - "T\u0002U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007"+ - "Y\u0002Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007"+ - "^\u0002_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007"+ - "c\u0002d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007"+ - "h\u0002i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007"+ - "m\u0002n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007"+ - "r\u0002s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007"+ - "w\u0002x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007"+ - "|\u0002}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007"+ - "\u0080\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007"+ - "\u0083\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007"+ - "\u0086\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007"+ - "\u0089\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007"+ - "\u008c\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007"+ - "\u008f\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007"+ - "\u0092\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007"+ - "\u0095\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007"+ - "\u0098\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007"+ - "\u009b\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007"+ - "\u009e\u0002\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007"+ - "\u00a1\u0002\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007"+ - "\u00a4\u0002\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007"+ - "\u00a7\u0002\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007"+ - "\u00aa\u0002\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007"+ - "\u00ad\u0002\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007"+ - "\u00b0\u0002\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007"+ - "\u00b3\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007"+ - "\u00b6\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007"+ - "\u00b9\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007"+ - "\u00bc\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007"+ - "\u00bf\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007"+ - "\u00c2\u0002\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007"+ - "\u00c5\u0002\u00c6\u0007\u00c6\u0002\u00c7\u0007\u00c7\u0002\u00c8\u0007"+ - "\u00c8\u0002\u00c9\u0007\u00c9\u0002\u00ca\u0007\u00ca\u0002\u00cb\u0007"+ - "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ - "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ - 
"\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ - "\u00d4\u0002\u00d5\u0007\u00d5\u0002\u00d6\u0007\u00d6\u0002\u00d7\u0007"+ - "\u00d7\u0002\u00d8\u0007\u00d8\u0002\u00d9\u0007\u00d9\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ - "\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004\u0018\u0298\b\u0018\u000b"+ - "\u0018\f\u0018\u0299\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0005\u0019\u02a2\b\u0019\n\u0019\f\u0019\u02a5\t\u0019"+ - "\u0001\u0019\u0003\u0019\u02a8\b\u0019\u0001\u0019\u0003\u0019\u02ab\b"+ - "\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0005\u001a\u02b4\b\u001a\n\u001a\f\u001a\u02b7\t\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ - "\u0004\u001b\u02bf\b\u001b\u000b\u001b\f\u001b\u02c0\u0001\u001b\u0001"+ - "\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d\u0001"+ - "\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - " \u0001 
\u0001!\u0001!\u0003!\u02d4\b!\u0001!\u0004!\u02d7\b!\u000b!\f"+ - "!\u02d8\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003$\u02e2"+ - "\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003&\u02e9\b&\u0001\'\u0001\'"+ - "\u0001\'\u0005\'\u02ee\b\'\n\'\f\'\u02f1\t\'\u0001\'\u0001\'\u0001\'\u0001"+ - "\'\u0001\'\u0001\'\u0005\'\u02f9\b\'\n\'\f\'\u02fc\t\'\u0001\'\u0001\'"+ - "\u0001\'\u0001\'\u0001\'\u0003\'\u0303\b\'\u0001\'\u0003\'\u0306\b\'\u0003"+ - "\'\u0308\b\'\u0001(\u0004(\u030b\b(\u000b(\f(\u030c\u0001)\u0004)\u0310"+ - "\b)\u000b)\f)\u0311\u0001)\u0001)\u0005)\u0316\b)\n)\f)\u0319\t)\u0001"+ - ")\u0001)\u0004)\u031d\b)\u000b)\f)\u031e\u0001)\u0004)\u0322\b)\u000b"+ - ")\f)\u0323\u0001)\u0001)\u0005)\u0328\b)\n)\f)\u032b\t)\u0003)\u032d\b"+ - ")\u0001)\u0001)\u0001)\u0001)\u0004)\u0333\b)\u000b)\f)\u0334\u0001)\u0001"+ - ")\u0003)\u0339\b)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ - "/\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - "3\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u0001"+ - "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u0001"+ - "7\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001"+ - "<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ - "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001"+ - "H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001"+ - "M\u0001M\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ - "Q\u0001Q\u0001Q\u0003Q\u03bd\bQ\u0001Q\u0005Q\u03c0\bQ\nQ\fQ\u03c3\tQ"+ - "\u0001Q\u0001Q\u0004Q\u03c7\bQ\u000bQ\fQ\u03c8\u0003Q\u03cb\bQ\u0001R"+ - "\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001"+ - "T\u0001T\u0005T\u03d9\bT\nT\fT\u03dc\tT\u0001T\u0001T\u0003T\u03e0\bT"+ - "\u0001T\u0004T\u03e3\bT\u000bT\fT\u03e4\u0003T\u03e7\bT\u0001U\u0001U"+ - "\u0004U\u03eb\bU\u000bU\fU\u03ec\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ - "W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001"+ - "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ - "[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001"+ - "^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001"+ - "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ - "c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001"+ - "e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0003"+ - "f\u043a\bf\u0001g\u0004g\u043d\bg\u000bg\fg\u043e\u0001h\u0001h\u0001"+ - "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ - "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ - "m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ - "o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001"+ - "q\u0001r\u0001r\u0001r\u0001r\u0003r\u0470\br\u0001s\u0001s\u0003s\u0474"+ - "\bs\u0001s\u0005s\u0477\bs\ns\fs\u047a\ts\u0001s\u0001s\u0003s\u047e\b"+ - "s\u0001s\u0004s\u0481\bs\u000bs\fs\u0482\u0003s\u0485\bs\u0001t\u0001"+ - "t\u0004t\u0489\bt\u000bt\ft\u048a\u0001u\u0001u\u0001u\u0001u\u0001v\u0001"+ - 
"v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ - "x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ - "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ - "}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001"+ - "\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001"+ - "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001"+ - "\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ - "\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ - "\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04e0\b\u0088\u000b\u0088\f"+ - "\u0088\u04e1\u0001\u0088\u0001\u0088\u0003\u0088\u04e6\b\u0088\u0001\u0088"+ - "\u0004\u0088\u04e9\b\u0088\u000b\u0088\f\u0088\u04ea\u0001\u0089\u0001"+ - "\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ - "\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001"+ - "\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ - "\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ - "\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ - "\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ - "\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ - "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ - "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001"+ - "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ - "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ - "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ - "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ - "\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001"+ - "\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001"+ - "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ - "\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001"+ - "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ - "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004\u00a9\u057c\b\u00a9\u000b"+ - "\u00a9\f\u00a9\u057d\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ - 
"\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ - "\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002"+ + "\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002"+ + "\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002"+ + "\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002"+ + "\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e"+ + "\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011"+ + "\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014"+ + "\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017"+ + "\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a"+ + "\u0002\u001b\u0007\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d"+ + "\u0002\u001e\u0007\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!"+ + "\u0007!\u0002\"\u0007\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002"+ + "&\u0007&\u0002\'\u0007\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002"+ + "+\u0007+\u0002,\u0007,\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u0002"+ + "0\u00070\u00021\u00071\u00022\u00072\u00023\u00073\u00024\u00074\u0002"+ + "5\u00075\u00026\u00076\u00027\u00077\u00028\u00078\u00029\u00079\u0002"+ + ":\u0007:\u0002;\u0007;\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002"+ + "?\u0007?\u0002@\u0007@\u0002A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002"+ + "D\u0007D\u0002E\u0007E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002"+ + "I\u0007I\u0002J\u0007J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002"+ + "N\u0007N\u0002O\u0007O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002"+ + "S\u0007S\u0002T\u0007T\u0002U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002"+ + "X\u0007X\u0002Y\u0007Y\u0002Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002"+ + "]\u0007]\u0002^\u0007^\u0002_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002"+ + "b\u0007b\u0002c\u0007c\u0002d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002"+ + "g\u0007g\u0002h\u0007h\u0002i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002"+ + "l\u0007l\u0002m\u0007m\u0002n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002"+ + "q\u0007q\u0002r\u0007r\u0002s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002"+ + "v\u0007v\u0002w\u0007w\u0002x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002"+ + "{\u0007{\u0002|\u0007|\u0002}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f"+ + "\u0002\u0080\u0007\u0080\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082"+ + "\u0002\u0083\u0007\u0083\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085"+ + "\u0002\u0086\u0007\u0086\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088"+ + "\u0002\u0089\u0007\u0089\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b"+ + "\u0002\u008c\u0007\u008c\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e"+ + "\u0002\u008f\u0007\u008f\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091"+ + "\u0002\u0092\u0007\u0092\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094"+ + 
"\u0002\u0095\u0007\u0095\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097"+ + "\u0002\u0098\u0007\u0098\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a"+ + "\u0002\u009b\u0007\u009b\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d"+ + "\u0002\u009e\u0007\u009e\u0002\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0"+ + "\u0002\u00a1\u0007\u00a1\u0002\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3"+ + "\u0002\u00a4\u0007\u00a4\u0002\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6"+ + "\u0002\u00a7\u0007\u00a7\u0002\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9"+ + "\u0002\u00aa\u0007\u00aa\u0002\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac"+ + "\u0002\u00ad\u0007\u00ad\u0002\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af"+ + "\u0002\u00b0\u0007\u00b0\u0002\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2"+ + "\u0002\u00b3\u0007\u00b3\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5"+ + "\u0002\u00b6\u0007\u00b6\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8"+ + "\u0002\u00b9\u0007\u00b9\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb"+ + "\u0002\u00bc\u0007\u00bc\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be"+ + "\u0002\u00bf\u0007\u00bf\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1"+ + "\u0002\u00c2\u0007\u00c2\u0002\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4"+ + "\u0002\u00c5\u0007\u00c5\u0002\u00c6\u0007\u00c6\u0002\u00c7\u0007\u00c7"+ + "\u0002\u00c8\u0007\u00c8\u0002\u00c9\u0007\u00c9\u0002\u00ca\u0007\u00ca"+ + "\u0002\u00cb\u0007\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd"+ + "\u0002\u00ce\u0007\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0"+ + "\u0002\u00d1\u0007\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3"+ + "\u0002\u00d4\u0007\u00d4\u0002\u00d5\u0007\u00d5\u0002\u00d6\u0007\u00d6"+ + "\u0002\u00d7\u0007\u00d7\u0002\u00d8\u0007\u00d8\u0002\u00d9\u0007\u00d9"+ + "\u0002\u00da\u0007\u00da\u0002\u00db\u0007\u00db\u0002\u00dc\u0007\u00dc"+ + "\u0002\u00dd\u0007\u00dd\u0002\u00de\u0007\u00de\u0002\u00df\u0007\u00df"+ + "\u0002\u00e0\u0007\u00e0\u0002\u00e1\u0007\u00e1\u0002\u00e2\u0007\u00e2"+ + "\u0002\u00e3\u0007\u00e3\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + 
"\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0018\u0004\u0018\u02b4\b\u0018\u000b\u0018\f\u0018"+ + "\u02b5\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0005\u0019\u02be\b\u0019\n\u0019\f\u0019\u02c1\t\u0019\u0001\u0019"+ + "\u0003\u0019\u02c4\b\u0019\u0001\u0019\u0003\u0019\u02c7\b\u0019\u0001"+ + "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0005\u001a\u02d0\b\u001a\n\u001a\f\u001a\u02d3\t\u001a\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0004\u001b"+ + "\u02db\b\u001b\u000b\u001b\f\u001b\u02dc\u0001\u001b\u0001\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001"+ + "\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001"+ + "!\u0001!\u0003!\u02f0\b!\u0001!\u0004!\u02f3\b!\u000b!\f!\u02f4\u0001"+ + "\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003$\u02fe\b$\u0001%\u0001"+ + "%\u0001&\u0001&\u0001&\u0003&\u0305\b&\u0001\'\u0001\'\u0001\'\u0005\'"+ + "\u030a\b\'\n\'\f\'\u030d\t\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001"+ + "\'\u0005\'\u0315\b\'\n\'\f\'\u0318\t\'\u0001\'\u0001\'\u0001\'\u0001\'"+ + "\u0001\'\u0003\'\u031f\b\'\u0001\'\u0003\'\u0322\b\'\u0003\'\u0324\b\'"+ + "\u0001(\u0004(\u0327\b(\u000b(\f(\u0328\u0001)\u0004)\u032c\b)\u000b)"+ + "\f)\u032d\u0001)\u0001)\u0005)\u0332\b)\n)\f)\u0335\t)\u0001)\u0001)\u0004"+ + ")\u0339\b)\u000b)\f)\u033a\u0001)\u0004)\u033e\b)\u000b)\f)\u033f\u0001"+ + ")\u0001)\u0005)\u0344\b)\n)\f)\u0347\t)\u0003)\u0349\b)\u0001)\u0001)"+ + "\u0001)\u0001)\u0004)\u034f\b)\u000b)\f)\u0350\u0001)\u0001)\u0003)\u0355"+ + "\b)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + ",\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u0001"+ + "0\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u0001"+ + "3\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u00014\u00014\u0001"+ + "5\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ + "7\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001"+ + ":\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001"+ + "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001"+ + "?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001"+ + 
"A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001"+ + "E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001"+ + "I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001"+ + "N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001"+ + "Q\u0003Q\u03d9\bQ\u0001Q\u0005Q\u03dc\bQ\nQ\fQ\u03df\tQ\u0001Q\u0001Q"+ + "\u0004Q\u03e3\bQ\u000bQ\fQ\u03e4\u0003Q\u03e7\bQ\u0001R\u0001R\u0001R"+ + "\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0005"+ + "T\u03f5\bT\nT\fT\u03f8\tT\u0001T\u0001T\u0003T\u03fc\bT\u0001T\u0004T"+ + "\u03ff\bT\u000bT\fT\u0400\u0003T\u0403\bT\u0001U\u0001U\u0004U\u0407\b"+ + "U\u000bU\fU\u0408\u0001U\u0001U\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ + "W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001"+ + "Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ + "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001"+ + "^\u0001_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ + "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ + "c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001"+ + "e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0003f\u0456\bf\u0001"+ + "g\u0004g\u0459\bg\u000bg\fg\u045a\u0001h\u0001h\u0001h\u0001h\u0001i\u0001"+ + "i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001"+ + "k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001m\u0001"+ + "n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001"+ + "p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0003"+ + "r\u048a\br\u0001s\u0001s\u0003s\u048e\bs\u0001s\u0005s\u0491\bs\ns\fs"+ + "\u0494\ts\u0001s\u0001s\u0003s\u0498\bs\u0001s\u0004s\u049b\bs\u000bs"+ + "\fs\u049c\u0003s\u049f\bs\u0001t\u0001t\u0004t\u04a3\bt\u000bt\ft\u04a4"+ + "\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001"+ + "w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ + "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ + "|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ + "~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080"+ + "\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081"+ + "\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083"+ + "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084"+ + "\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085"+ + "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086"+ + "\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0088\u0004\u0088"+ + "\u04f8\b\u0088\u000b\u0088\f\u0088\u04f9\u0001\u0088\u0001\u0088\u0003"+ + "\u0088\u04fe\b\u0088\u0001\u0088\u0004\u0088\u0501\b\u0088\u000b\u0088"+ + "\f\u0088\u0502\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a"+ + "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b"+ + "\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d"+ + "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e"+ + "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f"+ + "\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091"+ + "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092"+ + "\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094"+ + 
"\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095"+ + "\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097"+ + "\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098"+ + "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099"+ + "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ + "\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c"+ + "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e"+ + "\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f"+ + "\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1"+ + "\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2"+ + "\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3"+ + "\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5"+ + "\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6"+ + "\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00a9\u0004\u00a9\u0590\b\u00a9\u000b\u00a9\f\u00a9"+ + "\u0591\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001"+ + "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001"+ + "\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ + "\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af\u0001"+ + "\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ + "\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001"+ + "\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001"+ + "\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001"+ + "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001"+ + "\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001"+ + "\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001"+ + "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001"+ + "\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001"+ + "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001"+ + "\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ + "\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001"+ "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001"+ "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ "\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ @@ -604,842 +533,877 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00d5\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001"+ "\u00d6\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001"+ "\u00d7\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001"+ - "\u00d8\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0002"+ - "\u02b5\u02fa\u0000\u00da\u0010\u0001\u0012\u0002\u0014\u0003\u0016\u0004"+ - "\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e"+ - ",\u000f.\u00100\u00112\u00124\u00136\u00148\u0015:\u0016<\u0017>\u0018"+ - "@\u0019B\u001aD\u001bF\u001cH\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000"+ - "T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u001e`\u001fb d!f\"h#j$l%n&p\'r"+ - 
"(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6"+ - "\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2"+ - "@\u00a4A\u00a6B\u00a8C\u00aaD\u00acE\u00aeF\u00b0\u0000\u00b2G\u00b4H"+ - "\u00b6I\u00b8J\u00ba\u0000\u00bcK\u00beL\u00c0M\u00c2N\u00c4\u0000\u00c6"+ - "\u0000\u00c8O\u00caP\u00ccQ\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4"+ - "\u0000\u00d6\u0000\u00d8\u0000\u00daR\u00dc\u0000\u00deS\u00e0\u0000\u00e2"+ - "\u0000\u00e4T\u00e6U\u00e8V\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0"+ - "\u0000\u00f2\u0000\u00f4\u0000\u00f6\u0000\u00f8W\u00faX\u00fcY\u00fe"+ - "Z\u0100\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108\u0000\u010a\u0000"+ - "\u010c[\u010e\u0000\u0110\\\u0112]\u0114^\u0116\u0000\u0118\u0000\u011a"+ - "_\u011c`\u011e\u0000\u0120a\u0122\u0000\u0124b\u0126c\u0128d\u012a\u0000"+ - "\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134\u0000\u0136\u0000"+ - "\u0138\u0000\u013a\u0000\u013ce\u013ef\u0140g\u0142\u0000\u0144\u0000"+ - "\u0146\u0000\u0148\u0000\u014a\u0000\u014c\u0000\u014eh\u0150i\u0152j"+ - "\u0154\u0000\u0156k\u0158l\u015am\u015cn\u015e\u0000\u0160\u0000\u0162"+ - "o\u0164p\u0166q\u0168r\u016a\u0000\u016c\u0000\u016e\u0000\u0170\u0000"+ - "\u0172\u0000\u0174\u0000\u0176\u0000\u0178s\u017at\u017cu\u017e\u0000"+ - "\u0180\u0000\u0182\u0000\u0184\u0000\u0186v\u0188w\u018ax\u018c\u0000"+ - "\u018e\u0000\u0190\u0000\u0192\u0000\u0194y\u0196\u0000\u0198\u0000\u019a"+ - "\u0000\u019c\u0000\u019e\u0000\u01a0z\u01a2{\u01a4|\u01a6\u0000\u01a8"+ - "\u0000\u01aa\u0000\u01ac}\u01ae~\u01b0\u007f\u01b2\u0000\u01b4\u0000\u01b6"+ - "\u0080\u01b8\u0081\u01ba\u0082\u01bc\u0000\u01be\u0000\u01c0\u0000\u01c2"+ - "\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b"+ - "\f\r\u000e\u000f$\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002"+ - "\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000"+ - "OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002"+ - "\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002\u0000"+ - "MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002"+ - "\u0000JJjj\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ - "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004"+ - "\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002"+ - "\u0000YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b"+ - "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0676\u0000\u0010\u0001\u0000\u0000"+ - "\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000"+ - "\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000"+ - "\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000"+ - "\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000"+ - "\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000"+ - "&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001"+ - "\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000"+ - "\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u0000"+ - "4\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00008\u0001"+ - "\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000"+ - "\u0000\u0000>\u0001\u0000\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000"+ - "B\u0001\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001"+ - "\u0000\u0000\u0000\u0001H\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000"+ - 
"\u0000\u0001`\u0001\u0000\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001"+ - "d\u0001\u0000\u0000\u0000\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001"+ - "\u0000\u0000\u0000\u0001j\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000"+ - "\u0000\u0001n\u0001\u0000\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001"+ - "r\u0001\u0000\u0000\u0000\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001"+ - "\u0000\u0000\u0000\u0001x\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000"+ - "\u0000\u0001|\u0001\u0000\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001"+ - "\u0080\u0001\u0000\u0000\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001"+ - "\u0084\u0001\u0000\u0000\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001"+ - "\u0088\u0001\u0000\u0000\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001"+ - "\u008c\u0001\u0000\u0000\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001"+ - "\u0090\u0001\u0000\u0000\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001"+ - "\u0094\u0001\u0000\u0000\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001"+ - "\u0098\u0001\u0000\u0000\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001"+ - "\u009c\u0001\u0000\u0000\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001"+ - "\u00a0\u0001\u0000\u0000\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001"+ - "\u00a4\u0001\u0000\u0000\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001"+ - "\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001"+ - "\u00ac\u0001\u0000\u0000\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001"+ - "\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001"+ - "\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6\u0001\u0000\u0000\u0000\u0001"+ - "\u00b8\u0001\u0000\u0000\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001"+ - "\u00be\u0001\u0000\u0000\u0000\u0001\u00c0\u0001\u0000\u0000\u0000\u0001"+ - "\u00c2\u0001\u0000\u0000\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002"+ - "\u00c6\u0001\u0000\u0000\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0002"+ - "\u00ca\u0001\u0000\u0000\u0000\u0002\u00cc\u0001\u0000\u0000\u0000\u0003"+ - "\u00ce\u0001\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003"+ - "\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003"+ - "\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0003"+ - "\u00da\u0001\u0000\u0000\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003"+ - "\u00e0\u0001\u0000\u0000\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003"+ - "\u00e4\u0001\u0000\u0000\u0000\u0003\u00e6\u0001\u0000\u0000\u0000\u0003"+ - "\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004"+ - "\u00ec\u0001\u0000\u0000\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004"+ - "\u00f0\u0001\u0000\u0000\u0000\u0004\u00f2\u0001\u0000\u0000\u0000\u0004"+ - "\u00f8\u0001\u0000\u0000\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0004"+ - "\u00fc\u0001\u0000\u0000\u0000\u0004\u00fe\u0001\u0000\u0000\u0000\u0005"+ - "\u0100\u0001\u0000\u0000\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005"+ - "\u0104\u0001\u0000\u0000\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005"+ - "\u0108\u0001\u0000\u0000\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005"+ - "\u010c\u0001\u0000\u0000\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005"+ - "\u0110\u0001\u0000\u0000\u0000\u0005\u0112\u0001\u0000\u0000\u0000\u0005"+ - "\u0114\u0001\u0000\u0000\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006"+ - "\u0118\u0001\u0000\u0000\u0000\u0006\u011a\u0001\u0000\u0000\u0000\u0006"+ - "\u011c\u0001\u0000\u0000\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006"+ - 
"\u0122\u0001\u0000\u0000\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0006"+ - "\u0126\u0001\u0000\u0000\u0000\u0006\u0128\u0001\u0000\u0000\u0000\u0007"+ - "\u012a\u0001\u0000\u0000\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007"+ - "\u012e\u0001\u0000\u0000\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007"+ - "\u0132\u0001\u0000\u0000\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007"+ - "\u0136\u0001\u0000\u0000\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007"+ - "\u013a\u0001\u0000\u0000\u0000\u0007\u013c\u0001\u0000\u0000\u0000\u0007"+ - "\u013e\u0001\u0000\u0000\u0000\u0007\u0140\u0001\u0000\u0000\u0000\b\u0142"+ - "\u0001\u0000\u0000\u0000\b\u0144\u0001\u0000\u0000\u0000\b\u0146\u0001"+ - "\u0000\u0000\u0000\b\u0148\u0001\u0000\u0000\u0000\b\u014a\u0001\u0000"+ - "\u0000\u0000\b\u014c\u0001\u0000\u0000\u0000\b\u014e\u0001\u0000\u0000"+ - "\u0000\b\u0150\u0001\u0000\u0000\u0000\b\u0152\u0001\u0000\u0000\u0000"+ - "\t\u0154\u0001\u0000\u0000\u0000\t\u0156\u0001\u0000\u0000\u0000\t\u0158"+ - "\u0001\u0000\u0000\u0000\t\u015a\u0001\u0000\u0000\u0000\t\u015c\u0001"+ - "\u0000\u0000\u0000\n\u015e\u0001\u0000\u0000\u0000\n\u0160\u0001\u0000"+ - "\u0000\u0000\n\u0162\u0001\u0000\u0000\u0000\n\u0164\u0001\u0000\u0000"+ - "\u0000\n\u0166\u0001\u0000\u0000\u0000\n\u0168\u0001\u0000\u0000\u0000"+ - "\u000b\u016a\u0001\u0000\u0000\u0000\u000b\u016c\u0001\u0000\u0000\u0000"+ - "\u000b\u016e\u0001\u0000\u0000\u0000\u000b\u0170\u0001\u0000\u0000\u0000"+ - "\u000b\u0172\u0001\u0000\u0000\u0000\u000b\u0174\u0001\u0000\u0000\u0000"+ - "\u000b\u0176\u0001\u0000\u0000\u0000\u000b\u0178\u0001\u0000\u0000\u0000"+ - "\u000b\u017a\u0001\u0000\u0000\u0000\u000b\u017c\u0001\u0000\u0000\u0000"+ - "\f\u017e\u0001\u0000\u0000\u0000\f\u0180\u0001\u0000\u0000\u0000\f\u0182"+ - "\u0001\u0000\u0000\u0000\f\u0184\u0001\u0000\u0000\u0000\f\u0186\u0001"+ - "\u0000\u0000\u0000\f\u0188\u0001\u0000\u0000\u0000\f\u018a\u0001\u0000"+ - "\u0000\u0000\r\u018c\u0001\u0000\u0000\u0000\r\u018e\u0001\u0000\u0000"+ - "\u0000\r\u0190\u0001\u0000\u0000\u0000\r\u0192\u0001\u0000\u0000\u0000"+ - "\r\u0194\u0001\u0000\u0000\u0000\r\u0196\u0001\u0000\u0000\u0000\r\u0198"+ - "\u0001\u0000\u0000\u0000\r\u019a\u0001\u0000\u0000\u0000\r\u019c\u0001"+ - "\u0000\u0000\u0000\r\u019e\u0001\u0000\u0000\u0000\r\u01a0\u0001\u0000"+ - "\u0000\u0000\r\u01a2\u0001\u0000\u0000\u0000\r\u01a4\u0001\u0000\u0000"+ - "\u0000\u000e\u01a6\u0001\u0000\u0000\u0000\u000e\u01a8\u0001\u0000\u0000"+ - "\u0000\u000e\u01aa\u0001\u0000\u0000\u0000\u000e\u01ac\u0001\u0000\u0000"+ - "\u0000\u000e\u01ae\u0001\u0000\u0000\u0000\u000e\u01b0\u0001\u0000\u0000"+ - "\u0000\u000f\u01b2\u0001\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000"+ - "\u0000\u000f\u01b6\u0001\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000"+ - "\u0000\u000f\u01ba\u0001\u0000\u0000\u0000\u000f\u01bc\u0001\u0000\u0000"+ - "\u0000\u000f\u01be\u0001\u0000\u0000\u0000\u000f\u01c0\u0001\u0000\u0000"+ - "\u0000\u000f\u01c2\u0001\u0000\u0000\u0000\u0010\u01c4\u0001\u0000\u0000"+ - "\u0000\u0012\u01ce\u0001\u0000\u0000\u0000\u0014\u01d5\u0001\u0000\u0000"+ - "\u0000\u0016\u01de\u0001\u0000\u0000\u0000\u0018\u01e5\u0001\u0000\u0000"+ - "\u0000\u001a\u01ef\u0001\u0000\u0000\u0000\u001c\u01f6\u0001\u0000\u0000"+ - "\u0000\u001e\u01fd\u0001\u0000\u0000\u0000 \u0204\u0001\u0000\u0000\u0000"+ - "\"\u020c\u0001\u0000\u0000\u0000$\u0218\u0001\u0000\u0000\u0000&\u0221"+ - "\u0001\u0000\u0000\u0000(\u0227\u0001\u0000\u0000\u0000*\u022e\u0001\u0000"+ - 
"\u0000\u0000,\u0235\u0001\u0000\u0000\u0000.\u023d\u0001\u0000\u0000\u0000"+ - "0\u0245\u0001\u0000\u0000\u00002\u0254\u0001\u0000\u0000\u00004\u0260"+ - "\u0001\u0000\u0000\u00006\u026b\u0001\u0000\u0000\u00008\u0273\u0001\u0000"+ - "\u0000\u0000:\u027b\u0001\u0000\u0000\u0000<\u0283\u0001\u0000\u0000\u0000"+ - ">\u028c\u0001\u0000\u0000\u0000@\u0297\u0001\u0000\u0000\u0000B\u029d"+ - "\u0001\u0000\u0000\u0000D\u02ae\u0001\u0000\u0000\u0000F\u02be\u0001\u0000"+ - "\u0000\u0000H\u02c4\u0001\u0000\u0000\u0000J\u02c8\u0001\u0000\u0000\u0000"+ - "L\u02ca\u0001\u0000\u0000\u0000N\u02cc\u0001\u0000\u0000\u0000P\u02cf"+ - "\u0001\u0000\u0000\u0000R\u02d1\u0001\u0000\u0000\u0000T\u02da\u0001\u0000"+ - "\u0000\u0000V\u02dc\u0001\u0000\u0000\u0000X\u02e1\u0001\u0000\u0000\u0000"+ - "Z\u02e3\u0001\u0000\u0000\u0000\\\u02e8\u0001\u0000\u0000\u0000^\u0307"+ - "\u0001\u0000\u0000\u0000`\u030a\u0001\u0000\u0000\u0000b\u0338\u0001\u0000"+ - "\u0000\u0000d\u033a\u0001\u0000\u0000\u0000f\u033d\u0001\u0000\u0000\u0000"+ - "h\u0341\u0001\u0000\u0000\u0000j\u0345\u0001\u0000\u0000\u0000l\u0347"+ - "\u0001\u0000\u0000\u0000n\u034a\u0001\u0000\u0000\u0000p\u034c\u0001\u0000"+ - "\u0000\u0000r\u034e\u0001\u0000\u0000\u0000t\u0353\u0001\u0000\u0000\u0000"+ - "v\u0355\u0001\u0000\u0000\u0000x\u035b\u0001\u0000\u0000\u0000z\u0361"+ - "\u0001\u0000\u0000\u0000|\u0364\u0001\u0000\u0000\u0000~\u0367\u0001\u0000"+ - "\u0000\u0000\u0080\u036c\u0001\u0000\u0000\u0000\u0082\u0371\u0001\u0000"+ - "\u0000\u0000\u0084\u0373\u0001\u0000\u0000\u0000\u0086\u0377\u0001\u0000"+ - "\u0000\u0000\u0088\u037c\u0001\u0000\u0000\u0000\u008a\u0382\u0001\u0000"+ - "\u0000\u0000\u008c\u0385\u0001\u0000\u0000\u0000\u008e\u0387\u0001\u0000"+ - "\u0000\u0000\u0090\u038d\u0001\u0000\u0000\u0000\u0092\u038f\u0001\u0000"+ - "\u0000\u0000\u0094\u0394\u0001\u0000\u0000\u0000\u0096\u0397\u0001\u0000"+ - "\u0000\u0000\u0098\u039a\u0001\u0000\u0000\u0000\u009a\u039d\u0001\u0000"+ - "\u0000\u0000\u009c\u039f\u0001\u0000\u0000\u0000\u009e\u03a2\u0001\u0000"+ - "\u0000\u0000\u00a0\u03a4\u0001\u0000\u0000\u0000\u00a2\u03a7\u0001\u0000"+ - "\u0000\u0000\u00a4\u03a9\u0001\u0000\u0000\u0000\u00a6\u03ab\u0001\u0000"+ - "\u0000\u0000\u00a8\u03ad\u0001\u0000\u0000\u0000\u00aa\u03af\u0001\u0000"+ - "\u0000\u0000\u00ac\u03b1\u0001\u0000\u0000\u0000\u00ae\u03b3\u0001\u0000"+ - "\u0000\u0000\u00b0\u03b5\u0001\u0000\u0000\u0000\u00b2\u03ca\u0001\u0000"+ - "\u0000\u0000\u00b4\u03cc\u0001\u0000\u0000\u0000\u00b6\u03d1\u0001\u0000"+ - "\u0000\u0000\u00b8\u03e6\u0001\u0000\u0000\u0000\u00ba\u03e8\u0001\u0000"+ - "\u0000\u0000\u00bc\u03f0\u0001\u0000\u0000\u0000\u00be\u03f2\u0001\u0000"+ - "\u0000\u0000\u00c0\u03f6\u0001\u0000\u0000\u0000\u00c2\u03fa\u0001\u0000"+ - "\u0000\u0000\u00c4\u03fe\u0001\u0000\u0000\u0000\u00c6\u0403\u0001\u0000"+ - "\u0000\u0000\u00c8\u0408\u0001\u0000\u0000\u0000\u00ca\u040c\u0001\u0000"+ - "\u0000\u0000\u00cc\u0410\u0001\u0000\u0000\u0000\u00ce\u0414\u0001\u0000"+ - "\u0000\u0000\u00d0\u0419\u0001\u0000\u0000\u0000\u00d2\u041d\u0001\u0000"+ - "\u0000\u0000\u00d4\u0421\u0001\u0000\u0000\u0000\u00d6\u0425\u0001\u0000"+ - "\u0000\u0000\u00d8\u0429\u0001\u0000\u0000\u0000\u00da\u042d\u0001\u0000"+ - "\u0000\u0000\u00dc\u0439\u0001\u0000\u0000\u0000\u00de\u043c\u0001\u0000"+ - "\u0000\u0000\u00e0\u0440\u0001\u0000\u0000\u0000\u00e2\u0444\u0001\u0000"+ - "\u0000\u0000\u00e4\u0448\u0001\u0000\u0000\u0000\u00e6\u044c\u0001\u0000"+ - "\u0000\u0000\u00e8\u0450\u0001\u0000\u0000\u0000\u00ea\u0454\u0001\u0000"+ - 
"\u0000\u0000\u00ec\u0459\u0001\u0000\u0000\u0000\u00ee\u045d\u0001\u0000"+ - "\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2\u0466\u0001\u0000"+ - "\u0000\u0000\u00f4\u046f\u0001\u0000\u0000\u0000\u00f6\u0484\u0001\u0000"+ - "\u0000\u0000\u00f8\u0488\u0001\u0000\u0000\u0000\u00fa\u048c\u0001\u0000"+ - "\u0000\u0000\u00fc\u0490\u0001\u0000\u0000\u0000\u00fe\u0494\u0001\u0000"+ - "\u0000\u0000\u0100\u0498\u0001\u0000\u0000\u0000\u0102\u049d\u0001\u0000"+ - "\u0000\u0000\u0104\u04a1\u0001\u0000\u0000\u0000\u0106\u04a5\u0001\u0000"+ - "\u0000\u0000\u0108\u04a9\u0001\u0000\u0000\u0000\u010a\u04ae\u0001\u0000"+ - "\u0000\u0000\u010c\u04b3\u0001\u0000\u0000\u0000\u010e\u04b6\u0001\u0000"+ - "\u0000\u0000\u0110\u04ba\u0001\u0000\u0000\u0000\u0112\u04be\u0001\u0000"+ - "\u0000\u0000\u0114\u04c2\u0001\u0000\u0000\u0000\u0116\u04c6\u0001\u0000"+ - "\u0000\u0000\u0118\u04cb\u0001\u0000\u0000\u0000\u011a\u04d0\u0001\u0000"+ - "\u0000\u0000\u011c\u04d5\u0001\u0000\u0000\u0000\u011e\u04dc\u0001\u0000"+ - "\u0000\u0000\u0120\u04e5\u0001\u0000\u0000\u0000\u0122\u04ec\u0001\u0000"+ - "\u0000\u0000\u0124\u04f0\u0001\u0000\u0000\u0000\u0126\u04f4\u0001\u0000"+ - "\u0000\u0000\u0128\u04f8\u0001\u0000\u0000\u0000\u012a\u04fc\u0001\u0000"+ - "\u0000\u0000\u012c\u0502\u0001\u0000\u0000\u0000\u012e\u0506\u0001\u0000"+ - "\u0000\u0000\u0130\u050a\u0001\u0000\u0000\u0000\u0132\u050e\u0001\u0000"+ - "\u0000\u0000\u0134\u0512\u0001\u0000\u0000\u0000\u0136\u0516\u0001\u0000"+ - "\u0000\u0000\u0138\u051a\u0001\u0000\u0000\u0000\u013a\u051f\u0001\u0000"+ - "\u0000\u0000\u013c\u0524\u0001\u0000\u0000\u0000\u013e\u0528\u0001\u0000"+ - "\u0000\u0000\u0140\u052c\u0001\u0000\u0000\u0000\u0142\u0530\u0001\u0000"+ - "\u0000\u0000\u0144\u0535\u0001\u0000\u0000\u0000\u0146\u0539\u0001\u0000"+ - "\u0000\u0000\u0148\u053e\u0001\u0000\u0000\u0000\u014a\u0543\u0001\u0000"+ - "\u0000\u0000\u014c\u0547\u0001\u0000\u0000\u0000\u014e\u054b\u0001\u0000"+ - "\u0000\u0000\u0150\u054f\u0001\u0000\u0000\u0000\u0152\u0553\u0001\u0000"+ - "\u0000\u0000\u0154\u0557\u0001\u0000\u0000\u0000\u0156\u055c\u0001\u0000"+ - "\u0000\u0000\u0158\u0561\u0001\u0000\u0000\u0000\u015a\u0565\u0001\u0000"+ - "\u0000\u0000\u015c\u0569\u0001\u0000\u0000\u0000\u015e\u056d\u0001\u0000"+ - "\u0000\u0000\u0160\u0572\u0001\u0000\u0000\u0000\u0162\u057b\u0001\u0000"+ - "\u0000\u0000\u0164\u057f\u0001\u0000\u0000\u0000\u0166\u0583\u0001\u0000"+ - "\u0000\u0000\u0168\u0587\u0001\u0000\u0000\u0000\u016a\u058b\u0001\u0000"+ - "\u0000\u0000\u016c\u0590\u0001\u0000\u0000\u0000\u016e\u0594\u0001\u0000"+ - "\u0000\u0000\u0170\u0598\u0001\u0000\u0000\u0000\u0172\u059c\u0001\u0000"+ - "\u0000\u0000\u0174\u05a1\u0001\u0000\u0000\u0000\u0176\u05a5\u0001\u0000"+ - "\u0000\u0000\u0178\u05a9\u0001\u0000\u0000\u0000\u017a\u05ad\u0001\u0000"+ - "\u0000\u0000\u017c\u05b1\u0001\u0000\u0000\u0000\u017e\u05b5\u0001\u0000"+ - "\u0000\u0000\u0180\u05bb\u0001\u0000\u0000\u0000\u0182\u05bf\u0001\u0000"+ - "\u0000\u0000\u0184\u05c3\u0001\u0000\u0000\u0000\u0186\u05c7\u0001\u0000"+ - "\u0000\u0000\u0188\u05cb\u0001\u0000\u0000\u0000\u018a\u05cf\u0001\u0000"+ - "\u0000\u0000\u018c\u05d3\u0001\u0000\u0000\u0000\u018e\u05d8\u0001\u0000"+ - "\u0000\u0000\u0190\u05dc\u0001\u0000\u0000\u0000\u0192\u05e0\u0001\u0000"+ - "\u0000\u0000\u0194\u05e6\u0001\u0000\u0000\u0000\u0196\u05ef\u0001\u0000"+ - "\u0000\u0000\u0198\u05f3\u0001\u0000\u0000\u0000\u019a\u05f7\u0001\u0000"+ - "\u0000\u0000\u019c\u05fb\u0001\u0000\u0000\u0000\u019e\u05ff\u0001\u0000"+ - 
"\u0000\u0000\u01a0\u0603\u0001\u0000\u0000\u0000\u01a2\u0607\u0001\u0000"+ - "\u0000\u0000\u01a4\u060b\u0001\u0000\u0000\u0000\u01a6\u060f\u0001\u0000"+ - "\u0000\u0000\u01a8\u0614\u0001\u0000\u0000\u0000\u01aa\u061a\u0001\u0000"+ - "\u0000\u0000\u01ac\u0620\u0001\u0000\u0000\u0000\u01ae\u0624\u0001\u0000"+ - "\u0000\u0000\u01b0\u0628\u0001\u0000\u0000\u0000\u01b2\u062c\u0001\u0000"+ - "\u0000\u0000\u01b4\u0632\u0001\u0000\u0000\u0000\u01b6\u0638\u0001\u0000"+ - "\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba\u0640\u0001\u0000"+ - "\u0000\u0000\u01bc\u0644\u0001\u0000\u0000\u0000\u01be\u064a\u0001\u0000"+ - "\u0000\u0000\u01c0\u0650\u0001\u0000\u0000\u0000\u01c2\u0656\u0001\u0000"+ - "\u0000\u0000\u01c4\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0001"+ - "\u0000\u0000\u01c6\u01c7\u0007\u0002\u0000\u0000\u01c7\u01c8\u0007\u0002"+ - "\u0000\u0000\u01c8\u01c9\u0007\u0003\u0000\u0000\u01c9\u01ca\u0007\u0004"+ - "\u0000\u0000\u01ca\u01cb\u0007\u0005\u0000\u0000\u01cb\u01cc\u0001\u0000"+ - "\u0000\u0000\u01cc\u01cd\u0006\u0000\u0000\u0000\u01cd\u0011\u0001\u0000"+ - "\u0000\u0000\u01ce\u01cf\u0007\u0000\u0000\u0000\u01cf\u01d0\u0007\u0006"+ - "\u0000\u0000\u01d0\u01d1\u0007\u0007\u0000\u0000\u01d1\u01d2\u0007\b\u0000"+ - "\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u01d4\u0006\u0001\u0001"+ - "\u0000\u01d4\u0013\u0001\u0000\u0000\u0000\u01d5\u01d6\u0007\u0003\u0000"+ - "\u0000\u01d6\u01d7\u0007\t\u0000\u0000\u01d7\u01d8\u0007\u0006\u0000\u0000"+ - "\u01d8\u01d9\u0007\u0001\u0000\u0000\u01d9\u01da\u0007\u0004\u0000\u0000"+ - "\u01da\u01db\u0007\n\u0000\u0000\u01db\u01dc\u0001\u0000\u0000\u0000\u01dc"+ - "\u01dd\u0006\u0002\u0002\u0000\u01dd\u0015\u0001\u0000\u0000\u0000\u01de"+ - "\u01df\u0007\u0003\u0000\u0000\u01df\u01e0\u0007\u000b\u0000\u0000\u01e0"+ - "\u01e1\u0007\f\u0000\u0000\u01e1\u01e2\u0007\r\u0000\u0000\u01e2\u01e3"+ - "\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006\u0003\u0000\u0000\u01e4\u0017"+ - "\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0003\u0000\u0000\u01e6\u01e7"+ - "\u0007\u000e\u0000\u0000\u01e7\u01e8\u0007\b\u0000\u0000\u01e8\u01e9\u0007"+ - "\r\u0000\u0000\u01e9\u01ea\u0007\f\u0000\u0000\u01ea\u01eb\u0007\u0001"+ - "\u0000\u0000\u01eb\u01ec\u0007\t\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000"+ - "\u0000\u01ed\u01ee\u0006\u0004\u0003\u0000\u01ee\u0019\u0001\u0000\u0000"+ - "\u0000\u01ef\u01f0\u0007\u000f\u0000\u0000\u01f0\u01f1\u0007\u0006\u0000"+ - "\u0000\u01f1\u01f2\u0007\u0007\u0000\u0000\u01f2\u01f3\u0007\u0010\u0000"+ - "\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f5\u0006\u0005\u0004"+ - "\u0000\u01f5\u001b\u0001\u0000\u0000\u0000\u01f6\u01f7\u0007\u0011\u0000"+ - "\u0000\u01f7\u01f8\u0007\u0006\u0000\u0000\u01f8\u01f9\u0007\u0007\u0000"+ - "\u0000\u01f9\u01fa\u0007\u0012\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000"+ - "\u0000\u01fb\u01fc\u0006\u0006\u0000\u0000\u01fc\u001d\u0001\u0000\u0000"+ - "\u0000\u01fd\u01fe\u0007\u0012\u0000\u0000\u01fe\u01ff\u0007\u0003\u0000"+ - "\u0000\u01ff\u0200\u0007\u0003\u0000\u0000\u0200\u0201\u0007\b\u0000\u0000"+ - "\u0201\u0202\u0001\u0000\u0000\u0000\u0202\u0203\u0006\u0007\u0001\u0000"+ - "\u0203\u001f\u0001\u0000\u0000\u0000\u0204\u0205\u0007\r\u0000\u0000\u0205"+ - "\u0206\u0007\u0001\u0000\u0000\u0206\u0207\u0007\u0010\u0000\u0000\u0207"+ - "\u0208\u0007\u0001\u0000\u0000\u0208\u0209\u0007\u0005\u0000\u0000\u0209"+ - "\u020a\u0001\u0000\u0000\u0000\u020a\u020b\u0006\b\u0000\u0000\u020b!"+ - "\u0001\u0000\u0000\u0000\u020c\u020d\u0007\u0010\u0000\u0000\u020d\u020e"+ - 
"\u0007\u000b\u0000\u0000\u020e\u020f\u0005_\u0000\u0000\u020f\u0210\u0007"+ - "\u0003\u0000\u0000\u0210\u0211\u0007\u000e\u0000\u0000\u0211\u0212\u0007"+ - "\b\u0000\u0000\u0212\u0213\u0007\f\u0000\u0000\u0213\u0214\u0007\t\u0000"+ - "\u0000\u0214\u0215\u0007\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000"+ - "\u0000\u0216\u0217\u0006\t\u0005\u0000\u0217#\u0001\u0000\u0000\u0000"+ - "\u0218\u0219\u0007\u0006\u0000\u0000\u0219\u021a\u0007\u0003\u0000\u0000"+ - "\u021a\u021b\u0007\t\u0000\u0000\u021b\u021c\u0007\f\u0000\u0000\u021c"+ - "\u021d\u0007\u0010\u0000\u0000\u021d\u021e\u0007\u0003\u0000\u0000\u021e"+ - "\u021f\u0001\u0000\u0000\u0000\u021f\u0220\u0006\n\u0006\u0000\u0220%"+ - "\u0001\u0000\u0000\u0000\u0221\u0222\u0007\u0006\u0000\u0000\u0222\u0223"+ - "\u0007\u0007\u0000\u0000\u0223\u0224\u0007\u0013\u0000\u0000\u0224\u0225"+ - "\u0001\u0000\u0000\u0000\u0225\u0226\u0006\u000b\u0000\u0000\u0226\'\u0001"+ - "\u0000\u0000\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0007"+ - "\n\u0000\u0000\u0229\u022a\u0007\u0007\u0000\u0000\u022a\u022b\u0007\u0013"+ - "\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c\u022d\u0006\f\u0007"+ - "\u0000\u022d)\u0001\u0000\u0000\u0000\u022e\u022f\u0007\u0002\u0000\u0000"+ - "\u022f\u0230\u0007\u0007\u0000\u0000\u0230\u0231\u0007\u0006\u0000\u0000"+ - "\u0231\u0232\u0007\u0005\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000"+ - "\u0233\u0234\u0006\r\u0000\u0000\u0234+\u0001\u0000\u0000\u0000\u0235"+ - "\u0236\u0007\u0002\u0000\u0000\u0236\u0237\u0007\u0005\u0000\u0000\u0237"+ - "\u0238\u0007\f\u0000\u0000\u0238\u0239\u0007\u0005\u0000\u0000\u0239\u023a"+ - "\u0007\u0002\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000\u023b\u023c"+ - "\u0006\u000e\u0000\u0000\u023c-\u0001\u0000\u0000\u0000\u023d\u023e\u0007"+ - "\u0013\u0000\u0000\u023e\u023f\u0007\n\u0000\u0000\u023f\u0240\u0007\u0003"+ - "\u0000\u0000\u0240\u0241\u0007\u0006\u0000\u0000\u0241\u0242\u0007\u0003"+ - "\u0000\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0244\u0006\u000f"+ - "\u0000\u0000\u0244/\u0001\u0000\u0000\u0000\u0245\u0246\u0004\u0010\u0000"+ - "\u0000\u0246\u0247\u0007\u0001\u0000\u0000\u0247\u0248\u0007\t\u0000\u0000"+ - "\u0248\u0249\u0007\r\u0000\u0000\u0249\u024a\u0007\u0001\u0000\u0000\u024a"+ - "\u024b\u0007\t\u0000\u0000\u024b\u024c\u0007\u0003\u0000\u0000\u024c\u024d"+ - "\u0007\u0002\u0000\u0000\u024d\u024e\u0007\u0005\u0000\u0000\u024e\u024f"+ - "\u0007\f\u0000\u0000\u024f\u0250\u0007\u0005\u0000\u0000\u0250\u0251\u0007"+ - "\u0002\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0253\u0006"+ - "\u0010\u0000\u0000\u02531\u0001\u0000\u0000\u0000\u0254\u0255\u0004\u0011"+ - "\u0001\u0000\u0255\u0256\u0007\r\u0000\u0000\u0256\u0257\u0007\u0007\u0000"+ - "\u0000\u0257\u0258\u0007\u0007\u0000\u0000\u0258\u0259\u0007\u0012\u0000"+ - "\u0000\u0259\u025a\u0007\u0014\u0000\u0000\u025a\u025b\u0007\b\u0000\u0000"+ - "\u025b\u025c\u0005_\u0000\u0000\u025c\u025d\u0005\u8001\uf414\u0000\u0000"+ - "\u025d\u025e\u0001\u0000\u0000\u0000\u025e\u025f\u0006\u0011\b\u0000\u025f"+ - "3\u0001\u0000\u0000\u0000\u0260\u0261\u0004\u0012\u0002\u0000\u0261\u0262"+ - "\u0007\u0010\u0000\u0000\u0262\u0263\u0007\u0003\u0000\u0000\u0263\u0264"+ - "\u0007\u0005\u0000\u0000\u0264\u0265\u0007\u0006\u0000\u0000\u0265\u0266"+ - "\u0007\u0001\u0000\u0000\u0266\u0267\u0007\u0004\u0000\u0000\u0267\u0268"+ - "\u0007\u0002\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a"+ - "\u0006\u0012\t\u0000\u026a5\u0001\u0000\u0000\u0000\u026b\u026c\u0004"+ - 
"\u0013\u0003\u0000\u026c\u026d\u0007\u0015\u0000\u0000\u026d\u026e\u0007"+ - "\u0007\u0000\u0000\u026e\u026f\u0007\u0001\u0000\u0000\u026f\u0270\u0007"+ - "\t\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u0272\u0006\u0013"+ - "\n\u0000\u02727\u0001\u0000\u0000\u0000\u0273\u0274\u0004\u0014\u0004"+ - "\u0000\u0274\u0275\u0007\u000f\u0000\u0000\u0275\u0276\u0007\u0014\u0000"+ - "\u0000\u0276\u0277\u0007\r\u0000\u0000\u0277\u0278\u0007\r\u0000\u0000"+ - "\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0006\u0014\n\u0000\u027a"+ - "9\u0001\u0000\u0000\u0000\u027b\u027c\u0004\u0015\u0005\u0000\u027c\u027d"+ - "\u0007\r\u0000\u0000\u027d\u027e\u0007\u0003\u0000\u0000\u027e\u027f\u0007"+ - "\u000f\u0000\u0000\u027f\u0280\u0007\u0005\u0000\u0000\u0280\u0281\u0001"+ - "\u0000\u0000\u0000\u0281\u0282\u0006\u0015\n\u0000\u0282;\u0001\u0000"+ - "\u0000\u0000\u0283\u0284\u0004\u0016\u0006\u0000\u0284\u0285\u0007\u0006"+ - "\u0000\u0000\u0285\u0286\u0007\u0001\u0000\u0000\u0286\u0287\u0007\u0011"+ - "\u0000\u0000\u0287\u0288\u0007\n\u0000\u0000\u0288\u0289\u0007\u0005\u0000"+ - "\u0000\u0289\u028a\u0001\u0000\u0000\u0000\u028a\u028b\u0006\u0016\n\u0000"+ - "\u028b=\u0001\u0000\u0000\u0000\u028c\u028d\u0004\u0017\u0007\u0000\u028d"+ - "\u028e\u0007\r\u0000\u0000\u028e\u028f\u0007\u0007\u0000\u0000\u028f\u0290"+ - "\u0007\u0007\u0000\u0000\u0290\u0291\u0007\u0012\u0000\u0000\u0291\u0292"+ - "\u0007\u0014\u0000\u0000\u0292\u0293\u0007\b\u0000\u0000\u0293\u0294\u0001"+ - "\u0000\u0000\u0000\u0294\u0295\u0006\u0017\n\u0000\u0295?\u0001\u0000"+ - "\u0000\u0000\u0296\u0298\b\u0016\u0000\u0000\u0297\u0296\u0001\u0000\u0000"+ - "\u0000\u0298\u0299\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000\u0000"+ - "\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029b\u0001\u0000\u0000"+ - "\u0000\u029b\u029c\u0006\u0018\u0000\u0000\u029cA\u0001\u0000\u0000\u0000"+ - "\u029d\u029e\u0005/\u0000\u0000\u029e\u029f\u0005/\u0000\u0000\u029f\u02a3"+ - "\u0001\u0000\u0000\u0000\u02a0\u02a2\b\u0017\u0000\u0000\u02a1\u02a0\u0001"+ - "\u0000\u0000\u0000\u02a2\u02a5\u0001\u0000\u0000\u0000\u02a3\u02a1\u0001"+ - "\u0000\u0000\u0000\u02a3\u02a4\u0001\u0000\u0000\u0000\u02a4\u02a7\u0001"+ - "\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a6\u02a8\u0005"+ - "\r\u0000\u0000\u02a7\u02a6\u0001\u0000\u0000\u0000\u02a7\u02a8\u0001\u0000"+ - "\u0000\u0000\u02a8\u02aa\u0001\u0000\u0000\u0000\u02a9\u02ab\u0005\n\u0000"+ - "\u0000\u02aa\u02a9\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000"+ - "\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac\u02ad\u0006\u0019\u000b"+ - "\u0000\u02adC\u0001\u0000\u0000\u0000\u02ae\u02af\u0005/\u0000\u0000\u02af"+ - "\u02b0\u0005*\u0000\u0000\u02b0\u02b5\u0001\u0000\u0000\u0000\u02b1\u02b4"+ - "\u0003D\u001a\u0000\u02b2\u02b4\t\u0000\u0000\u0000\u02b3\u02b1\u0001"+ - "\u0000\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000\u0000\u02b4\u02b7\u0001"+ - "\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b5\u02b3\u0001"+ - "\u0000\u0000\u0000\u02b6\u02b8\u0001\u0000\u0000\u0000\u02b7\u02b5\u0001"+ - "\u0000\u0000\u0000\u02b8\u02b9\u0005*\u0000\u0000\u02b9\u02ba\u0005/\u0000"+ - "\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb\u02bc\u0006\u001a\u000b"+ - "\u0000\u02bcE\u0001\u0000\u0000\u0000\u02bd\u02bf\u0007\u0018\u0000\u0000"+ - "\u02be\u02bd\u0001\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000"+ - "\u02c0\u02be\u0001\u0000\u0000\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000"+ - "\u02c1\u02c2\u0001\u0000\u0000\u0000\u02c2\u02c3\u0006\u001b\u000b\u0000"+ - 
"\u02c3G\u0001\u0000\u0000\u0000\u02c4\u02c5\u0005|\u0000\u0000\u02c5\u02c6"+ - "\u0001\u0000\u0000\u0000\u02c6\u02c7\u0006\u001c\f\u0000\u02c7I\u0001"+ - "\u0000\u0000\u0000\u02c8\u02c9\u0007\u0019\u0000\u0000\u02c9K\u0001\u0000"+ - "\u0000\u0000\u02ca\u02cb\u0007\u001a\u0000\u0000\u02cbM\u0001\u0000\u0000"+ - "\u0000\u02cc\u02cd\u0005\\\u0000\u0000\u02cd\u02ce\u0007\u001b\u0000\u0000"+ - "\u02ceO\u0001\u0000\u0000\u0000\u02cf\u02d0\b\u001c\u0000\u0000\u02d0"+ - "Q\u0001\u0000\u0000\u0000\u02d1\u02d3\u0007\u0003\u0000\u0000\u02d2\u02d4"+ - "\u0007\u001d\u0000\u0000\u02d3\u02d2\u0001\u0000\u0000\u0000\u02d3\u02d4"+ - "\u0001\u0000\u0000\u0000\u02d4\u02d6\u0001\u0000\u0000\u0000\u02d5\u02d7"+ - "\u0003J\u001d\u0000\u02d6\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001"+ - "\u0000\u0000\u0000\u02d8\u02d6\u0001\u0000\u0000\u0000\u02d8\u02d9\u0001"+ - "\u0000\u0000\u0000\u02d9S\u0001\u0000\u0000\u0000\u02da\u02db\u0005@\u0000"+ - "\u0000\u02dbU\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005`\u0000\u0000\u02dd"+ - "W\u0001\u0000\u0000\u0000\u02de\u02e2\b\u001e\u0000\u0000\u02df\u02e0"+ - "\u0005`\u0000\u0000\u02e0\u02e2\u0005`\u0000\u0000\u02e1\u02de\u0001\u0000"+ - "\u0000\u0000\u02e1\u02df\u0001\u0000\u0000\u0000\u02e2Y\u0001\u0000\u0000"+ - "\u0000\u02e3\u02e4\u0005_\u0000\u0000\u02e4[\u0001\u0000\u0000\u0000\u02e5"+ - "\u02e9\u0003L\u001e\u0000\u02e6\u02e9\u0003J\u001d\u0000\u02e7\u02e9\u0003"+ - "Z%\u0000\u02e8\u02e5\u0001\u0000\u0000\u0000\u02e8\u02e6\u0001\u0000\u0000"+ - "\u0000\u02e8\u02e7\u0001\u0000\u0000\u0000\u02e9]\u0001\u0000\u0000\u0000"+ - "\u02ea\u02ef\u0005\"\u0000\u0000\u02eb\u02ee\u0003N\u001f\u0000\u02ec"+ - "\u02ee\u0003P \u0000\u02ed\u02eb\u0001\u0000\u0000\u0000\u02ed\u02ec\u0001"+ - "\u0000\u0000\u0000\u02ee\u02f1\u0001\u0000\u0000\u0000\u02ef\u02ed\u0001"+ - "\u0000\u0000\u0000\u02ef\u02f0\u0001\u0000\u0000\u0000\u02f0\u02f2\u0001"+ - "\u0000\u0000\u0000\u02f1\u02ef\u0001\u0000\u0000\u0000\u02f2\u0308\u0005"+ - "\"\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5\u0005\"\u0000"+ - "\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02fa\u0001\u0000\u0000\u0000"+ - "\u02f7\u02f9\b\u0017\u0000\u0000\u02f8\u02f7\u0001\u0000\u0000\u0000\u02f9"+ - "\u02fc\u0001\u0000\u0000\u0000\u02fa\u02fb\u0001\u0000\u0000\u0000\u02fa"+ - "\u02f8\u0001\u0000\u0000\u0000\u02fb\u02fd\u0001\u0000\u0000\u0000\u02fc"+ - "\u02fa\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005\"\u0000\u0000\u02fe\u02ff"+ - "\u0005\"\u0000\u0000\u02ff\u0300\u0005\"\u0000\u0000\u0300\u0302\u0001"+ - "\u0000\u0000\u0000\u0301\u0303\u0005\"\u0000\u0000\u0302\u0301\u0001\u0000"+ - "\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303\u0305\u0001\u0000"+ - "\u0000\u0000\u0304\u0306\u0005\"\u0000\u0000\u0305\u0304\u0001\u0000\u0000"+ - "\u0000\u0305\u0306\u0001\u0000\u0000\u0000\u0306\u0308\u0001\u0000\u0000"+ - "\u0000\u0307\u02ea\u0001\u0000\u0000\u0000\u0307\u02f3\u0001\u0000\u0000"+ - "\u0000\u0308_\u0001\u0000\u0000\u0000\u0309\u030b\u0003J\u001d\u0000\u030a"+ - "\u0309\u0001\u0000\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c"+ - "\u030a\u0001\u0000\u0000\u0000\u030c\u030d\u0001\u0000\u0000\u0000\u030d"+ - "a\u0001\u0000\u0000\u0000\u030e\u0310\u0003J\u001d\u0000\u030f\u030e\u0001"+ - "\u0000\u0000\u0000\u0310\u0311\u0001\u0000\u0000\u0000\u0311\u030f\u0001"+ - "\u0000\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000\u0312\u0313\u0001"+ - "\u0000\u0000\u0000\u0313\u0317\u0003t2\u0000\u0314\u0316\u0003J\u001d"+ - "\u0000\u0315\u0314\u0001\u0000\u0000\u0000\u0316\u0319\u0001\u0000\u0000"+ - 
"\u0000\u0317\u0315\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000"+ - "\u0000\u0318\u0339\u0001\u0000\u0000\u0000\u0319\u0317\u0001\u0000\u0000"+ - "\u0000\u031a\u031c\u0003t2\u0000\u031b\u031d\u0003J\u001d\u0000\u031c"+ - "\u031b\u0001\u0000\u0000\u0000\u031d\u031e\u0001\u0000\u0000\u0000\u031e"+ - "\u031c\u0001\u0000\u0000\u0000\u031e\u031f\u0001\u0000\u0000\u0000\u031f"+ - "\u0339\u0001\u0000\u0000\u0000\u0320\u0322\u0003J\u001d\u0000\u0321\u0320"+ - "\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000\u0000\u0000\u0323\u0321"+ - "\u0001\u0000\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u032c"+ - "\u0001\u0000\u0000\u0000\u0325\u0329\u0003t2\u0000\u0326\u0328\u0003J"+ - "\u001d\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u032b\u0001\u0000"+ - "\u0000\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000"+ - "\u0000\u0000\u032a\u032d\u0001\u0000\u0000\u0000\u032b\u0329\u0001\u0000"+ - "\u0000\u0000\u032c\u0325\u0001\u0000\u0000\u0000\u032c\u032d\u0001\u0000"+ - "\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e\u032f\u0003R!\u0000"+ - "\u032f\u0339\u0001\u0000\u0000\u0000\u0330\u0332\u0003t2\u0000\u0331\u0333"+ - "\u0003J\u001d\u0000\u0332\u0331\u0001\u0000\u0000\u0000\u0333\u0334\u0001"+ - "\u0000\u0000\u0000\u0334\u0332\u0001\u0000\u0000\u0000\u0334\u0335\u0001"+ - "\u0000\u0000\u0000\u0335\u0336\u0001\u0000\u0000\u0000\u0336\u0337\u0003"+ - "R!\u0000\u0337\u0339\u0001\u0000\u0000\u0000\u0338\u030f\u0001\u0000\u0000"+ - "\u0000\u0338\u031a\u0001\u0000\u0000\u0000\u0338\u0321\u0001\u0000\u0000"+ - "\u0000\u0338\u0330\u0001\u0000\u0000\u0000\u0339c\u0001\u0000\u0000\u0000"+ - "\u033a\u033b\u0007\u001f\u0000\u0000\u033b\u033c\u0007 \u0000\u0000\u033c"+ - "e\u0001\u0000\u0000\u0000\u033d\u033e\u0007\f\u0000\u0000\u033e\u033f"+ - "\u0007\t\u0000\u0000\u033f\u0340\u0007\u0000\u0000\u0000\u0340g\u0001"+ - "\u0000\u0000\u0000\u0341\u0342\u0007\f\u0000\u0000\u0342\u0343\u0007\u0002"+ - "\u0000\u0000\u0343\u0344\u0007\u0004\u0000\u0000\u0344i\u0001\u0000\u0000"+ - "\u0000\u0345\u0346\u0005=\u0000\u0000\u0346k\u0001\u0000\u0000\u0000\u0347"+ - "\u0348\u0005:\u0000\u0000\u0348\u0349\u0005:\u0000\u0000\u0349m\u0001"+ - "\u0000\u0000\u0000\u034a\u034b\u0005:\u0000\u0000\u034bo\u0001\u0000\u0000"+ - "\u0000\u034c\u034d\u0005,\u0000\u0000\u034dq\u0001\u0000\u0000\u0000\u034e"+ - "\u034f\u0007\u0000\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350"+ - "\u0351\u0007\u0002\u0000\u0000\u0351\u0352\u0007\u0004\u0000\u0000\u0352"+ - "s\u0001\u0000\u0000\u0000\u0353\u0354\u0005.\u0000\u0000\u0354u\u0001"+ - "\u0000\u0000\u0000\u0355\u0356\u0007\u000f\u0000\u0000\u0356\u0357\u0007"+ - "\f\u0000\u0000\u0357\u0358\u0007\r\u0000\u0000\u0358\u0359\u0007\u0002"+ - "\u0000\u0000\u0359\u035a\u0007\u0003\u0000\u0000\u035aw\u0001\u0000\u0000"+ - "\u0000\u035b\u035c\u0007\u000f\u0000\u0000\u035c\u035d\u0007\u0001\u0000"+ - "\u0000\u035d\u035e\u0007\u0006\u0000\u0000\u035e\u035f\u0007\u0002\u0000"+ - "\u0000\u035f\u0360\u0007\u0005\u0000\u0000\u0360y\u0001\u0000\u0000\u0000"+ - "\u0361\u0362\u0007\u0001\u0000\u0000\u0362\u0363\u0007\t\u0000\u0000\u0363"+ - "{\u0001\u0000\u0000\u0000\u0364\u0365\u0007\u0001\u0000\u0000\u0365\u0366"+ - "\u0007\u0002\u0000\u0000\u0366}\u0001\u0000\u0000\u0000\u0367\u0368\u0007"+ - "\r\u0000\u0000\u0368\u0369\u0007\f\u0000\u0000\u0369\u036a\u0007\u0002"+ - "\u0000\u0000\u036a\u036b\u0007\u0005\u0000\u0000\u036b\u007f\u0001\u0000"+ - "\u0000\u0000\u036c\u036d\u0007\r\u0000\u0000\u036d\u036e\u0007\u0001\u0000"+ - 
"\u0000\u036e\u036f\u0007\u0012\u0000\u0000\u036f\u0370\u0007\u0003\u0000"+ - "\u0000\u0370\u0081\u0001\u0000\u0000\u0000\u0371\u0372\u0005(\u0000\u0000"+ - "\u0372\u0083\u0001\u0000\u0000\u0000\u0373\u0374\u0007\t\u0000\u0000\u0374"+ - "\u0375\u0007\u0007\u0000\u0000\u0375\u0376\u0007\u0005\u0000\u0000\u0376"+ - "\u0085\u0001\u0000\u0000\u0000\u0377\u0378\u0007\t\u0000\u0000\u0378\u0379"+ - "\u0007\u0014\u0000\u0000\u0379\u037a\u0007\r\u0000\u0000\u037a\u037b\u0007"+ - "\r\u0000\u0000\u037b\u0087\u0001\u0000\u0000\u0000\u037c\u037d\u0007\t"+ - "\u0000\u0000\u037d\u037e\u0007\u0014\u0000\u0000\u037e\u037f\u0007\r\u0000"+ - "\u0000\u037f\u0380\u0007\r\u0000\u0000\u0380\u0381\u0007\u0002\u0000\u0000"+ - "\u0381\u0089\u0001\u0000\u0000\u0000\u0382\u0383\u0007\u0007\u0000\u0000"+ - "\u0383\u0384\u0007\u0006\u0000\u0000\u0384\u008b\u0001\u0000\u0000\u0000"+ - "\u0385\u0386\u0005?\u0000\u0000\u0386\u008d\u0001\u0000\u0000\u0000\u0387"+ - "\u0388\u0007\u0006\u0000\u0000\u0388\u0389\u0007\r\u0000\u0000\u0389\u038a"+ - "\u0007\u0001\u0000\u0000\u038a\u038b\u0007\u0012\u0000\u0000\u038b\u038c"+ - "\u0007\u0003\u0000\u0000\u038c\u008f\u0001\u0000\u0000\u0000\u038d\u038e"+ - "\u0005)\u0000\u0000\u038e\u0091\u0001\u0000\u0000\u0000\u038f\u0390\u0007"+ - "\u0005\u0000\u0000\u0390\u0391\u0007\u0006\u0000\u0000\u0391\u0392\u0007"+ - "\u0014\u0000\u0000\u0392\u0393\u0007\u0003\u0000\u0000\u0393\u0093\u0001"+ - "\u0000\u0000\u0000\u0394\u0395\u0005=\u0000\u0000\u0395\u0396\u0005=\u0000"+ - "\u0000\u0396\u0095\u0001\u0000\u0000\u0000\u0397\u0398\u0005=\u0000\u0000"+ - "\u0398\u0399\u0005~\u0000\u0000\u0399\u0097\u0001\u0000\u0000\u0000\u039a"+ - "\u039b\u0005!\u0000\u0000\u039b\u039c\u0005=\u0000\u0000\u039c\u0099\u0001"+ - "\u0000\u0000\u0000\u039d\u039e\u0005<\u0000\u0000\u039e\u009b\u0001\u0000"+ - "\u0000\u0000\u039f\u03a0\u0005<\u0000\u0000\u03a0\u03a1\u0005=\u0000\u0000"+ - "\u03a1\u009d\u0001\u0000\u0000\u0000\u03a2\u03a3\u0005>\u0000\u0000\u03a3"+ - "\u009f\u0001\u0000\u0000\u0000\u03a4\u03a5\u0005>\u0000\u0000\u03a5\u03a6"+ - "\u0005=\u0000\u0000\u03a6\u00a1\u0001\u0000\u0000\u0000\u03a7\u03a8\u0005"+ - "+\u0000\u0000\u03a8\u00a3\u0001\u0000\u0000\u0000\u03a9\u03aa\u0005-\u0000"+ - "\u0000\u03aa\u00a5\u0001\u0000\u0000\u0000\u03ab\u03ac\u0005*\u0000\u0000"+ - "\u03ac\u00a7\u0001\u0000\u0000\u0000\u03ad\u03ae\u0005/\u0000\u0000\u03ae"+ - "\u00a9\u0001\u0000\u0000\u0000\u03af\u03b0\u0005%\u0000\u0000\u03b0\u00ab"+ - "\u0001\u0000\u0000\u0000\u03b1\u03b2\u0005{\u0000\u0000\u03b2\u00ad\u0001"+ - "\u0000\u0000\u0000\u03b3\u03b4\u0005}\u0000\u0000\u03b4\u00af\u0001\u0000"+ - "\u0000\u0000\u03b5\u03b6\u0003.\u000f\u0000\u03b6\u03b7\u0001\u0000\u0000"+ - "\u0000\u03b7\u03b8\u0006P\r\u0000\u03b8\u00b1\u0001\u0000\u0000\u0000"+ - "\u03b9\u03bc\u0003\u008c>\u0000\u03ba\u03bd\u0003L\u001e\u0000\u03bb\u03bd"+ - "\u0003Z%\u0000\u03bc\u03ba\u0001\u0000\u0000\u0000\u03bc\u03bb\u0001\u0000"+ - "\u0000\u0000\u03bd\u03c1\u0001\u0000\u0000\u0000\u03be\u03c0\u0003\\&"+ - "\u0000\u03bf\u03be\u0001\u0000\u0000\u0000\u03c0\u03c3\u0001\u0000\u0000"+ - "\u0000\u03c1\u03bf\u0001\u0000\u0000\u0000\u03c1\u03c2\u0001\u0000\u0000"+ - "\u0000\u03c2\u03cb\u0001\u0000\u0000\u0000\u03c3\u03c1\u0001\u0000\u0000"+ - "\u0000\u03c4\u03c6\u0003\u008c>\u0000\u03c5\u03c7\u0003J\u001d\u0000\u03c6"+ - "\u03c5\u0001\u0000\u0000\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8"+ - "\u03c6\u0001\u0000\u0000\u0000\u03c8\u03c9\u0001\u0000\u0000\u0000\u03c9"+ - "\u03cb\u0001\u0000\u0000\u0000\u03ca\u03b9\u0001\u0000\u0000\u0000\u03ca"+ - 
"\u03c4\u0001\u0000\u0000\u0000\u03cb\u00b3\u0001\u0000\u0000\u0000\u03cc"+ - "\u03cd\u0005[\u0000\u0000\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce\u03cf"+ - "\u0006R\u0000\u0000\u03cf\u03d0\u0006R\u0000\u0000\u03d0\u00b5\u0001\u0000"+ - "\u0000\u0000\u03d1\u03d2\u0005]\u0000\u0000\u03d2\u03d3\u0001\u0000\u0000"+ - "\u0000\u03d3\u03d4\u0006S\f\u0000\u03d4\u03d5\u0006S\f\u0000\u03d5\u00b7"+ - "\u0001\u0000\u0000\u0000\u03d6\u03da\u0003L\u001e\u0000\u03d7\u03d9\u0003"+ - "\\&\u0000\u03d8\u03d7\u0001\u0000\u0000\u0000\u03d9\u03dc\u0001\u0000"+ - "\u0000\u0000\u03da\u03d8\u0001\u0000\u0000\u0000\u03da\u03db\u0001\u0000"+ - "\u0000\u0000\u03db\u03e7\u0001\u0000\u0000\u0000\u03dc\u03da\u0001\u0000"+ - "\u0000\u0000\u03dd\u03e0\u0003Z%\u0000\u03de\u03e0\u0003T\"\u0000\u03df"+ - "\u03dd\u0001\u0000\u0000\u0000\u03df\u03de\u0001\u0000\u0000\u0000\u03e0"+ - "\u03e2\u0001\u0000\u0000\u0000\u03e1\u03e3\u0003\\&\u0000\u03e2\u03e1"+ - "\u0001\u0000\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e2"+ - "\u0001\u0000\u0000\u0000\u03e4\u03e5\u0001\u0000\u0000\u0000\u03e5\u03e7"+ - "\u0001\u0000\u0000\u0000\u03e6\u03d6\u0001\u0000\u0000\u0000\u03e6\u03df"+ - "\u0001\u0000\u0000\u0000\u03e7\u00b9\u0001\u0000\u0000\u0000\u03e8\u03ea"+ - "\u0003V#\u0000\u03e9\u03eb\u0003X$\u0000\u03ea\u03e9\u0001\u0000\u0000"+ - "\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ea\u0001\u0000\u0000"+ - "\u0000\u03ec\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0001\u0000\u0000"+ - "\u0000\u03ee\u03ef\u0003V#\u0000\u03ef\u00bb\u0001\u0000\u0000\u0000\u03f0"+ - "\u03f1\u0003\u00baU\u0000\u03f1\u00bd\u0001\u0000\u0000\u0000\u03f2\u03f3"+ - "\u0003B\u0019\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006"+ - "W\u000b\u0000\u03f5\u00bf\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003D\u001a"+ - "\u0000\u03f7\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006X\u000b\u0000"+ - "\u03f9\u00c1\u0001\u0000\u0000\u0000\u03fa\u03fb\u0003F\u001b\u0000\u03fb"+ - "\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0006Y\u000b\u0000\u03fd\u00c3"+ - "\u0001\u0000\u0000\u0000\u03fe\u03ff\u0003\u00b4R\u0000\u03ff\u0400\u0001"+ - "\u0000\u0000\u0000\u0400\u0401\u0006Z\u000e\u0000\u0401\u0402\u0006Z\u000f"+ - "\u0000\u0402\u00c5\u0001\u0000\u0000\u0000\u0403\u0404\u0003H\u001c\u0000"+ - "\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0406\u0006[\u0010\u0000\u0406"+ - "\u0407\u0006[\f\u0000\u0407\u00c7\u0001\u0000\u0000\u0000\u0408\u0409"+ - "\u0003F\u001b\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006"+ - "\\\u000b\u0000\u040b\u00c9\u0001\u0000\u0000\u0000\u040c\u040d\u0003B"+ - "\u0019\u0000\u040d\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006]\u000b"+ - "\u0000\u040f\u00cb\u0001\u0000\u0000\u0000\u0410\u0411\u0003D\u001a\u0000"+ - "\u0411\u0412\u0001\u0000\u0000\u0000\u0412\u0413\u0006^\u000b\u0000\u0413"+ - "\u00cd\u0001\u0000\u0000\u0000\u0414\u0415\u0003H\u001c\u0000\u0415\u0416"+ - "\u0001\u0000\u0000\u0000\u0416\u0417\u0006_\u0010\u0000\u0417\u0418\u0006"+ - "_\f\u0000\u0418\u00cf\u0001\u0000\u0000\u0000\u0419\u041a\u0003\u00b4"+ - "R\u0000\u041a\u041b\u0001\u0000\u0000\u0000\u041b\u041c\u0006`\u000e\u0000"+ - "\u041c\u00d1\u0001\u0000\u0000\u0000\u041d\u041e\u0003\u00b6S\u0000\u041e"+ - "\u041f\u0001\u0000\u0000\u0000\u041f\u0420\u0006a\u0011\u0000\u0420\u00d3"+ - "\u0001\u0000\u0000\u0000\u0421\u0422\u0003n/\u0000\u0422\u0423\u0001\u0000"+ - "\u0000\u0000\u0423\u0424\u0006b\u0012\u0000\u0424\u00d5\u0001\u0000\u0000"+ - "\u0000\u0425\u0426\u0003p0\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427"+ - 
"\u0428\u0006c\u0013\u0000\u0428\u00d7\u0001\u0000\u0000\u0000\u0429\u042a"+ - "\u0003j-\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006d"+ - "\u0014\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042e\u0007\u0010"+ - "\u0000\u0000\u042e\u042f\u0007\u0003\u0000\u0000\u042f\u0430\u0007\u0005"+ - "\u0000\u0000\u0430\u0431\u0007\f\u0000\u0000\u0431\u0432\u0007\u0000\u0000"+ - "\u0000\u0432\u0433\u0007\f\u0000\u0000\u0433\u0434\u0007\u0005\u0000\u0000"+ - "\u0434\u0435\u0007\f\u0000\u0000\u0435\u00db\u0001\u0000\u0000\u0000\u0436"+ - "\u043a\b!\u0000\u0000\u0437\u0438\u0005/\u0000\u0000\u0438\u043a\b\"\u0000"+ - "\u0000\u0439\u0436\u0001\u0000\u0000\u0000\u0439\u0437\u0001\u0000\u0000"+ - "\u0000\u043a\u00dd\u0001\u0000\u0000\u0000\u043b\u043d\u0003\u00dcf\u0000"+ - "\u043c\u043b\u0001\u0000\u0000\u0000\u043d\u043e\u0001\u0000\u0000\u0000"+ - "\u043e\u043c\u0001\u0000\u0000\u0000\u043e\u043f\u0001\u0000\u0000\u0000"+ - "\u043f\u00df\u0001\u0000\u0000\u0000\u0440\u0441\u0003\u00deg\u0000\u0441"+ - "\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006h\u0015\u0000\u0443\u00e1"+ - "\u0001\u0000\u0000\u0000\u0444\u0445\u0003^\'\u0000\u0445\u0446\u0001"+ - "\u0000\u0000\u0000\u0446\u0447\u0006i\u0016\u0000\u0447\u00e3\u0001\u0000"+ - "\u0000\u0000\u0448\u0449\u0003B\u0019\u0000\u0449\u044a\u0001\u0000\u0000"+ - "\u0000\u044a\u044b\u0006j\u000b\u0000\u044b\u00e5\u0001\u0000\u0000\u0000"+ - "\u044c\u044d\u0003D\u001a\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e"+ - "\u044f\u0006k\u000b\u0000\u044f\u00e7\u0001\u0000\u0000\u0000\u0450\u0451"+ - "\u0003F\u001b\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006"+ - "l\u000b\u0000\u0453\u00e9\u0001\u0000\u0000\u0000\u0454\u0455\u0003H\u001c"+ - "\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457\u0006m\u0010\u0000"+ - "\u0457\u0458\u0006m\f\u0000\u0458\u00eb\u0001\u0000\u0000\u0000\u0459"+ - "\u045a\u0003t2\u0000\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006"+ - "n\u0017\u0000\u045c\u00ed\u0001\u0000\u0000\u0000\u045d\u045e\u0003p0"+ - "\u0000\u045e\u045f\u0001\u0000\u0000\u0000\u045f\u0460\u0006o\u0013\u0000"+ - "\u0460\u00ef\u0001\u0000\u0000\u0000\u0461\u0462\u0004p\b\u0000\u0462"+ - "\u0463\u0003\u008c>\u0000\u0463\u0464\u0001\u0000\u0000\u0000\u0464\u0465"+ - "\u0006p\u0018\u0000\u0465\u00f1\u0001\u0000\u0000\u0000\u0466\u0467\u0004"+ - "q\t\u0000\u0467\u0468\u0003\u00b2Q\u0000\u0468\u0469\u0001\u0000\u0000"+ - "\u0000\u0469\u046a\u0006q\u0019\u0000\u046a\u00f3\u0001\u0000\u0000\u0000"+ - "\u046b\u0470\u0003L\u001e\u0000\u046c\u0470\u0003J\u001d\u0000\u046d\u0470"+ - "\u0003Z%\u0000\u046e\u0470\u0003\u00a6K\u0000\u046f\u046b\u0001\u0000"+ - "\u0000\u0000\u046f\u046c\u0001\u0000\u0000\u0000\u046f\u046d\u0001\u0000"+ - "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u00f5\u0001\u0000"+ - "\u0000\u0000\u0471\u0474\u0003L\u001e\u0000\u0472\u0474\u0003\u00a6K\u0000"+ - "\u0473\u0471\u0001\u0000\u0000\u0000\u0473\u0472\u0001\u0000\u0000\u0000"+ - "\u0474\u0478\u0001\u0000\u0000\u0000\u0475\u0477\u0003\u00f4r\u0000\u0476"+ - "\u0475\u0001\u0000\u0000\u0000\u0477\u047a\u0001\u0000\u0000\u0000\u0478"+ - "\u0476\u0001\u0000\u0000\u0000\u0478\u0479\u0001\u0000\u0000\u0000\u0479"+ - "\u0485\u0001\u0000\u0000\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047b"+ - "\u047e\u0003Z%\u0000\u047c\u047e\u0003T\"\u0000\u047d\u047b\u0001\u0000"+ - "\u0000\u0000\u047d\u047c\u0001\u0000\u0000\u0000\u047e\u0480\u0001\u0000"+ - "\u0000\u0000\u047f\u0481\u0003\u00f4r\u0000\u0480\u047f\u0001\u0000\u0000"+ - 
"\u0000\u0481\u0482\u0001\u0000\u0000\u0000\u0482\u0480\u0001\u0000\u0000"+ - "\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0485\u0001\u0000\u0000"+ - "\u0000\u0484\u0473\u0001\u0000\u0000\u0000\u0484\u047d\u0001\u0000\u0000"+ - "\u0000\u0485\u00f7\u0001\u0000\u0000\u0000\u0486\u0489\u0003\u00f6s\u0000"+ - "\u0487\u0489\u0003\u00baU\u0000\u0488\u0486\u0001\u0000\u0000\u0000\u0488"+ - "\u0487\u0001\u0000\u0000\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a"+ - "\u0488\u0001\u0000\u0000\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b"+ - "\u00f9\u0001\u0000\u0000\u0000\u048c\u048d\u0003B\u0019\u0000\u048d\u048e"+ - "\u0001\u0000\u0000\u0000\u048e\u048f\u0006u\u000b\u0000\u048f\u00fb\u0001"+ - "\u0000\u0000\u0000\u0490\u0491\u0003D\u001a\u0000\u0491\u0492\u0001\u0000"+ - "\u0000\u0000\u0492\u0493\u0006v\u000b\u0000\u0493\u00fd\u0001\u0000\u0000"+ - "\u0000\u0494\u0495\u0003F\u001b\u0000\u0495\u0496\u0001\u0000\u0000\u0000"+ - "\u0496\u0497\u0006w\u000b\u0000\u0497\u00ff\u0001\u0000\u0000\u0000\u0498"+ - "\u0499\u0003H\u001c\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b"+ - "\u0006x\u0010\u0000\u049b\u049c\u0006x\f\u0000\u049c\u0101\u0001\u0000"+ - "\u0000\u0000\u049d\u049e\u0003j-\u0000\u049e\u049f\u0001\u0000\u0000\u0000"+ - "\u049f\u04a0\u0006y\u0014\u0000\u04a0\u0103\u0001\u0000\u0000\u0000\u04a1"+ - "\u04a2\u0003p0\u0000\u04a2\u04a3\u0001\u0000\u0000\u0000\u04a3\u04a4\u0006"+ - "z\u0013\u0000\u04a4\u0105\u0001\u0000\u0000\u0000\u04a5\u04a6\u0003t2"+ - "\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000\u04a7\u04a8\u0006{\u0017\u0000"+ - "\u04a8\u0107\u0001\u0000\u0000\u0000\u04a9\u04aa\u0004|\n\u0000\u04aa"+ - "\u04ab\u0003\u008c>\u0000\u04ab\u04ac\u0001\u0000\u0000\u0000\u04ac\u04ad"+ - "\u0006|\u0018\u0000\u04ad\u0109\u0001\u0000\u0000\u0000\u04ae\u04af\u0004"+ - "}\u000b\u0000\u04af\u04b0\u0003\u00b2Q\u0000\u04b0\u04b1\u0001\u0000\u0000"+ - "\u0000\u04b1\u04b2\u0006}\u0019\u0000\u04b2\u010b\u0001\u0000\u0000\u0000"+ - "\u04b3\u04b4\u0007\f\u0000\u0000\u04b4\u04b5\u0007\u0002\u0000\u0000\u04b5"+ - "\u010d\u0001\u0000\u0000\u0000\u04b6\u04b7\u0003\u00f8t\u0000\u04b7\u04b8"+ - "\u0001\u0000\u0000\u0000\u04b8\u04b9\u0006\u007f\u001a\u0000\u04b9\u010f"+ - "\u0001\u0000\u0000\u0000\u04ba\u04bb\u0003B\u0019\u0000\u04bb\u04bc\u0001"+ - "\u0000\u0000\u0000\u04bc\u04bd\u0006\u0080\u000b\u0000\u04bd\u0111\u0001"+ - "\u0000\u0000\u0000\u04be\u04bf\u0003D\u001a\u0000\u04bf\u04c0\u0001\u0000"+ - "\u0000\u0000\u04c0\u04c1\u0006\u0081\u000b\u0000\u04c1\u0113\u0001\u0000"+ - "\u0000\u0000\u04c2\u04c3\u0003F\u001b\u0000\u04c3\u04c4\u0001\u0000\u0000"+ - "\u0000\u04c4\u04c5\u0006\u0082\u000b\u0000\u04c5\u0115\u0001\u0000\u0000"+ - "\u0000\u04c6\u04c7\u0003H\u001c\u0000\u04c7\u04c8\u0001\u0000\u0000\u0000"+ - "\u04c8\u04c9\u0006\u0083\u0010\u0000\u04c9\u04ca\u0006\u0083\f\u0000\u04ca"+ - "\u0117\u0001\u0000\u0000\u0000\u04cb\u04cc\u0003\u00b4R\u0000\u04cc\u04cd"+ - "\u0001\u0000\u0000\u0000\u04cd\u04ce\u0006\u0084\u000e\u0000\u04ce\u04cf"+ - "\u0006\u0084\u001b\u0000\u04cf\u0119\u0001\u0000\u0000\u0000\u04d0\u04d1"+ - "\u0007\u0007\u0000\u0000\u04d1\u04d2\u0007\t\u0000\u0000\u04d2\u04d3\u0001"+ - "\u0000\u0000\u0000\u04d3\u04d4\u0006\u0085\u001c\u0000\u04d4\u011b\u0001"+ - "\u0000\u0000\u0000\u04d5\u04d6\u0007\u0013\u0000\u0000\u04d6\u04d7\u0007"+ - "\u0001\u0000\u0000\u04d7\u04d8\u0007\u0005\u0000\u0000\u04d8\u04d9\u0007"+ - "\n\u0000\u0000\u04d9\u04da\u0001\u0000\u0000\u0000\u04da\u04db\u0006\u0086"+ - "\u001c\u0000\u04db\u011d\u0001\u0000\u0000\u0000\u04dc\u04dd\b#\u0000"+ - 
"\u0000\u04dd\u011f\u0001\u0000\u0000\u0000\u04de\u04e0\u0003\u011e\u0087"+ - "\u0000\u04df\u04de\u0001\u0000\u0000\u0000\u04e0\u04e1\u0001\u0000\u0000"+ - "\u0000\u04e1\u04df\u0001\u0000\u0000\u0000\u04e1\u04e2\u0001\u0000\u0000"+ - "\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000\u04e3\u04e4\u0003n/\u0000\u04e4"+ - "\u04e6\u0001\u0000\u0000\u0000\u04e5\u04df\u0001\u0000\u0000\u0000\u04e5"+ - "\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e8\u0001\u0000\u0000\u0000\u04e7"+ - "\u04e9\u0003\u011e\u0087\u0000\u04e8\u04e7\u0001\u0000\u0000\u0000\u04e9"+ - "\u04ea\u0001\u0000\u0000\u0000\u04ea\u04e8\u0001\u0000\u0000\u0000\u04ea"+ - "\u04eb\u0001\u0000\u0000\u0000\u04eb\u0121\u0001\u0000\u0000\u0000\u04ec"+ - "\u04ed\u0003\u0120\u0088\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee"+ - "\u04ef\u0006\u0089\u001d\u0000\u04ef\u0123\u0001\u0000\u0000\u0000\u04f0"+ - "\u04f1\u0003B\u0019\u0000\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3"+ - "\u0006\u008a\u000b\u0000\u04f3\u0125\u0001\u0000\u0000\u0000\u04f4\u04f5"+ - "\u0003D\u001a\u0000\u04f5\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006"+ - "\u008b\u000b\u0000\u04f7\u0127\u0001\u0000\u0000\u0000\u04f8\u04f9\u0003"+ - "F\u001b\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u008c"+ - "\u000b\u0000\u04fb\u0129\u0001\u0000\u0000\u0000\u04fc\u04fd\u0003H\u001c"+ - "\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff\u0006\u008d\u0010"+ - "\u0000\u04ff\u0500\u0006\u008d\f\u0000\u0500\u0501\u0006\u008d\f\u0000"+ - "\u0501\u012b\u0001\u0000\u0000\u0000\u0502\u0503\u0003j-\u0000\u0503\u0504"+ - "\u0001\u0000\u0000\u0000\u0504\u0505\u0006\u008e\u0014\u0000\u0505\u012d"+ - "\u0001\u0000\u0000\u0000\u0506\u0507\u0003p0\u0000\u0507\u0508\u0001\u0000"+ - "\u0000\u0000\u0508\u0509\u0006\u008f\u0013\u0000\u0509\u012f\u0001\u0000"+ - "\u0000\u0000\u050a\u050b\u0003t2\u0000\u050b\u050c\u0001\u0000\u0000\u0000"+ - "\u050c\u050d\u0006\u0090\u0017\u0000\u050d\u0131\u0001\u0000\u0000\u0000"+ - "\u050e\u050f\u0003\u011c\u0086\u0000\u050f\u0510\u0001\u0000\u0000\u0000"+ - "\u0510\u0511\u0006\u0091\u001e\u0000\u0511\u0133\u0001\u0000\u0000\u0000"+ - "\u0512\u0513\u0003\u00f8t\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514"+ - "\u0515\u0006\u0092\u001a\u0000\u0515\u0135\u0001\u0000\u0000\u0000\u0516"+ - "\u0517\u0003\u00bcV\u0000\u0517\u0518\u0001\u0000\u0000\u0000\u0518\u0519"+ - "\u0006\u0093\u001f\u0000\u0519\u0137\u0001\u0000\u0000\u0000\u051a\u051b"+ - "\u0004\u0094\f\u0000\u051b\u051c\u0003\u008c>\u0000\u051c\u051d\u0001"+ - "\u0000\u0000\u0000\u051d\u051e\u0006\u0094\u0018\u0000\u051e\u0139\u0001"+ - "\u0000\u0000\u0000\u051f\u0520\u0004\u0095\r\u0000\u0520\u0521\u0003\u00b2"+ - "Q\u0000\u0521\u0522\u0001\u0000\u0000\u0000\u0522\u0523\u0006\u0095\u0019"+ - "\u0000\u0523\u013b\u0001\u0000\u0000\u0000\u0524\u0525\u0003B\u0019\u0000"+ - "\u0525\u0526\u0001\u0000\u0000\u0000\u0526\u0527\u0006\u0096\u000b\u0000"+ - "\u0527\u013d\u0001\u0000\u0000\u0000\u0528\u0529\u0003D\u001a\u0000\u0529"+ - "\u052a\u0001\u0000\u0000\u0000\u052a\u052b\u0006\u0097\u000b\u0000\u052b"+ - "\u013f\u0001\u0000\u0000\u0000\u052c\u052d\u0003F\u001b\u0000\u052d\u052e"+ - "\u0001\u0000\u0000\u0000\u052e\u052f\u0006\u0098\u000b\u0000\u052f\u0141"+ - "\u0001\u0000\u0000\u0000\u0530\u0531\u0003H\u001c\u0000\u0531\u0532\u0001"+ - "\u0000\u0000\u0000\u0532\u0533\u0006\u0099\u0010\u0000\u0533\u0534\u0006"+ - "\u0099\f\u0000\u0534\u0143\u0001\u0000\u0000\u0000\u0535\u0536\u0003t"+ - "2\u0000\u0536\u0537\u0001\u0000\u0000\u0000\u0537\u0538\u0006\u009a\u0017"+ - 
"\u0000\u0538\u0145\u0001\u0000\u0000\u0000\u0539\u053a\u0004\u009b\u000e"+ - "\u0000\u053a\u053b\u0003\u008c>\u0000\u053b\u053c\u0001\u0000\u0000\u0000"+ - "\u053c\u053d\u0006\u009b\u0018\u0000\u053d\u0147\u0001\u0000\u0000\u0000"+ - "\u053e\u053f\u0004\u009c\u000f\u0000\u053f\u0540\u0003\u00b2Q\u0000\u0540"+ - "\u0541\u0001\u0000\u0000\u0000\u0541\u0542\u0006\u009c\u0019\u0000\u0542"+ - "\u0149\u0001\u0000\u0000\u0000\u0543\u0544\u0003\u00bcV\u0000\u0544\u0545"+ - "\u0001\u0000\u0000\u0000\u0545\u0546\u0006\u009d\u001f\u0000\u0546\u014b"+ - "\u0001\u0000\u0000\u0000\u0547\u0548\u0003\u00b8T\u0000\u0548\u0549\u0001"+ - "\u0000\u0000\u0000\u0549\u054a\u0006\u009e \u0000\u054a\u014d\u0001\u0000"+ - "\u0000\u0000\u054b\u054c\u0003B\u0019\u0000\u054c\u054d\u0001\u0000\u0000"+ - "\u0000\u054d\u054e\u0006\u009f\u000b\u0000\u054e\u014f\u0001\u0000\u0000"+ - "\u0000\u054f\u0550\u0003D\u001a\u0000\u0550\u0551\u0001\u0000\u0000\u0000"+ - "\u0551\u0552\u0006\u00a0\u000b\u0000\u0552\u0151\u0001\u0000\u0000\u0000"+ - "\u0553\u0554\u0003F\u001b\u0000\u0554\u0555\u0001\u0000\u0000\u0000\u0555"+ - "\u0556\u0006\u00a1\u000b\u0000\u0556\u0153\u0001\u0000\u0000\u0000\u0557"+ - "\u0558\u0003H\u001c\u0000\u0558\u0559\u0001\u0000\u0000\u0000\u0559\u055a"+ - "\u0006\u00a2\u0010\u0000\u055a\u055b\u0006\u00a2\f\u0000\u055b\u0155\u0001"+ - "\u0000\u0000\u0000\u055c\u055d\u0007\u0001\u0000\u0000\u055d\u055e\u0007"+ - "\t\u0000\u0000\u055e\u055f\u0007\u000f\u0000\u0000\u055f\u0560\u0007\u0007"+ - "\u0000\u0000\u0560\u0157\u0001\u0000\u0000\u0000\u0561\u0562\u0003B\u0019"+ - "\u0000\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006\u00a4\u000b"+ - "\u0000\u0564\u0159\u0001\u0000\u0000\u0000\u0565\u0566\u0003D\u001a\u0000"+ - "\u0566\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00a5\u000b\u0000"+ - "\u0568\u015b\u0001\u0000\u0000\u0000\u0569\u056a\u0003F\u001b\u0000\u056a"+ - "\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00a6\u000b\u0000\u056c"+ - "\u015d\u0001\u0000\u0000\u0000\u056d\u056e\u0003\u00b6S\u0000\u056e\u056f"+ - "\u0001\u0000\u0000\u0000\u056f\u0570\u0006\u00a7\u0011\u0000\u0570\u0571"+ - "\u0006\u00a7\f\u0000\u0571\u015f\u0001\u0000\u0000\u0000\u0572\u0573\u0003"+ - "n/\u0000\u0573\u0574\u0001\u0000\u0000\u0000\u0574\u0575\u0006\u00a8\u0012"+ - "\u0000\u0575\u0161\u0001\u0000\u0000\u0000\u0576\u057c\u0003T\"\u0000"+ - "\u0577\u057c\u0003J\u001d\u0000\u0578\u057c\u0003t2\u0000\u0579\u057c"+ - "\u0003L\u001e\u0000\u057a\u057c\u0003Z%\u0000\u057b\u0576\u0001\u0000"+ - "\u0000\u0000\u057b\u0577\u0001\u0000\u0000\u0000\u057b\u0578\u0001\u0000"+ - "\u0000\u0000\u057b\u0579\u0001\u0000\u0000\u0000\u057b\u057a\u0001\u0000"+ - "\u0000\u0000\u057c\u057d\u0001\u0000\u0000\u0000\u057d\u057b\u0001\u0000"+ - "\u0000\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u0163\u0001\u0000"+ - "\u0000\u0000\u057f\u0580\u0003B\u0019\u0000\u0580\u0581\u0001\u0000\u0000"+ - "\u0000\u0581\u0582\u0006\u00aa\u000b\u0000\u0582\u0165\u0001\u0000\u0000"+ - "\u0000\u0583\u0584\u0003D\u001a\u0000\u0584\u0585\u0001\u0000\u0000\u0000"+ - "\u0585\u0586\u0006\u00ab\u000b\u0000\u0586\u0167\u0001\u0000\u0000\u0000"+ - "\u0587\u0588\u0003F\u001b\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589"+ - "\u058a\u0006\u00ac\u000b\u0000\u058a\u0169\u0001\u0000\u0000\u0000\u058b"+ - "\u058c\u0003H\u001c\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e"+ - "\u0006\u00ad\u0010\u0000\u058e\u058f\u0006\u00ad\f\u0000\u058f\u016b\u0001"+ - "\u0000\u0000\u0000\u0590\u0591\u0003n/\u0000\u0591\u0592\u0001\u0000\u0000"+ - 
"\u0000\u0592\u0593\u0006\u00ae\u0012\u0000\u0593\u016d\u0001\u0000\u0000"+ - "\u0000\u0594\u0595\u0003p0\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596"+ - "\u0597\u0006\u00af\u0013\u0000\u0597\u016f\u0001\u0000\u0000\u0000\u0598"+ - "\u0599\u0003t2\u0000\u0599\u059a\u0001\u0000\u0000\u0000\u059a\u059b\u0006"+ - "\u00b0\u0017\u0000\u059b\u0171\u0001\u0000\u0000\u0000\u059c\u059d\u0003"+ - "\u011a\u0085\u0000\u059d\u059e\u0001\u0000\u0000\u0000\u059e\u059f\u0006"+ - "\u00b1!\u0000\u059f\u05a0\u0006\u00b1\"\u0000\u05a0\u0173\u0001\u0000"+ - "\u0000\u0000\u05a1\u05a2\u0003\u00deg\u0000\u05a2\u05a3\u0001\u0000\u0000"+ - "\u0000\u05a3\u05a4\u0006\u00b2\u0015\u0000\u05a4\u0175\u0001\u0000\u0000"+ - "\u0000\u05a5\u05a6\u0003^\'\u0000\u05a6\u05a7\u0001\u0000\u0000\u0000"+ - "\u05a7\u05a8\u0006\u00b3\u0016\u0000\u05a8\u0177\u0001\u0000\u0000\u0000"+ - "\u05a9\u05aa\u0003B\u0019\u0000\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab"+ - "\u05ac\u0006\u00b4\u000b\u0000\u05ac\u0179\u0001\u0000\u0000\u0000\u05ad"+ - "\u05ae\u0003D\u001a\u0000\u05ae\u05af\u0001\u0000\u0000\u0000\u05af\u05b0"+ - "\u0006\u00b5\u000b\u0000\u05b0\u017b\u0001\u0000\u0000\u0000\u05b1\u05b2"+ - "\u0003F\u001b\u0000\u05b2\u05b3\u0001\u0000\u0000\u0000\u05b3\u05b4\u0006"+ - "\u00b6\u000b\u0000\u05b4\u017d\u0001\u0000\u0000\u0000\u05b5\u05b6\u0003"+ - "H\u001c\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7\u05b8\u0006\u00b7"+ - "\u0010\u0000\u05b8\u05b9\u0006\u00b7\f\u0000\u05b9\u05ba\u0006\u00b7\f"+ - "\u0000\u05ba\u017f\u0001\u0000\u0000\u0000\u05bb\u05bc\u0003p0\u0000\u05bc"+ - "\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be\u0006\u00b8\u0013\u0000\u05be"+ - "\u0181\u0001\u0000\u0000\u0000\u05bf\u05c0\u0003t2\u0000\u05c0\u05c1\u0001"+ - "\u0000\u0000\u0000\u05c1\u05c2\u0006\u00b9\u0017\u0000\u05c2\u0183\u0001"+ - "\u0000\u0000\u0000\u05c3\u05c4\u0003\u00f8t\u0000\u05c4\u05c5\u0001\u0000"+ - "\u0000\u0000\u05c5\u05c6\u0006\u00ba\u001a\u0000\u05c6\u0185\u0001\u0000"+ - "\u0000\u0000\u05c7\u05c8\u0003B\u0019\u0000\u05c8\u05c9\u0001\u0000\u0000"+ - "\u0000\u05c9\u05ca\u0006\u00bb\u000b\u0000\u05ca\u0187\u0001\u0000\u0000"+ - "\u0000\u05cb\u05cc\u0003D\u001a\u0000\u05cc\u05cd\u0001\u0000\u0000\u0000"+ - "\u05cd\u05ce\u0006\u00bc\u000b\u0000\u05ce\u0189\u0001\u0000\u0000\u0000"+ - "\u05cf\u05d0\u0003F\u001b\u0000\u05d0\u05d1\u0001\u0000\u0000\u0000\u05d1"+ - "\u05d2\u0006\u00bd\u000b\u0000\u05d2\u018b\u0001\u0000\u0000\u0000\u05d3"+ - "\u05d4\u0003H\u001c\u0000\u05d4\u05d5\u0001\u0000\u0000\u0000\u05d5\u05d6"+ - "\u0006\u00be\u0010\u0000\u05d6\u05d7\u0006\u00be\f\u0000\u05d7\u018d\u0001"+ - "\u0000\u0000\u0000\u05d8\u05d9\u00036\u0013\u0000\u05d9\u05da\u0001\u0000"+ - "\u0000\u0000\u05da\u05db\u0006\u00bf#\u0000\u05db\u018f\u0001\u0000\u0000"+ - "\u0000\u05dc\u05dd\u0003\u010c~\u0000\u05dd\u05de\u0001\u0000\u0000\u0000"+ - "\u05de\u05df\u0006\u00c0$\u0000\u05df\u0191\u0001\u0000\u0000\u0000\u05e0"+ - "\u05e1\u0003\u011a\u0085\u0000\u05e1\u05e2\u0001\u0000\u0000\u0000\u05e2"+ - "\u05e3\u0006\u00c1!\u0000\u05e3\u05e4\u0006\u00c1\f\u0000\u05e4\u05e5"+ - "\u0006\u00c1\u0000\u0000\u05e5\u0193\u0001\u0000\u0000\u0000\u05e6\u05e7"+ - "\u0007\u0014\u0000\u0000\u05e7\u05e8\u0007\u0002\u0000\u0000\u05e8\u05e9"+ - "\u0007\u0001\u0000\u0000\u05e9\u05ea\u0007\t\u0000\u0000\u05ea\u05eb\u0007"+ - "\u0011\u0000\u0000\u05eb\u05ec\u0001\u0000\u0000\u0000\u05ec\u05ed\u0006"+ - "\u00c2\f\u0000\u05ed\u05ee\u0006\u00c2\u0000\u0000\u05ee\u0195\u0001\u0000"+ - "\u0000\u0000\u05ef\u05f0\u0003\u00deg\u0000\u05f0\u05f1\u0001\u0000\u0000"+ - 
"\u0000\u05f1\u05f2\u0006\u00c3\u0015\u0000\u05f2\u0197\u0001\u0000\u0000"+ - "\u0000\u05f3\u05f4\u0003^\'\u0000\u05f4\u05f5\u0001\u0000\u0000\u0000"+ - "\u05f5\u05f6\u0006\u00c4\u0016\u0000\u05f6\u0199\u0001\u0000\u0000\u0000"+ - "\u05f7\u05f8\u0003n/\u0000\u05f8\u05f9\u0001\u0000\u0000\u0000\u05f9\u05fa"+ - "\u0006\u00c5\u0012\u0000\u05fa\u019b\u0001\u0000\u0000\u0000\u05fb\u05fc"+ - "\u0003\u00b8T\u0000\u05fc\u05fd\u0001\u0000\u0000\u0000\u05fd\u05fe\u0006"+ - "\u00c6 \u0000\u05fe\u019d\u0001\u0000\u0000\u0000\u05ff\u0600\u0003\u00bc"+ - "V\u0000\u0600\u0601\u0001\u0000\u0000\u0000\u0601\u0602\u0006\u00c7\u001f"+ - "\u0000\u0602\u019f\u0001\u0000\u0000\u0000\u0603\u0604\u0003B\u0019\u0000"+ - "\u0604\u0605\u0001\u0000\u0000\u0000\u0605\u0606\u0006\u00c8\u000b\u0000"+ - "\u0606\u01a1\u0001\u0000\u0000\u0000\u0607\u0608\u0003D\u001a\u0000\u0608"+ - "\u0609\u0001\u0000\u0000\u0000\u0609\u060a\u0006\u00c9\u000b\u0000\u060a"+ - "\u01a3\u0001\u0000\u0000\u0000\u060b\u060c\u0003F\u001b\u0000\u060c\u060d"+ - "\u0001\u0000\u0000\u0000\u060d\u060e\u0006\u00ca\u000b\u0000\u060e\u01a5"+ - "\u0001\u0000\u0000\u0000\u060f\u0610\u0003H\u001c\u0000\u0610\u0611\u0001"+ - "\u0000\u0000\u0000\u0611\u0612\u0006\u00cb\u0010\u0000\u0612\u0613\u0006"+ - "\u00cb\f\u0000\u0613\u01a7\u0001\u0000\u0000\u0000\u0614\u0615\u0003\u00de"+ - "g\u0000\u0615\u0616\u0001\u0000\u0000\u0000\u0616\u0617\u0006\u00cc\u0015"+ - "\u0000\u0617\u0618\u0006\u00cc\f\u0000\u0618\u0619\u0006\u00cc%\u0000"+ - "\u0619\u01a9\u0001\u0000\u0000\u0000\u061a\u061b\u0003^\'\u0000\u061b"+ - "\u061c\u0001\u0000\u0000\u0000\u061c\u061d\u0006\u00cd\u0016\u0000\u061d"+ - "\u061e\u0006\u00cd\f\u0000\u061e\u061f\u0006\u00cd%\u0000\u061f\u01ab"+ - "\u0001\u0000\u0000\u0000\u0620\u0621\u0003B\u0019\u0000\u0621\u0622\u0001"+ - "\u0000\u0000\u0000\u0622\u0623\u0006\u00ce\u000b\u0000\u0623\u01ad\u0001"+ - "\u0000\u0000\u0000\u0624\u0625\u0003D\u001a\u0000\u0625\u0626\u0001\u0000"+ - "\u0000\u0000\u0626\u0627\u0006\u00cf\u000b\u0000\u0627\u01af\u0001\u0000"+ - "\u0000\u0000\u0628\u0629\u0003F\u001b\u0000\u0629\u062a\u0001\u0000\u0000"+ - "\u0000\u062a\u062b\u0006\u00d0\u000b\u0000\u062b\u01b1\u0001\u0000\u0000"+ - "\u0000\u062c\u062d\u0003n/\u0000\u062d\u062e\u0001\u0000\u0000\u0000\u062e"+ - "\u062f\u0006\u00d1\u0012\u0000\u062f\u0630\u0006\u00d1\f\u0000\u0630\u0631"+ - "\u0006\u00d1\t\u0000\u0631\u01b3\u0001\u0000\u0000\u0000\u0632\u0633\u0003"+ - "p0\u0000\u0633\u0634\u0001\u0000\u0000\u0000\u0634\u0635\u0006\u00d2\u0013"+ - "\u0000\u0635\u0636\u0006\u00d2\f\u0000\u0636\u0637\u0006\u00d2\t\u0000"+ - "\u0637\u01b5\u0001\u0000\u0000\u0000\u0638\u0639\u0003B\u0019\u0000\u0639"+ - "\u063a\u0001\u0000\u0000\u0000\u063a\u063b\u0006\u00d3\u000b\u0000\u063b"+ - "\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003D\u001a\u0000\u063d\u063e"+ - "\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4\u000b\u0000\u063f\u01b9"+ - "\u0001\u0000\u0000\u0000\u0640\u0641\u0003F\u001b\u0000\u0641\u0642\u0001"+ - "\u0000\u0000\u0000\u0642\u0643\u0006\u00d5\u000b\u0000\u0643\u01bb\u0001"+ - "\u0000\u0000\u0000\u0644\u0645\u0003\u00bcV\u0000\u0645\u0646\u0001\u0000"+ - "\u0000\u0000\u0646\u0647\u0006\u00d6\f\u0000\u0647\u0648\u0006\u00d6\u0000"+ - "\u0000\u0648\u0649\u0006\u00d6\u001f\u0000\u0649\u01bd\u0001\u0000\u0000"+ - "\u0000\u064a\u064b\u0003\u00b8T\u0000\u064b\u064c\u0001\u0000\u0000\u0000"+ - "\u064c\u064d\u0006\u00d7\f\u0000\u064d\u064e\u0006\u00d7\u0000\u0000\u064e"+ - "\u064f\u0006\u00d7 \u0000\u064f\u01bf\u0001\u0000\u0000\u0000\u0650\u0651"+ - 
"\u0003d*\u0000\u0651\u0652\u0001\u0000\u0000\u0000\u0652\u0653\u0006\u00d8"+ - "\f\u0000\u0653\u0654\u0006\u00d8\u0000\u0000\u0654\u0655\u0006\u00d8&"+ - "\u0000\u0655\u01c1\u0001\u0000\u0000\u0000\u0656\u0657\u0003H\u001c\u0000"+ - "\u0657\u0658\u0001\u0000\u0000\u0000\u0658\u0659\u0006\u00d9\u0010\u0000"+ - "\u0659\u065a\u0006\u00d9\f\u0000\u065a\u01c3\u0001\u0000\u0000\u0000B"+ - "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ - "\u000f\u0299\u02a3\u02a7\u02aa\u02b3\u02b5\u02c0\u02d3\u02d8\u02e1\u02e8"+ - "\u02ed\u02ef\u02fa\u0302\u0305\u0307\u030c\u0311\u0317\u031e\u0323\u0329"+ - "\u032c\u0334\u0338\u03bc\u03c1\u03c8\u03ca\u03da\u03df\u03e4\u03e6\u03ec"+ - "\u0439\u043e\u046f\u0473\u0478\u047d\u0482\u0484\u0488\u048a\u04e1\u04e5"+ - "\u04ea\u057b\u057d\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000"+ - "\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005"+ - "\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000\u0001"+ - "\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007H\u0000\u0005\u0000\u0000"+ - "\u0007\u001d\u0000\u0007I\u0000\u0007&\u0000\u0007\'\u0000\u0007$\u0000"+ - "\u0007S\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007G\u0000"+ - "\u0007W\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007a\u0000\u0007`\u0000"+ - "\u0007K\u0000\u0007J\u0000\u0007_\u0000\u0005\f\u0000\u0007\u0014\u0000"+ - "\u0007[\u0000\u0005\u000f\u0000\u0007!\u0000"; + "\u00d8\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001"+ + "\u00da\u0001\u00da\u0001\u00da\u0001\u00da\u0001\u00da\u0001\u00db\u0001"+ + "\u00db\u0001\u00db\u0001\u00db\u0001\u00dc\u0001\u00dc\u0001\u00dc\u0001"+ + "\u00dc\u0001\u00dd\u0001\u00dd\u0001\u00dd\u0001\u00dd\u0001\u00de\u0001"+ + "\u00de\u0001\u00de\u0001\u00de\u0001\u00df\u0001\u00df\u0001\u00df\u0001"+ + "\u00df\u0001\u00e0\u0001\u00e0\u0001\u00e0\u0001\u00e0\u0001\u00e1\u0001"+ + "\u00e1\u0001\u00e1\u0001\u00e1\u0001\u00e2\u0001\u00e2\u0001\u00e2\u0001"+ + "\u00e2\u0001\u00e3\u0001\u00e3\u0001\u00e3\u0001\u00e3\u0002\u02d1\u0316"+ + "\u0000\u00e4\u0011\u0001\u0013\u0002\u0015\u0003\u0017\u0004\u0019\u0005"+ + "\u001b\u0006\u001d\u0007\u001f\b!\t#\n%\u000b\'\f)\r+\u000e-\u000f/\u0010"+ + "1\u00113\u00125\u00137\u00149\u0015;\u0016=\u0017?\u0018A\u0019C\u001a"+ + "E\u001bG\u001cI\u001dK\u0000M\u0000O\u0000Q\u0000S\u0000U\u0000W\u0000"+ + "Y\u0000[\u0000]\u0000_\u001ea\u001fc e!g\"i#k$m%o&q\'s(u)w*y+{,}-\u007f"+ + ".\u0081/\u00830\u00851\u00872\u00893\u008b4\u008d5\u008f6\u00917\u0093"+ + "8\u00959\u0097:\u0099;\u009b<\u009d=\u009f>\u00a1?\u00a3@\u00a5A\u00a7"+ + "B\u00a9C\u00abD\u00adE\u00afF\u00b1\u0000\u00b3G\u00b5H\u00b7I\u00b9J"+ + "\u00bb\u0000\u00bdK\u00bfL\u00c1M\u00c3N\u00c5\u0000\u00c7\u0000\u00c9"+ + "O\u00cbP\u00cdQ\u00cf\u0000\u00d1\u0000\u00d3\u0000\u00d5\u0000\u00d7"+ + "\u0000\u00d9\u0000\u00dbR\u00dd\u0000\u00dfS\u00e1\u0000\u00e3\u0000\u00e5"+ + "T\u00e7U\u00e9V\u00eb\u0000\u00ed\u0000\u00ef\u0000\u00f1\u0000\u00f3"+ + "\u0000\u00f5\u0000\u00f7\u0000\u00f9W\u00fbX\u00fdY\u00ffZ\u0101\u0000"+ + "\u0103\u0000\u0105\u0000\u0107\u0000\u0109\u0000\u010b\u0000\u010d[\u010f"+ + "\u0000\u0111\\\u0113]\u0115^\u0117\u0000\u0119\u0000\u011b_\u011d`\u011f"+ + "\u0000\u0121a\u0123\u0000\u0125b\u0127c\u0129d\u012b\u0000\u012d\u0000"+ + "\u012f\u0000\u0131\u0000\u0133\u0000\u0135\u0000\u0137\u0000\u0139\u0000"+ + "\u013b\u0000\u013de\u013ff\u0141g\u0143\u0000\u0145\u0000\u0147\u0000"+ + "\u0149\u0000\u014b\u0000\u014d\u0000\u014fh\u0151i\u0153j\u0155\u0000"+ + 
"\u0157k\u0159l\u015bm\u015dn\u015f\u0000\u0161\u0000\u0163o\u0165p\u0167"+ + "q\u0169r\u016b\u0000\u016d\u0000\u016f\u0000\u0171\u0000\u0173\u0000\u0175"+ + "\u0000\u0177\u0000\u0179s\u017bt\u017du\u017f\u0000\u0181\u0000\u0183"+ + "\u0000\u0185\u0000\u0187v\u0189w\u018bx\u018d\u0000\u018fy\u0191\u0000"+ + "\u0193\u0000\u0195z\u0197\u0000\u0199\u0000\u019b\u0000\u019d\u0000\u019f"+ + "\u0000\u01a1{\u01a3|\u01a5}\u01a7\u0000\u01a9\u0000\u01ab\u0000\u01ad"+ + "~\u01af\u007f\u01b1\u0080\u01b3\u0000\u01b5\u0000\u01b7\u0081\u01b9\u0082"+ + "\u01bb\u0083\u01bd\u0000\u01bf\u0000\u01c1\u0000\u01c3\u0000\u01c5\u0000"+ + "\u01c7\u0000\u01c9\u0000\u01cb\u0000\u01cd\u0000\u01cf\u0000\u01d1\u0000"+ + "\u01d3\u0084\u01d5\u0085\u01d7\u0086\u0011\u0000\u0001\u0002\u0003\u0004"+ + "\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010$\u0002\u0000DDdd"+ + "\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ + "\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000"+ + "NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002"+ + "\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000"+ + "KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0006\u0000\t\n\r\r //[[]]\u0002"+ + "\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000"+ + "\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001"+ + "\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t\n\r\r \"\",,/"+ + "/::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0002"+ + "\u0000JJjj\u06b3\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001"+ + "\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001"+ + "\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001"+ + "\u0000\u0000\u0000\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001"+ + "\u0000\u0000\u0000\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000"+ + "\u0000\u0000%\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000"+ + "\u0000)\u0001\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-"+ + "\u0001\u0000\u0000\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000"+ + "\u0000\u0000\u00003\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000"+ + "\u00007\u0001\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000\u0000;"+ + "\u0001\u0000\u0000\u0000\u0000=\u0001\u0000\u0000\u0000\u0000?\u0001\u0000"+ + "\u0000\u0000\u0000A\u0001\u0000\u0000\u0000\u0000C\u0001\u0000\u0000\u0000"+ + "\u0000E\u0001\u0000\u0000\u0000\u0000G\u0001\u0000\u0000\u0000\u0001I"+ + "\u0001\u0000\u0000\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a\u0001\u0000"+ + "\u0000\u0000\u0001c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000\u0000\u0000"+ + "\u0001g\u0001\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000\u0001k"+ + "\u0001\u0000\u0000\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o\u0001\u0000"+ + "\u0000\u0000\u0001q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000\u0000\u0000"+ + "\u0001u\u0001\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000\u0001y"+ + "\u0001\u0000\u0000\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}\u0001\u0000"+ + "\u0000\u0000\u0001\u007f\u0001\u0000\u0000\u0000\u0001\u0081\u0001\u0000"+ + "\u0000\u0000\u0001\u0083\u0001\u0000\u0000\u0000\u0001\u0085\u0001\u0000"+ + "\u0000\u0000\u0001\u0087\u0001\u0000\u0000\u0000\u0001\u0089\u0001\u0000"+ + "\u0000\u0000\u0001\u008b\u0001\u0000\u0000\u0000\u0001\u008d\u0001\u0000"+ + "\u0000\u0000\u0001\u008f\u0001\u0000\u0000\u0000\u0001\u0091\u0001\u0000"+ + 
"\u0000\u0000\u0001\u0093\u0001\u0000\u0000\u0000\u0001\u0095\u0001\u0000"+ + "\u0000\u0000\u0001\u0097\u0001\u0000\u0000\u0000\u0001\u0099\u0001\u0000"+ + "\u0000\u0000\u0001\u009b\u0001\u0000\u0000\u0000\u0001\u009d\u0001\u0000"+ + "\u0000\u0000\u0001\u009f\u0001\u0000\u0000\u0000\u0001\u00a1\u0001\u0000"+ + "\u0000\u0000\u0001\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5\u0001\u0000"+ + "\u0000\u0000\u0001\u00a7\u0001\u0000\u0000\u0000\u0001\u00a9\u0001\u0000"+ + "\u0000\u0000\u0001\u00ab\u0001\u0000\u0000\u0000\u0001\u00ad\u0001\u0000"+ + "\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001\u00b1\u0001\u0000"+ + "\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0001\u00b5\u0001\u0000"+ + "\u0000\u0000\u0001\u00b7\u0001\u0000\u0000\u0000\u0001\u00b9\u0001\u0000"+ + "\u0000\u0000\u0001\u00bd\u0001\u0000\u0000\u0000\u0001\u00bf\u0001\u0000"+ + "\u0000\u0000\u0001\u00c1\u0001\u0000\u0000\u0000\u0001\u00c3\u0001\u0000"+ + "\u0000\u0000\u0002\u00c5\u0001\u0000\u0000\u0000\u0002\u00c7\u0001\u0000"+ + "\u0000\u0000\u0002\u00c9\u0001\u0000\u0000\u0000\u0002\u00cb\u0001\u0000"+ + "\u0000\u0000\u0002\u00cd\u0001\u0000\u0000\u0000\u0003\u00cf\u0001\u0000"+ + "\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003\u00d3\u0001\u0000"+ + "\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003\u00d7\u0001\u0000"+ + "\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0003\u00db\u0001\u0000"+ + "\u0000\u0000\u0003\u00df\u0001\u0000\u0000\u0000\u0003\u00e1\u0001\u0000"+ + "\u0000\u0000\u0003\u00e3\u0001\u0000\u0000\u0000\u0003\u00e5\u0001\u0000"+ + "\u0000\u0000\u0003\u00e7\u0001\u0000\u0000\u0000\u0003\u00e9\u0001\u0000"+ + "\u0000\u0000\u0004\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed\u0001\u0000"+ + "\u0000\u0000\u0004\u00ef\u0001\u0000\u0000\u0000\u0004\u00f1\u0001\u0000"+ + "\u0000\u0000\u0004\u00f3\u0001\u0000\u0000\u0000\u0004\u00f9\u0001\u0000"+ + "\u0000\u0000\u0004\u00fb\u0001\u0000\u0000\u0000\u0004\u00fd\u0001\u0000"+ + "\u0000\u0000\u0004\u00ff\u0001\u0000\u0000\u0000\u0005\u0101\u0001\u0000"+ + "\u0000\u0000\u0005\u0103\u0001\u0000\u0000\u0000\u0005\u0105\u0001\u0000"+ + "\u0000\u0000\u0005\u0107\u0001\u0000\u0000\u0000\u0005\u0109\u0001\u0000"+ + "\u0000\u0000\u0005\u010b\u0001\u0000\u0000\u0000\u0005\u010d\u0001\u0000"+ + "\u0000\u0000\u0005\u010f\u0001\u0000\u0000\u0000\u0005\u0111\u0001\u0000"+ + "\u0000\u0000\u0005\u0113\u0001\u0000\u0000\u0000\u0005\u0115\u0001\u0000"+ + "\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0006\u0119\u0001\u0000"+ + "\u0000\u0000\u0006\u011b\u0001\u0000\u0000\u0000\u0006\u011d\u0001\u0000"+ + "\u0000\u0000\u0006\u0121\u0001\u0000\u0000\u0000\u0006\u0123\u0001\u0000"+ + "\u0000\u0000\u0006\u0125\u0001\u0000\u0000\u0000\u0006\u0127\u0001\u0000"+ + "\u0000\u0000\u0006\u0129\u0001\u0000\u0000\u0000\u0007\u012b\u0001\u0000"+ + "\u0000\u0000\u0007\u012d\u0001\u0000\u0000\u0000\u0007\u012f\u0001\u0000"+ + "\u0000\u0000\u0007\u0131\u0001\u0000\u0000\u0000\u0007\u0133\u0001\u0000"+ + "\u0000\u0000\u0007\u0135\u0001\u0000\u0000\u0000\u0007\u0137\u0001\u0000"+ + "\u0000\u0000\u0007\u0139\u0001\u0000\u0000\u0000\u0007\u013b\u0001\u0000"+ + "\u0000\u0000\u0007\u013d\u0001\u0000\u0000\u0000\u0007\u013f\u0001\u0000"+ + "\u0000\u0000\u0007\u0141\u0001\u0000\u0000\u0000\b\u0143\u0001\u0000\u0000"+ + "\u0000\b\u0145\u0001\u0000\u0000\u0000\b\u0147\u0001\u0000\u0000\u0000"+ + "\b\u0149\u0001\u0000\u0000\u0000\b\u014b\u0001\u0000\u0000\u0000\b\u014d"+ + "\u0001\u0000\u0000\u0000\b\u014f\u0001\u0000\u0000\u0000\b\u0151\u0001"+ + 
"\u0000\u0000\u0000\b\u0153\u0001\u0000\u0000\u0000\t\u0155\u0001\u0000"+ + "\u0000\u0000\t\u0157\u0001\u0000\u0000\u0000\t\u0159\u0001\u0000\u0000"+ + "\u0000\t\u015b\u0001\u0000\u0000\u0000\t\u015d\u0001\u0000\u0000\u0000"+ + "\n\u015f\u0001\u0000\u0000\u0000\n\u0161\u0001\u0000\u0000\u0000\n\u0163"+ + "\u0001\u0000\u0000\u0000\n\u0165\u0001\u0000\u0000\u0000\n\u0167\u0001"+ + "\u0000\u0000\u0000\n\u0169\u0001\u0000\u0000\u0000\u000b\u016b\u0001\u0000"+ + "\u0000\u0000\u000b\u016d\u0001\u0000\u0000\u0000\u000b\u016f\u0001\u0000"+ + "\u0000\u0000\u000b\u0171\u0001\u0000\u0000\u0000\u000b\u0173\u0001\u0000"+ + "\u0000\u0000\u000b\u0175\u0001\u0000\u0000\u0000\u000b\u0177\u0001\u0000"+ + "\u0000\u0000\u000b\u0179\u0001\u0000\u0000\u0000\u000b\u017b\u0001\u0000"+ + "\u0000\u0000\u000b\u017d\u0001\u0000\u0000\u0000\f\u017f\u0001\u0000\u0000"+ + "\u0000\f\u0181\u0001\u0000\u0000\u0000\f\u0183\u0001\u0000\u0000\u0000"+ + "\f\u0185\u0001\u0000\u0000\u0000\f\u0187\u0001\u0000\u0000\u0000\f\u0189"+ + "\u0001\u0000\u0000\u0000\f\u018b\u0001\u0000\u0000\u0000\r\u018d\u0001"+ + "\u0000\u0000\u0000\r\u018f\u0001\u0000\u0000\u0000\r\u0191\u0001\u0000"+ + "\u0000\u0000\r\u0193\u0001\u0000\u0000\u0000\r\u0195\u0001\u0000\u0000"+ + "\u0000\r\u0197\u0001\u0000\u0000\u0000\r\u0199\u0001\u0000\u0000\u0000"+ + "\r\u019b\u0001\u0000\u0000\u0000\r\u019d\u0001\u0000\u0000\u0000\r\u019f"+ + "\u0001\u0000\u0000\u0000\r\u01a1\u0001\u0000\u0000\u0000\r\u01a3\u0001"+ + "\u0000\u0000\u0000\r\u01a5\u0001\u0000\u0000\u0000\u000e\u01a7\u0001\u0000"+ + "\u0000\u0000\u000e\u01a9\u0001\u0000\u0000\u0000\u000e\u01ab\u0001\u0000"+ + "\u0000\u0000\u000e\u01ad\u0001\u0000\u0000\u0000\u000e\u01af\u0001\u0000"+ + "\u0000\u0000\u000e\u01b1\u0001\u0000\u0000\u0000\u000f\u01b3\u0001\u0000"+ + "\u0000\u0000\u000f\u01b5\u0001\u0000\u0000\u0000\u000f\u01b7\u0001\u0000"+ + "\u0000\u0000\u000f\u01b9\u0001\u0000\u0000\u0000\u000f\u01bb\u0001\u0000"+ + "\u0000\u0000\u000f\u01bd\u0001\u0000\u0000\u0000\u000f\u01bf\u0001\u0000"+ + "\u0000\u0000\u000f\u01c1\u0001\u0000\u0000\u0000\u000f\u01c3\u0001\u0000"+ + "\u0000\u0000\u0010\u01c5\u0001\u0000\u0000\u0000\u0010\u01c7\u0001\u0000"+ + "\u0000\u0000\u0010\u01c9\u0001\u0000\u0000\u0000\u0010\u01cb\u0001\u0000"+ + "\u0000\u0000\u0010\u01cd\u0001\u0000\u0000\u0000\u0010\u01cf\u0001\u0000"+ + "\u0000\u0000\u0010\u01d1\u0001\u0000\u0000\u0000\u0010\u01d3\u0001\u0000"+ + "\u0000\u0000\u0010\u01d5\u0001\u0000\u0000\u0000\u0010\u01d7\u0001\u0000"+ + "\u0000\u0000\u0011\u01d9\u0001\u0000\u0000\u0000\u0013\u01e3\u0001\u0000"+ + "\u0000\u0000\u0015\u01ea\u0001\u0000\u0000\u0000\u0017\u01f3\u0001\u0000"+ + "\u0000\u0000\u0019\u01fa\u0001\u0000\u0000\u0000\u001b\u0204\u0001\u0000"+ + "\u0000\u0000\u001d\u020b\u0001\u0000\u0000\u0000\u001f\u0212\u0001\u0000"+ + "\u0000\u0000!\u0219\u0001\u0000\u0000\u0000#\u0221\u0001\u0000\u0000\u0000"+ + "%\u022d\u0001\u0000\u0000\u0000\'\u0236\u0001\u0000\u0000\u0000)\u023c"+ + "\u0001\u0000\u0000\u0000+\u0243\u0001\u0000\u0000\u0000-\u024a\u0001\u0000"+ + "\u0000\u0000/\u0252\u0001\u0000\u0000\u00001\u025a\u0001\u0000\u0000\u0000"+ + "3\u0263\u0001\u0000\u0000\u00005\u0273\u0001\u0000\u0000\u00007\u0282"+ + "\u0001\u0000\u0000\u00009\u028e\u0001\u0000\u0000\u0000;\u0299\u0001\u0000"+ + "\u0000\u0000=\u02a1\u0001\u0000\u0000\u0000?\u02a9\u0001\u0000\u0000\u0000"+ + "A\u02b3\u0001\u0000\u0000\u0000C\u02b9\u0001\u0000\u0000\u0000E\u02ca"+ + "\u0001\u0000\u0000\u0000G\u02da\u0001\u0000\u0000\u0000I\u02e0\u0001\u0000"+ + 
"\u0000\u0000K\u02e4\u0001\u0000\u0000\u0000M\u02e6\u0001\u0000\u0000\u0000"+ + "O\u02e8\u0001\u0000\u0000\u0000Q\u02eb\u0001\u0000\u0000\u0000S\u02ed"+ + "\u0001\u0000\u0000\u0000U\u02f6\u0001\u0000\u0000\u0000W\u02f8\u0001\u0000"+ + "\u0000\u0000Y\u02fd\u0001\u0000\u0000\u0000[\u02ff\u0001\u0000\u0000\u0000"+ + "]\u0304\u0001\u0000\u0000\u0000_\u0323\u0001\u0000\u0000\u0000a\u0326"+ + "\u0001\u0000\u0000\u0000c\u0354\u0001\u0000\u0000\u0000e\u0356\u0001\u0000"+ + "\u0000\u0000g\u0359\u0001\u0000\u0000\u0000i\u035d\u0001\u0000\u0000\u0000"+ + "k\u0361\u0001\u0000\u0000\u0000m\u0363\u0001\u0000\u0000\u0000o\u0366"+ + "\u0001\u0000\u0000\u0000q\u0368\u0001\u0000\u0000\u0000s\u036a\u0001\u0000"+ + "\u0000\u0000u\u036f\u0001\u0000\u0000\u0000w\u0371\u0001\u0000\u0000\u0000"+ + "y\u0377\u0001\u0000\u0000\u0000{\u037d\u0001\u0000\u0000\u0000}\u0380"+ + "\u0001\u0000\u0000\u0000\u007f\u0383\u0001\u0000\u0000\u0000\u0081\u0388"+ + "\u0001\u0000\u0000\u0000\u0083\u038d\u0001\u0000\u0000\u0000\u0085\u038f"+ + "\u0001\u0000\u0000\u0000\u0087\u0393\u0001\u0000\u0000\u0000\u0089\u0398"+ + "\u0001\u0000\u0000\u0000\u008b\u039e\u0001\u0000\u0000\u0000\u008d\u03a1"+ + "\u0001\u0000\u0000\u0000\u008f\u03a3\u0001\u0000\u0000\u0000\u0091\u03a9"+ + "\u0001\u0000\u0000\u0000\u0093\u03ab\u0001\u0000\u0000\u0000\u0095\u03b0"+ + "\u0001\u0000\u0000\u0000\u0097\u03b3\u0001\u0000\u0000\u0000\u0099\u03b6"+ + "\u0001\u0000\u0000\u0000\u009b\u03b9\u0001\u0000\u0000\u0000\u009d\u03bb"+ + "\u0001\u0000\u0000\u0000\u009f\u03be\u0001\u0000\u0000\u0000\u00a1\u03c0"+ + "\u0001\u0000\u0000\u0000\u00a3\u03c3\u0001\u0000\u0000\u0000\u00a5\u03c5"+ + "\u0001\u0000\u0000\u0000\u00a7\u03c7\u0001\u0000\u0000\u0000\u00a9\u03c9"+ + "\u0001\u0000\u0000\u0000\u00ab\u03cb\u0001\u0000\u0000\u0000\u00ad\u03cd"+ + "\u0001\u0000\u0000\u0000\u00af\u03cf\u0001\u0000\u0000\u0000\u00b1\u03d1"+ + "\u0001\u0000\u0000\u0000\u00b3\u03e6\u0001\u0000\u0000\u0000\u00b5\u03e8"+ + "\u0001\u0000\u0000\u0000\u00b7\u03ed\u0001\u0000\u0000\u0000\u00b9\u0402"+ + "\u0001\u0000\u0000\u0000\u00bb\u0404\u0001\u0000\u0000\u0000\u00bd\u040c"+ + "\u0001\u0000\u0000\u0000\u00bf\u040e\u0001\u0000\u0000\u0000\u00c1\u0412"+ + "\u0001\u0000\u0000\u0000\u00c3\u0416\u0001\u0000\u0000\u0000\u00c5\u041a"+ + "\u0001\u0000\u0000\u0000\u00c7\u041f\u0001\u0000\u0000\u0000\u00c9\u0424"+ + "\u0001\u0000\u0000\u0000\u00cb\u0428\u0001\u0000\u0000\u0000\u00cd\u042c"+ + "\u0001\u0000\u0000\u0000\u00cf\u0430\u0001\u0000\u0000\u0000\u00d1\u0435"+ + "\u0001\u0000\u0000\u0000\u00d3\u0439\u0001\u0000\u0000\u0000\u00d5\u043d"+ + "\u0001\u0000\u0000\u0000\u00d7\u0441\u0001\u0000\u0000\u0000\u00d9\u0445"+ + "\u0001\u0000\u0000\u0000\u00db\u0449\u0001\u0000\u0000\u0000\u00dd\u0455"+ + "\u0001\u0000\u0000\u0000\u00df\u0458\u0001\u0000\u0000\u0000\u00e1\u045c"+ + "\u0001\u0000\u0000\u0000\u00e3\u0460\u0001\u0000\u0000\u0000\u00e5\u0464"+ + "\u0001\u0000\u0000\u0000\u00e7\u0468\u0001\u0000\u0000\u0000\u00e9\u046c"+ + "\u0001\u0000\u0000\u0000\u00eb\u0470\u0001\u0000\u0000\u0000\u00ed\u0475"+ + "\u0001\u0000\u0000\u0000\u00ef\u0479\u0001\u0000\u0000\u0000\u00f1\u047d"+ + "\u0001\u0000\u0000\u0000\u00f3\u0481\u0001\u0000\u0000\u0000\u00f5\u0489"+ + "\u0001\u0000\u0000\u0000\u00f7\u049e\u0001\u0000\u0000\u0000\u00f9\u04a2"+ + "\u0001\u0000\u0000\u0000\u00fb\u04a6\u0001\u0000\u0000\u0000\u00fd\u04aa"+ + "\u0001\u0000\u0000\u0000\u00ff\u04ae\u0001\u0000\u0000\u0000\u0101\u04b2"+ + "\u0001\u0000\u0000\u0000\u0103\u04b7\u0001\u0000\u0000\u0000\u0105\u04bb"+ + 
"\u0001\u0000\u0000\u0000\u0107\u04bf\u0001\u0000\u0000\u0000\u0109\u04c3"+ + "\u0001\u0000\u0000\u0000\u010b\u04c7\u0001\u0000\u0000\u0000\u010d\u04cb"+ + "\u0001\u0000\u0000\u0000\u010f\u04ce\u0001\u0000\u0000\u0000\u0111\u04d2"+ + "\u0001\u0000\u0000\u0000\u0113\u04d6\u0001\u0000\u0000\u0000\u0115\u04da"+ + "\u0001\u0000\u0000\u0000\u0117\u04de\u0001\u0000\u0000\u0000\u0119\u04e3"+ + "\u0001\u0000\u0000\u0000\u011b\u04e8\u0001\u0000\u0000\u0000\u011d\u04ed"+ + "\u0001\u0000\u0000\u0000\u011f\u04f4\u0001\u0000\u0000\u0000\u0121\u04fd"+ + "\u0001\u0000\u0000\u0000\u0123\u0504\u0001\u0000\u0000\u0000\u0125\u0508"+ + "\u0001\u0000\u0000\u0000\u0127\u050c\u0001\u0000\u0000\u0000\u0129\u0510"+ + "\u0001\u0000\u0000\u0000\u012b\u0514\u0001\u0000\u0000\u0000\u012d\u051a"+ + "\u0001\u0000\u0000\u0000\u012f\u051e\u0001\u0000\u0000\u0000\u0131\u0522"+ + "\u0001\u0000\u0000\u0000\u0133\u0526\u0001\u0000\u0000\u0000\u0135\u052a"+ + "\u0001\u0000\u0000\u0000\u0137\u052e\u0001\u0000\u0000\u0000\u0139\u0532"+ + "\u0001\u0000\u0000\u0000\u013b\u0536\u0001\u0000\u0000\u0000\u013d\u053a"+ + "\u0001\u0000\u0000\u0000\u013f\u053e\u0001\u0000\u0000\u0000\u0141\u0542"+ + "\u0001\u0000\u0000\u0000\u0143\u0546\u0001\u0000\u0000\u0000\u0145\u054b"+ + "\u0001\u0000\u0000\u0000\u0147\u054f\u0001\u0000\u0000\u0000\u0149\u0553"+ + "\u0001\u0000\u0000\u0000\u014b\u0557\u0001\u0000\u0000\u0000\u014d\u055b"+ + "\u0001\u0000\u0000\u0000\u014f\u055f\u0001\u0000\u0000\u0000\u0151\u0563"+ + "\u0001\u0000\u0000\u0000\u0153\u0567\u0001\u0000\u0000\u0000\u0155\u056b"+ + "\u0001\u0000\u0000\u0000\u0157\u0570\u0001\u0000\u0000\u0000\u0159\u0575"+ + "\u0001\u0000\u0000\u0000\u015b\u0579\u0001\u0000\u0000\u0000\u015d\u057d"+ + "\u0001\u0000\u0000\u0000\u015f\u0581\u0001\u0000\u0000\u0000\u0161\u0586"+ + "\u0001\u0000\u0000\u0000\u0163\u058f\u0001\u0000\u0000\u0000\u0165\u0593"+ + "\u0001\u0000\u0000\u0000\u0167\u0597\u0001\u0000\u0000\u0000\u0169\u059b"+ + "\u0001\u0000\u0000\u0000\u016b\u059f\u0001\u0000\u0000\u0000\u016d\u05a4"+ + "\u0001\u0000\u0000\u0000\u016f\u05a8\u0001\u0000\u0000\u0000\u0171\u05ac"+ + "\u0001\u0000\u0000\u0000\u0173\u05b0\u0001\u0000\u0000\u0000\u0175\u05b5"+ + "\u0001\u0000\u0000\u0000\u0177\u05b9\u0001\u0000\u0000\u0000\u0179\u05bd"+ + "\u0001\u0000\u0000\u0000\u017b\u05c1\u0001\u0000\u0000\u0000\u017d\u05c5"+ + "\u0001\u0000\u0000\u0000\u017f\u05c9\u0001\u0000\u0000\u0000\u0181\u05cf"+ + "\u0001\u0000\u0000\u0000\u0183\u05d3\u0001\u0000\u0000\u0000\u0185\u05d7"+ + "\u0001\u0000\u0000\u0000\u0187\u05db\u0001\u0000\u0000\u0000\u0189\u05df"+ + "\u0001\u0000\u0000\u0000\u018b\u05e3\u0001\u0000\u0000\u0000\u018d\u05e7"+ + "\u0001\u0000\u0000\u0000\u018f\u05ec\u0001\u0000\u0000\u0000\u0191\u05f1"+ + "\u0001\u0000\u0000\u0000\u0193\u05f5\u0001\u0000\u0000\u0000\u0195\u05fb"+ + "\u0001\u0000\u0000\u0000\u0197\u0604\u0001\u0000\u0000\u0000\u0199\u0608"+ + "\u0001\u0000\u0000\u0000\u019b\u060c\u0001\u0000\u0000\u0000\u019d\u0610"+ + "\u0001\u0000\u0000\u0000\u019f\u0614\u0001\u0000\u0000\u0000\u01a1\u0618"+ + "\u0001\u0000\u0000\u0000\u01a3\u061c\u0001\u0000\u0000\u0000\u01a5\u0620"+ + "\u0001\u0000\u0000\u0000\u01a7\u0624\u0001\u0000\u0000\u0000\u01a9\u0629"+ + "\u0001\u0000\u0000\u0000\u01ab\u062f\u0001\u0000\u0000\u0000\u01ad\u0635"+ + "\u0001\u0000\u0000\u0000\u01af\u0639\u0001\u0000\u0000\u0000\u01b1\u063d"+ + "\u0001\u0000\u0000\u0000\u01b3\u0641\u0001\u0000\u0000\u0000\u01b5\u0647"+ + "\u0001\u0000\u0000\u0000\u01b7\u064d\u0001\u0000\u0000\u0000\u01b9\u0651"+ + 
"\u0001\u0000\u0000\u0000\u01bb\u0655\u0001\u0000\u0000\u0000\u01bd\u0659"+ + "\u0001\u0000\u0000\u0000\u01bf\u065f\u0001\u0000\u0000\u0000\u01c1\u0665"+ + "\u0001\u0000\u0000\u0000\u01c3\u066b\u0001\u0000\u0000\u0000\u01c5\u0670"+ + "\u0001\u0000\u0000\u0000\u01c7\u0675\u0001\u0000\u0000\u0000\u01c9\u0679"+ + "\u0001\u0000\u0000\u0000\u01cb\u067d\u0001\u0000\u0000\u0000\u01cd\u0681"+ + "\u0001\u0000\u0000\u0000\u01cf\u0685\u0001\u0000\u0000\u0000\u01d1\u0689"+ + "\u0001\u0000\u0000\u0000\u01d3\u068d\u0001\u0000\u0000\u0000\u01d5\u0691"+ + "\u0001\u0000\u0000\u0000\u01d7\u0695\u0001\u0000\u0000\u0000\u01d9\u01da"+ + "\u0007\u0000\u0000\u0000\u01da\u01db\u0007\u0001\u0000\u0000\u01db\u01dc"+ + "\u0007\u0002\u0000\u0000\u01dc\u01dd\u0007\u0002\u0000\u0000\u01dd\u01de"+ + "\u0007\u0003\u0000\u0000\u01de\u01df\u0007\u0004\u0000\u0000\u01df\u01e0"+ + "\u0007\u0005\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01e2"+ + "\u0006\u0000\u0000\u0000\u01e2\u0012\u0001\u0000\u0000\u0000\u01e3\u01e4"+ + "\u0007\u0000\u0000\u0000\u01e4\u01e5\u0007\u0006\u0000\u0000\u01e5\u01e6"+ + "\u0007\u0007\u0000\u0000\u01e6\u01e7\u0007\b\u0000\u0000\u01e7\u01e8\u0001"+ + "\u0000\u0000\u0000\u01e8\u01e9\u0006\u0001\u0001\u0000\u01e9\u0014\u0001"+ + "\u0000\u0000\u0000\u01ea\u01eb\u0007\u0003\u0000\u0000\u01eb\u01ec\u0007"+ + "\t\u0000\u0000\u01ec\u01ed\u0007\u0006\u0000\u0000\u01ed\u01ee\u0007\u0001"+ + "\u0000\u0000\u01ee\u01ef\u0007\u0004\u0000\u0000\u01ef\u01f0\u0007\n\u0000"+ + "\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006\u0002\u0002"+ + "\u0000\u01f2\u0016\u0001\u0000\u0000\u0000\u01f3\u01f4\u0007\u0003\u0000"+ + "\u0000\u01f4\u01f5\u0007\u000b\u0000\u0000\u01f5\u01f6\u0007\f\u0000\u0000"+ + "\u01f6\u01f7\u0007\r\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8"+ + "\u01f9\u0006\u0003\u0000\u0000\u01f9\u0018\u0001\u0000\u0000\u0000\u01fa"+ + "\u01fb\u0007\u0003\u0000\u0000\u01fb\u01fc\u0007\u000e\u0000\u0000\u01fc"+ + "\u01fd\u0007\b\u0000\u0000\u01fd\u01fe\u0007\r\u0000\u0000\u01fe\u01ff"+ + "\u0007\f\u0000\u0000\u01ff\u0200\u0007\u0001\u0000\u0000\u0200\u0201\u0007"+ + "\t\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0202\u0203\u0006\u0004"+ + "\u0003\u0000\u0203\u001a\u0001\u0000\u0000\u0000\u0204\u0205\u0007\u000f"+ + "\u0000\u0000\u0205\u0206\u0007\u0006\u0000\u0000\u0206\u0207\u0007\u0007"+ + "\u0000\u0000\u0207\u0208\u0007\u0010\u0000\u0000\u0208\u0209\u0001\u0000"+ + "\u0000\u0000\u0209\u020a\u0006\u0005\u0004\u0000\u020a\u001c\u0001\u0000"+ + "\u0000\u0000\u020b\u020c\u0007\u0011\u0000\u0000\u020c\u020d\u0007\u0006"+ + "\u0000\u0000\u020d\u020e\u0007\u0007\u0000\u0000\u020e\u020f\u0007\u0012"+ + "\u0000\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210\u0211\u0006\u0006"+ + "\u0000\u0000\u0211\u001e\u0001\u0000\u0000\u0000\u0212\u0213\u0007\u0012"+ + "\u0000\u0000\u0213\u0214\u0007\u0003\u0000\u0000\u0214\u0215\u0007\u0003"+ + "\u0000\u0000\u0215\u0216\u0007\b\u0000\u0000\u0216\u0217\u0001\u0000\u0000"+ + "\u0000\u0217\u0218\u0006\u0007\u0001\u0000\u0218 \u0001\u0000\u0000\u0000"+ + "\u0219\u021a\u0007\r\u0000\u0000\u021a\u021b\u0007\u0001\u0000\u0000\u021b"+ + "\u021c\u0007\u0010\u0000\u0000\u021c\u021d\u0007\u0001\u0000\u0000\u021d"+ + "\u021e\u0007\u0005\u0000\u0000\u021e\u021f\u0001\u0000\u0000\u0000\u021f"+ + "\u0220\u0006\b\u0000\u0000\u0220\"\u0001\u0000\u0000\u0000\u0221\u0222"+ + "\u0007\u0010\u0000\u0000\u0222\u0223\u0007\u000b\u0000\u0000\u0223\u0224"+ + "\u0005_\u0000\u0000\u0224\u0225\u0007\u0003\u0000\u0000\u0225\u0226\u0007"+ + 
"\u000e\u0000\u0000\u0226\u0227\u0007\b\u0000\u0000\u0227\u0228\u0007\f"+ + "\u0000\u0000\u0228\u0229\u0007\t\u0000\u0000\u0229\u022a\u0007\u0000\u0000"+ + "\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b\u022c\u0006\t\u0005\u0000"+ + "\u022c$\u0001\u0000\u0000\u0000\u022d\u022e\u0007\u0006\u0000\u0000\u022e"+ + "\u022f\u0007\u0003\u0000\u0000\u022f\u0230\u0007\t\u0000\u0000\u0230\u0231"+ + "\u0007\f\u0000\u0000\u0231\u0232\u0007\u0010\u0000\u0000\u0232\u0233\u0007"+ + "\u0003\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000\u0234\u0235\u0006"+ + "\n\u0006\u0000\u0235&\u0001\u0000\u0000\u0000\u0236\u0237\u0007\u0006"+ + "\u0000\u0000\u0237\u0238\u0007\u0007\u0000\u0000\u0238\u0239\u0007\u0013"+ + "\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000\u023a\u023b\u0006\u000b"+ + "\u0000\u0000\u023b(\u0001\u0000\u0000\u0000\u023c\u023d\u0007\u0002\u0000"+ + "\u0000\u023d\u023e\u0007\n\u0000\u0000\u023e\u023f\u0007\u0007\u0000\u0000"+ + "\u023f\u0240\u0007\u0013\u0000\u0000\u0240\u0241\u0001\u0000\u0000\u0000"+ + "\u0241\u0242\u0006\f\u0007\u0000\u0242*\u0001\u0000\u0000\u0000\u0243"+ + "\u0244\u0007\u0002\u0000\u0000\u0244\u0245\u0007\u0007\u0000\u0000\u0245"+ + "\u0246\u0007\u0006\u0000\u0000\u0246\u0247\u0007\u0005\u0000\u0000\u0247"+ + "\u0248\u0001\u0000\u0000\u0000\u0248\u0249\u0006\r\u0000\u0000\u0249,"+ + "\u0001\u0000\u0000\u0000\u024a\u024b\u0007\u0002\u0000\u0000\u024b\u024c"+ + "\u0007\u0005\u0000\u0000\u024c\u024d\u0007\f\u0000\u0000\u024d\u024e\u0007"+ + "\u0005\u0000\u0000\u024e\u024f\u0007\u0002\u0000\u0000\u024f\u0250\u0001"+ + "\u0000\u0000\u0000\u0250\u0251\u0006\u000e\u0000\u0000\u0251.\u0001\u0000"+ + "\u0000\u0000\u0252\u0253\u0007\u0013\u0000\u0000\u0253\u0254\u0007\n\u0000"+ + "\u0000\u0254\u0255\u0007\u0003\u0000\u0000\u0255\u0256\u0007\u0006\u0000"+ + "\u0000\u0256\u0257\u0007\u0003\u0000\u0000\u0257\u0258\u0001\u0000\u0000"+ + "\u0000\u0258\u0259\u0006\u000f\u0000\u0000\u02590\u0001\u0000\u0000\u0000"+ + "\u025a\u025b\u0007\r\u0000\u0000\u025b\u025c\u0007\u0007\u0000\u0000\u025c"+ + "\u025d\u0007\u0007\u0000\u0000\u025d\u025e\u0007\u0012\u0000\u0000\u025e"+ + "\u025f\u0007\u0014\u0000\u0000\u025f\u0260\u0007\b\u0000\u0000\u0260\u0261"+ + "\u0001\u0000\u0000\u0000\u0261\u0262\u0006\u0010\b\u0000\u02622\u0001"+ + "\u0000\u0000\u0000\u0263\u0264\u0004\u0011\u0000\u0000\u0264\u0265\u0007"+ + "\u0004\u0000\u0000\u0265\u0266\u0007\n\u0000\u0000\u0266\u0267\u0007\f"+ + "\u0000\u0000\u0267\u0268\u0007\t\u0000\u0000\u0268\u0269\u0007\u0011\u0000"+ + "\u0000\u0269\u026a\u0007\u0003\u0000\u0000\u026a\u026b\u0005_\u0000\u0000"+ + "\u026b\u026c\u0007\b\u0000\u0000\u026c\u026d\u0007\u0007\u0000\u0000\u026d"+ + "\u026e\u0007\u0001\u0000\u0000\u026e\u026f\u0007\t\u0000\u0000\u026f\u0270"+ + "\u0007\u0005\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u0272"+ + "\u0006\u0011\t\u0000\u02724\u0001\u0000\u0000\u0000\u0273\u0274\u0004"+ + "\u0012\u0001\u0000\u0274\u0275\u0007\u0001\u0000\u0000\u0275\u0276\u0007"+ + "\t\u0000\u0000\u0276\u0277\u0007\r\u0000\u0000\u0277\u0278\u0007\u0001"+ + "\u0000\u0000\u0278\u0279\u0007\t\u0000\u0000\u0279\u027a\u0007\u0003\u0000"+ + "\u0000\u027a\u027b\u0007\u0002\u0000\u0000\u027b\u027c\u0007\u0005\u0000"+ + "\u0000\u027c\u027d\u0007\f\u0000\u0000\u027d\u027e\u0007\u0005\u0000\u0000"+ + "\u027e\u027f\u0007\u0002\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000"+ + "\u0280\u0281\u0006\u0012\u0000\u0000\u02816\u0001\u0000\u0000\u0000\u0282"+ + "\u0283\u0004\u0013\u0002\u0000\u0283\u0284\u0007\r\u0000\u0000\u0284\u0285"+ + 
"\u0007\u0007\u0000\u0000\u0285\u0286\u0007\u0007\u0000\u0000\u0286\u0287"+ + "\u0007\u0012\u0000\u0000\u0287\u0288\u0007\u0014\u0000\u0000\u0288\u0289"+ + "\u0007\b\u0000\u0000\u0289\u028a\u0005_\u0000\u0000\u028a\u028b\u0005"+ + "\u8001\uf414\u0000\u0000\u028b\u028c\u0001\u0000\u0000\u0000\u028c\u028d"+ + "\u0006\u0013\n\u0000\u028d8\u0001\u0000\u0000\u0000\u028e\u028f\u0004"+ + "\u0014\u0003\u0000\u028f\u0290\u0007\u0010\u0000\u0000\u0290\u0291\u0007"+ + "\u0003\u0000\u0000\u0291\u0292\u0007\u0005\u0000\u0000\u0292\u0293\u0007"+ + "\u0006\u0000\u0000\u0293\u0294\u0007\u0001\u0000\u0000\u0294\u0295\u0007"+ + "\u0004\u0000\u0000\u0295\u0296\u0007\u0002\u0000\u0000\u0296\u0297\u0001"+ + "\u0000\u0000\u0000\u0297\u0298\u0006\u0014\u000b\u0000\u0298:\u0001\u0000"+ + "\u0000\u0000\u0299\u029a\u0004\u0015\u0004\u0000\u029a\u029b\u0007\u000f"+ + "\u0000\u0000\u029b\u029c\u0007\u0014\u0000\u0000\u029c\u029d\u0007\r\u0000"+ + "\u0000\u029d\u029e\u0007\r\u0000\u0000\u029e\u029f\u0001\u0000\u0000\u0000"+ + "\u029f\u02a0\u0006\u0015\b\u0000\u02a0<\u0001\u0000\u0000\u0000\u02a1"+ + "\u02a2\u0004\u0016\u0005\u0000\u02a2\u02a3\u0007\r\u0000\u0000\u02a3\u02a4"+ + "\u0007\u0003\u0000\u0000\u02a4\u02a5\u0007\u000f\u0000\u0000\u02a5\u02a6"+ + "\u0007\u0005\u0000\u0000\u02a6\u02a7\u0001\u0000\u0000\u0000\u02a7\u02a8"+ + "\u0006\u0016\b\u0000\u02a8>\u0001\u0000\u0000\u0000\u02a9\u02aa\u0004"+ + "\u0017\u0006\u0000\u02aa\u02ab\u0007\u0006\u0000\u0000\u02ab\u02ac\u0007"+ + "\u0001\u0000\u0000\u02ac\u02ad\u0007\u0011\u0000\u0000\u02ad\u02ae\u0007"+ + "\n\u0000\u0000\u02ae\u02af\u0007\u0005\u0000\u0000\u02af\u02b0\u0001\u0000"+ + "\u0000\u0000\u02b0\u02b1\u0006\u0017\b\u0000\u02b1@\u0001\u0000\u0000"+ + "\u0000\u02b2\u02b4\b\u0015\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000\u0000"+ + "\u02b4\u02b5\u0001\u0000\u0000\u0000\u02b5\u02b3\u0001\u0000\u0000\u0000"+ + "\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000\u0000"+ + "\u02b7\u02b8\u0006\u0018\u0000\u0000\u02b8B\u0001\u0000\u0000\u0000\u02b9"+ + "\u02ba\u0005/\u0000\u0000\u02ba\u02bb\u0005/\u0000\u0000\u02bb\u02bf\u0001"+ + "\u0000\u0000\u0000\u02bc\u02be\b\u0016\u0000\u0000\u02bd\u02bc\u0001\u0000"+ + "\u0000\u0000\u02be\u02c1\u0001\u0000\u0000\u0000\u02bf\u02bd\u0001\u0000"+ + "\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c3\u0001\u0000"+ + "\u0000\u0000\u02c1\u02bf\u0001\u0000\u0000\u0000\u02c2\u02c4\u0005\r\u0000"+ + "\u0000\u02c3\u02c2\u0001\u0000\u0000\u0000\u02c3\u02c4\u0001\u0000\u0000"+ + "\u0000\u02c4\u02c6\u0001\u0000\u0000\u0000\u02c5\u02c7\u0005\n\u0000\u0000"+ + "\u02c6\u02c5\u0001\u0000\u0000\u0000\u02c6\u02c7\u0001\u0000\u0000\u0000"+ + "\u02c7\u02c8\u0001\u0000\u0000\u0000\u02c8\u02c9\u0006\u0019\f\u0000\u02c9"+ + "D\u0001\u0000\u0000\u0000\u02ca\u02cb\u0005/\u0000\u0000\u02cb\u02cc\u0005"+ + "*\u0000\u0000\u02cc\u02d1\u0001\u0000\u0000\u0000\u02cd\u02d0\u0003E\u001a"+ + "\u0000\u02ce\u02d0\t\u0000\u0000\u0000\u02cf\u02cd\u0001\u0000\u0000\u0000"+ + "\u02cf\u02ce\u0001\u0000\u0000\u0000\u02d0\u02d3\u0001\u0000\u0000\u0000"+ + "\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d1\u02cf\u0001\u0000\u0000\u0000"+ + "\u02d2\u02d4\u0001\u0000\u0000\u0000\u02d3\u02d1\u0001\u0000\u0000\u0000"+ + "\u02d4\u02d5\u0005*\u0000\u0000\u02d5\u02d6\u0005/\u0000\u0000\u02d6\u02d7"+ + "\u0001\u0000\u0000\u0000\u02d7\u02d8\u0006\u001a\f\u0000\u02d8F\u0001"+ + "\u0000\u0000\u0000\u02d9\u02db\u0007\u0017\u0000\u0000\u02da\u02d9\u0001"+ + "\u0000\u0000\u0000\u02db\u02dc\u0001\u0000\u0000\u0000\u02dc\u02da\u0001"+ + 
"\u0000\u0000\u0000\u02dc\u02dd\u0001\u0000\u0000\u0000\u02dd\u02de\u0001"+ + "\u0000\u0000\u0000\u02de\u02df\u0006\u001b\f\u0000\u02dfH\u0001\u0000"+ + "\u0000\u0000\u02e0\u02e1\u0005|\u0000\u0000\u02e1\u02e2\u0001\u0000\u0000"+ + "\u0000\u02e2\u02e3\u0006\u001c\r\u0000\u02e3J\u0001\u0000\u0000\u0000"+ + "\u02e4\u02e5\u0007\u0018\u0000\u0000\u02e5L\u0001\u0000\u0000\u0000\u02e6"+ + "\u02e7\u0007\u0019\u0000\u0000\u02e7N\u0001\u0000\u0000\u0000\u02e8\u02e9"+ + "\u0005\\\u0000\u0000\u02e9\u02ea\u0007\u001a\u0000\u0000\u02eaP\u0001"+ + "\u0000\u0000\u0000\u02eb\u02ec\b\u001b\u0000\u0000\u02ecR\u0001\u0000"+ + "\u0000\u0000\u02ed\u02ef\u0007\u0003\u0000\u0000\u02ee\u02f0\u0007\u001c"+ + "\u0000\u0000\u02ef\u02ee\u0001\u0000\u0000\u0000\u02ef\u02f0\u0001\u0000"+ + "\u0000\u0000\u02f0\u02f2\u0001\u0000\u0000\u0000\u02f1\u02f3\u0003K\u001d"+ + "\u0000\u02f2\u02f1\u0001\u0000\u0000\u0000\u02f3\u02f4\u0001\u0000\u0000"+ + "\u0000\u02f4\u02f2\u0001\u0000\u0000\u0000\u02f4\u02f5\u0001\u0000\u0000"+ + "\u0000\u02f5T\u0001\u0000\u0000\u0000\u02f6\u02f7\u0005@\u0000\u0000\u02f7"+ + "V\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005`\u0000\u0000\u02f9X\u0001"+ + "\u0000\u0000\u0000\u02fa\u02fe\b\u001d\u0000\u0000\u02fb\u02fc\u0005`"+ + "\u0000\u0000\u02fc\u02fe\u0005`\u0000\u0000\u02fd\u02fa\u0001\u0000\u0000"+ + "\u0000\u02fd\u02fb\u0001\u0000\u0000\u0000\u02feZ\u0001\u0000\u0000\u0000"+ + "\u02ff\u0300\u0005_\u0000\u0000\u0300\\\u0001\u0000\u0000\u0000\u0301"+ + "\u0305\u0003M\u001e\u0000\u0302\u0305\u0003K\u001d\u0000\u0303\u0305\u0003"+ + "[%\u0000\u0304\u0301\u0001\u0000\u0000\u0000\u0304\u0302\u0001\u0000\u0000"+ + "\u0000\u0304\u0303\u0001\u0000\u0000\u0000\u0305^\u0001\u0000\u0000\u0000"+ + "\u0306\u030b\u0005\"\u0000\u0000\u0307\u030a\u0003O\u001f\u0000\u0308"+ + "\u030a\u0003Q \u0000\u0309\u0307\u0001\u0000\u0000\u0000\u0309\u0308\u0001"+ + "\u0000\u0000\u0000\u030a\u030d\u0001\u0000\u0000\u0000\u030b\u0309\u0001"+ + "\u0000\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c\u030e\u0001"+ + "\u0000\u0000\u0000\u030d\u030b\u0001\u0000\u0000\u0000\u030e\u0324\u0005"+ + "\"\u0000\u0000\u030f\u0310\u0005\"\u0000\u0000\u0310\u0311\u0005\"\u0000"+ + "\u0000\u0311\u0312\u0005\"\u0000\u0000\u0312\u0316\u0001\u0000\u0000\u0000"+ + "\u0313\u0315\b\u0016\u0000\u0000\u0314\u0313\u0001\u0000\u0000\u0000\u0315"+ + "\u0318\u0001\u0000\u0000\u0000\u0316\u0317\u0001\u0000\u0000\u0000\u0316"+ + "\u0314\u0001\u0000\u0000\u0000\u0317\u0319\u0001\u0000\u0000\u0000\u0318"+ + "\u0316\u0001\u0000\u0000\u0000\u0319\u031a\u0005\"\u0000\u0000\u031a\u031b"+ + "\u0005\"\u0000\u0000\u031b\u031c\u0005\"\u0000\u0000\u031c\u031e\u0001"+ + "\u0000\u0000\u0000\u031d\u031f\u0005\"\u0000\u0000\u031e\u031d\u0001\u0000"+ + "\u0000\u0000\u031e\u031f\u0001\u0000\u0000\u0000\u031f\u0321\u0001\u0000"+ + "\u0000\u0000\u0320\u0322\u0005\"\u0000\u0000\u0321\u0320\u0001\u0000\u0000"+ + "\u0000\u0321\u0322\u0001\u0000\u0000\u0000\u0322\u0324\u0001\u0000\u0000"+ + "\u0000\u0323\u0306\u0001\u0000\u0000\u0000\u0323\u030f\u0001\u0000\u0000"+ + "\u0000\u0324`\u0001\u0000\u0000\u0000\u0325\u0327\u0003K\u001d\u0000\u0326"+ + "\u0325\u0001\u0000\u0000\u0000\u0327\u0328\u0001\u0000\u0000\u0000\u0328"+ + "\u0326\u0001\u0000\u0000\u0000\u0328\u0329\u0001\u0000\u0000\u0000\u0329"+ + "b\u0001\u0000\u0000\u0000\u032a\u032c\u0003K\u001d\u0000\u032b\u032a\u0001"+ + "\u0000\u0000\u0000\u032c\u032d\u0001\u0000\u0000\u0000\u032d\u032b\u0001"+ + "\u0000\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e\u032f\u0001"+ + 
"\u0000\u0000\u0000\u032f\u0333\u0003u2\u0000\u0330\u0332\u0003K\u001d"+ + "\u0000\u0331\u0330\u0001\u0000\u0000\u0000\u0332\u0335\u0001\u0000\u0000"+ + "\u0000\u0333\u0331\u0001\u0000\u0000\u0000\u0333\u0334\u0001\u0000\u0000"+ + "\u0000\u0334\u0355\u0001\u0000\u0000\u0000\u0335\u0333\u0001\u0000\u0000"+ + "\u0000\u0336\u0338\u0003u2\u0000\u0337\u0339\u0003K\u001d\u0000\u0338"+ + "\u0337\u0001\u0000\u0000\u0000\u0339\u033a\u0001\u0000\u0000\u0000\u033a"+ + "\u0338\u0001\u0000\u0000\u0000\u033a\u033b\u0001\u0000\u0000\u0000\u033b"+ + "\u0355\u0001\u0000\u0000\u0000\u033c\u033e\u0003K\u001d\u0000\u033d\u033c"+ + "\u0001\u0000\u0000\u0000\u033e\u033f\u0001\u0000\u0000\u0000\u033f\u033d"+ + "\u0001\u0000\u0000\u0000\u033f\u0340\u0001\u0000\u0000\u0000\u0340\u0348"+ + "\u0001\u0000\u0000\u0000\u0341\u0345\u0003u2\u0000\u0342\u0344\u0003K"+ + "\u001d\u0000\u0343\u0342\u0001\u0000\u0000\u0000\u0344\u0347\u0001\u0000"+ + "\u0000\u0000\u0345\u0343\u0001\u0000\u0000\u0000\u0345\u0346\u0001\u0000"+ + "\u0000\u0000\u0346\u0349\u0001\u0000\u0000\u0000\u0347\u0345\u0001\u0000"+ + "\u0000\u0000\u0348\u0341\u0001\u0000\u0000\u0000\u0348\u0349\u0001\u0000"+ + "\u0000\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a\u034b\u0003S!\u0000"+ + "\u034b\u0355\u0001\u0000\u0000\u0000\u034c\u034e\u0003u2\u0000\u034d\u034f"+ + "\u0003K\u001d\u0000\u034e\u034d\u0001\u0000\u0000\u0000\u034f\u0350\u0001"+ + "\u0000\u0000\u0000\u0350\u034e\u0001\u0000\u0000\u0000\u0350\u0351\u0001"+ + "\u0000\u0000\u0000\u0351\u0352\u0001\u0000\u0000\u0000\u0352\u0353\u0003"+ + "S!\u0000\u0353\u0355\u0001\u0000\u0000\u0000\u0354\u032b\u0001\u0000\u0000"+ + "\u0000\u0354\u0336\u0001\u0000\u0000\u0000\u0354\u033d\u0001\u0000\u0000"+ + "\u0000\u0354\u034c\u0001\u0000\u0000\u0000\u0355d\u0001\u0000\u0000\u0000"+ + "\u0356\u0357\u0007\u001e\u0000\u0000\u0357\u0358\u0007\u001f\u0000\u0000"+ + "\u0358f\u0001\u0000\u0000\u0000\u0359\u035a\u0007\f\u0000\u0000\u035a"+ + "\u035b\u0007\t\u0000\u0000\u035b\u035c\u0007\u0000\u0000\u0000\u035ch"+ + "\u0001\u0000\u0000\u0000\u035d\u035e\u0007\f\u0000\u0000\u035e\u035f\u0007"+ + "\u0002\u0000\u0000\u035f\u0360\u0007\u0004\u0000\u0000\u0360j\u0001\u0000"+ + "\u0000\u0000\u0361\u0362\u0005=\u0000\u0000\u0362l\u0001\u0000\u0000\u0000"+ + "\u0363\u0364\u0005:\u0000\u0000\u0364\u0365\u0005:\u0000\u0000\u0365n"+ + "\u0001\u0000\u0000\u0000\u0366\u0367\u0005:\u0000\u0000\u0367p\u0001\u0000"+ + "\u0000\u0000\u0368\u0369\u0005,\u0000\u0000\u0369r\u0001\u0000\u0000\u0000"+ + "\u036a\u036b\u0007\u0000\u0000\u0000\u036b\u036c\u0007\u0003\u0000\u0000"+ + "\u036c\u036d\u0007\u0002\u0000\u0000\u036d\u036e\u0007\u0004\u0000\u0000"+ + "\u036et\u0001\u0000\u0000\u0000\u036f\u0370\u0005.\u0000\u0000\u0370v"+ + "\u0001\u0000\u0000\u0000\u0371\u0372\u0007\u000f\u0000\u0000\u0372\u0373"+ + "\u0007\f\u0000\u0000\u0373\u0374\u0007\r\u0000\u0000\u0374\u0375\u0007"+ + "\u0002\u0000\u0000\u0375\u0376\u0007\u0003\u0000\u0000\u0376x\u0001\u0000"+ + "\u0000\u0000\u0377\u0378\u0007\u000f\u0000\u0000\u0378\u0379\u0007\u0001"+ + "\u0000\u0000\u0379\u037a\u0007\u0006\u0000\u0000\u037a\u037b\u0007\u0002"+ + "\u0000\u0000\u037b\u037c\u0007\u0005\u0000\u0000\u037cz\u0001\u0000\u0000"+ + "\u0000\u037d\u037e\u0007\u0001\u0000\u0000\u037e\u037f\u0007\t\u0000\u0000"+ + "\u037f|\u0001\u0000\u0000\u0000\u0380\u0381\u0007\u0001\u0000\u0000\u0381"+ + "\u0382\u0007\u0002\u0000\u0000\u0382~\u0001\u0000\u0000\u0000\u0383\u0384"+ + "\u0007\r\u0000\u0000\u0384\u0385\u0007\f\u0000\u0000\u0385\u0386\u0007"+ + 
"\u0002\u0000\u0000\u0386\u0387\u0007\u0005\u0000\u0000\u0387\u0080\u0001"+ + "\u0000\u0000\u0000\u0388\u0389\u0007\r\u0000\u0000\u0389\u038a\u0007\u0001"+ + "\u0000\u0000\u038a\u038b\u0007\u0012\u0000\u0000\u038b\u038c\u0007\u0003"+ + "\u0000\u0000\u038c\u0082\u0001\u0000\u0000\u0000\u038d\u038e\u0005(\u0000"+ + "\u0000\u038e\u0084\u0001\u0000\u0000\u0000\u038f\u0390\u0007\t\u0000\u0000"+ + "\u0390\u0391\u0007\u0007\u0000\u0000\u0391\u0392\u0007\u0005\u0000\u0000"+ + "\u0392\u0086\u0001\u0000\u0000\u0000\u0393\u0394\u0007\t\u0000\u0000\u0394"+ + "\u0395\u0007\u0014\u0000\u0000\u0395\u0396\u0007\r\u0000\u0000\u0396\u0397"+ + "\u0007\r\u0000\u0000\u0397\u0088\u0001\u0000\u0000\u0000\u0398\u0399\u0007"+ + "\t\u0000\u0000\u0399\u039a\u0007\u0014\u0000\u0000\u039a\u039b\u0007\r"+ + "\u0000\u0000\u039b\u039c\u0007\r\u0000\u0000\u039c\u039d\u0007\u0002\u0000"+ + "\u0000\u039d\u008a\u0001\u0000\u0000\u0000\u039e\u039f\u0007\u0007\u0000"+ + "\u0000\u039f\u03a0\u0007\u0006\u0000\u0000\u03a0\u008c\u0001\u0000\u0000"+ + "\u0000\u03a1\u03a2\u0005?\u0000\u0000\u03a2\u008e\u0001\u0000\u0000\u0000"+ + "\u03a3\u03a4\u0007\u0006\u0000\u0000\u03a4\u03a5\u0007\r\u0000\u0000\u03a5"+ + "\u03a6\u0007\u0001\u0000\u0000\u03a6\u03a7\u0007\u0012\u0000\u0000\u03a7"+ + "\u03a8\u0007\u0003\u0000\u0000\u03a8\u0090\u0001\u0000\u0000\u0000\u03a9"+ + "\u03aa\u0005)\u0000\u0000\u03aa\u0092\u0001\u0000\u0000\u0000\u03ab\u03ac"+ + "\u0007\u0005\u0000\u0000\u03ac\u03ad\u0007\u0006\u0000\u0000\u03ad\u03ae"+ + "\u0007\u0014\u0000\u0000\u03ae\u03af\u0007\u0003\u0000\u0000\u03af\u0094"+ + "\u0001\u0000\u0000\u0000\u03b0\u03b1\u0005=\u0000\u0000\u03b1\u03b2\u0005"+ + "=\u0000\u0000\u03b2\u0096\u0001\u0000\u0000\u0000\u03b3\u03b4\u0005=\u0000"+ + "\u0000\u03b4\u03b5\u0005~\u0000\u0000\u03b5\u0098\u0001\u0000\u0000\u0000"+ + "\u03b6\u03b7\u0005!\u0000\u0000\u03b7\u03b8\u0005=\u0000\u0000\u03b8\u009a"+ + "\u0001\u0000\u0000\u0000\u03b9\u03ba\u0005<\u0000\u0000\u03ba\u009c\u0001"+ + "\u0000\u0000\u0000\u03bb\u03bc\u0005<\u0000\u0000\u03bc\u03bd\u0005=\u0000"+ + "\u0000\u03bd\u009e\u0001\u0000\u0000\u0000\u03be\u03bf\u0005>\u0000\u0000"+ + "\u03bf\u00a0\u0001\u0000\u0000\u0000\u03c0\u03c1\u0005>\u0000\u0000\u03c1"+ + "\u03c2\u0005=\u0000\u0000\u03c2\u00a2\u0001\u0000\u0000\u0000\u03c3\u03c4"+ + "\u0005+\u0000\u0000\u03c4\u00a4\u0001\u0000\u0000\u0000\u03c5\u03c6\u0005"+ + "-\u0000\u0000\u03c6\u00a6\u0001\u0000\u0000\u0000\u03c7\u03c8\u0005*\u0000"+ + "\u0000\u03c8\u00a8\u0001\u0000\u0000\u0000\u03c9\u03ca\u0005/\u0000\u0000"+ + "\u03ca\u00aa\u0001\u0000\u0000\u0000\u03cb\u03cc\u0005%\u0000\u0000\u03cc"+ + "\u00ac\u0001\u0000\u0000\u0000\u03cd\u03ce\u0005{\u0000\u0000\u03ce\u00ae"+ + "\u0001\u0000\u0000\u0000\u03cf\u03d0\u0005}\u0000\u0000\u03d0\u00b0\u0001"+ + "\u0000\u0000\u0000\u03d1\u03d2\u0003/\u000f\u0000\u03d2\u03d3\u0001\u0000"+ + "\u0000\u0000\u03d3\u03d4\u0006P\u000e\u0000\u03d4\u00b2\u0001\u0000\u0000"+ + "\u0000\u03d5\u03d8\u0003\u008d>\u0000\u03d6\u03d9\u0003M\u001e\u0000\u03d7"+ + "\u03d9\u0003[%\u0000\u03d8\u03d6\u0001\u0000\u0000\u0000\u03d8\u03d7\u0001"+ + "\u0000\u0000\u0000\u03d9\u03dd\u0001\u0000\u0000\u0000\u03da\u03dc\u0003"+ + "]&\u0000\u03db\u03da\u0001\u0000\u0000\u0000\u03dc\u03df\u0001\u0000\u0000"+ + "\u0000\u03dd\u03db\u0001\u0000\u0000\u0000\u03dd\u03de\u0001\u0000\u0000"+ + "\u0000\u03de\u03e7\u0001\u0000\u0000\u0000\u03df\u03dd\u0001\u0000\u0000"+ + "\u0000\u03e0\u03e2\u0003\u008d>\u0000\u03e1\u03e3\u0003K\u001d\u0000\u03e2"+ + "\u03e1\u0001\u0000\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4"+ + 
"\u03e2\u0001\u0000\u0000\u0000\u03e4\u03e5\u0001\u0000\u0000\u0000\u03e5"+ + "\u03e7\u0001\u0000\u0000\u0000\u03e6\u03d5\u0001\u0000\u0000\u0000\u03e6"+ + "\u03e0\u0001\u0000\u0000\u0000\u03e7\u00b4\u0001\u0000\u0000\u0000\u03e8"+ + "\u03e9\u0005[\u0000\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000\u03ea\u03eb"+ + "\u0006R\u0000\u0000\u03eb\u03ec\u0006R\u0000\u0000\u03ec\u00b6\u0001\u0000"+ + "\u0000\u0000\u03ed\u03ee\u0005]\u0000\u0000\u03ee\u03ef\u0001\u0000\u0000"+ + "\u0000\u03ef\u03f0\u0006S\r\u0000\u03f0\u03f1\u0006S\r\u0000\u03f1\u00b8"+ + "\u0001\u0000\u0000\u0000\u03f2\u03f6\u0003M\u001e\u0000\u03f3\u03f5\u0003"+ + "]&\u0000\u03f4\u03f3\u0001\u0000\u0000\u0000\u03f5\u03f8\u0001\u0000\u0000"+ + "\u0000\u03f6\u03f4\u0001\u0000\u0000\u0000\u03f6\u03f7\u0001\u0000\u0000"+ + "\u0000\u03f7\u0403\u0001\u0000\u0000\u0000\u03f8\u03f6\u0001\u0000\u0000"+ + "\u0000\u03f9\u03fc\u0003[%\u0000\u03fa\u03fc\u0003U\"\u0000\u03fb\u03f9"+ + "\u0001\u0000\u0000\u0000\u03fb\u03fa\u0001\u0000\u0000\u0000\u03fc\u03fe"+ + "\u0001\u0000\u0000\u0000\u03fd\u03ff\u0003]&\u0000\u03fe\u03fd\u0001\u0000"+ + "\u0000\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u03fe\u0001\u0000"+ + "\u0000\u0000\u0400\u0401\u0001\u0000\u0000\u0000\u0401\u0403\u0001\u0000"+ + "\u0000\u0000\u0402\u03f2\u0001\u0000\u0000\u0000\u0402\u03fb\u0001\u0000"+ + "\u0000\u0000\u0403\u00ba\u0001\u0000\u0000\u0000\u0404\u0406\u0003W#\u0000"+ + "\u0405\u0407\u0003Y$\u0000\u0406\u0405\u0001\u0000\u0000\u0000\u0407\u0408"+ + "\u0001\u0000\u0000\u0000\u0408\u0406\u0001\u0000\u0000\u0000\u0408\u0409"+ + "\u0001\u0000\u0000\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b"+ + "\u0003W#\u0000\u040b\u00bc\u0001\u0000\u0000\u0000\u040c\u040d\u0003\u00bb"+ + "U\u0000\u040d\u00be\u0001\u0000\u0000\u0000\u040e\u040f\u0003C\u0019\u0000"+ + "\u040f\u0410\u0001\u0000\u0000\u0000\u0410\u0411\u0006W\f\u0000\u0411"+ + "\u00c0\u0001\u0000\u0000\u0000\u0412\u0413\u0003E\u001a\u0000\u0413\u0414"+ + "\u0001\u0000\u0000\u0000\u0414\u0415\u0006X\f\u0000\u0415\u00c2\u0001"+ + "\u0000\u0000\u0000\u0416\u0417\u0003G\u001b\u0000\u0417\u0418\u0001\u0000"+ + "\u0000\u0000\u0418\u0419\u0006Y\f\u0000\u0419\u00c4\u0001\u0000\u0000"+ + "\u0000\u041a\u041b\u0003\u00b5R\u0000\u041b\u041c\u0001\u0000\u0000\u0000"+ + "\u041c\u041d\u0006Z\u000f\u0000\u041d\u041e\u0006Z\u0010\u0000\u041e\u00c6"+ + "\u0001\u0000\u0000\u0000\u041f\u0420\u0003I\u001c\u0000\u0420\u0421\u0001"+ + "\u0000\u0000\u0000\u0421\u0422\u0006[\u0011\u0000\u0422\u0423\u0006[\r"+ + "\u0000\u0423\u00c8\u0001\u0000\u0000\u0000\u0424\u0425\u0003G\u001b\u0000"+ + "\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0427\u0006\\\f\u0000\u0427"+ + "\u00ca\u0001\u0000\u0000\u0000\u0428\u0429\u0003C\u0019\u0000\u0429\u042a"+ + "\u0001\u0000\u0000\u0000\u042a\u042b\u0006]\f\u0000\u042b\u00cc\u0001"+ + "\u0000\u0000\u0000\u042c\u042d\u0003E\u001a\u0000\u042d\u042e\u0001\u0000"+ + "\u0000\u0000\u042e\u042f\u0006^\f\u0000\u042f\u00ce\u0001\u0000\u0000"+ + "\u0000\u0430\u0431\u0003I\u001c\u0000\u0431\u0432\u0001\u0000\u0000\u0000"+ + "\u0432\u0433\u0006_\u0011\u0000\u0433\u0434\u0006_\r\u0000\u0434\u00d0"+ + "\u0001\u0000\u0000\u0000\u0435\u0436\u0003\u00b5R\u0000\u0436\u0437\u0001"+ + "\u0000\u0000\u0000\u0437\u0438\u0006`\u000f\u0000\u0438\u00d2\u0001\u0000"+ + "\u0000\u0000\u0439\u043a\u0003\u00b7S\u0000\u043a\u043b\u0001\u0000\u0000"+ + "\u0000\u043b\u043c\u0006a\u0012\u0000\u043c\u00d4\u0001\u0000\u0000\u0000"+ + "\u043d\u043e\u0003o/\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440"+ + 
"\u0006b\u0013\u0000\u0440\u00d6\u0001\u0000\u0000\u0000\u0441\u0442\u0003"+ + "q0\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443\u0444\u0006c\u0014"+ + "\u0000\u0444\u00d8\u0001\u0000\u0000\u0000\u0445\u0446\u0003k-\u0000\u0446"+ + "\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006d\u0015\u0000\u0448\u00da"+ + "\u0001\u0000\u0000\u0000\u0449\u044a\u0007\u0010\u0000\u0000\u044a\u044b"+ + "\u0007\u0003\u0000\u0000\u044b\u044c\u0007\u0005\u0000\u0000\u044c\u044d"+ + "\u0007\f\u0000\u0000\u044d\u044e\u0007\u0000\u0000\u0000\u044e\u044f\u0007"+ + "\f\u0000\u0000\u044f\u0450\u0007\u0005\u0000\u0000\u0450\u0451\u0007\f"+ + "\u0000\u0000\u0451\u00dc\u0001\u0000\u0000\u0000\u0452\u0456\b \u0000"+ + "\u0000\u0453\u0454\u0005/\u0000\u0000\u0454\u0456\b!\u0000\u0000\u0455"+ + "\u0452\u0001\u0000\u0000\u0000\u0455\u0453\u0001\u0000\u0000\u0000\u0456"+ + "\u00de\u0001\u0000\u0000\u0000\u0457\u0459\u0003\u00ddf\u0000\u0458\u0457"+ + "\u0001\u0000\u0000\u0000\u0459\u045a\u0001\u0000\u0000\u0000\u045a\u0458"+ + "\u0001\u0000\u0000\u0000\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u00e0"+ + "\u0001\u0000\u0000\u0000\u045c\u045d\u0003\u00dfg\u0000\u045d\u045e\u0001"+ + "\u0000\u0000\u0000\u045e\u045f\u0006h\u0016\u0000\u045f\u00e2\u0001\u0000"+ + "\u0000\u0000\u0460\u0461\u0003_\'\u0000\u0461\u0462\u0001\u0000\u0000"+ + "\u0000\u0462\u0463\u0006i\u0017\u0000\u0463\u00e4\u0001\u0000\u0000\u0000"+ + "\u0464\u0465\u0003C\u0019\u0000\u0465\u0466\u0001\u0000\u0000\u0000\u0466"+ + "\u0467\u0006j\f\u0000\u0467\u00e6\u0001\u0000\u0000\u0000\u0468\u0469"+ + "\u0003E\u001a\u0000\u0469\u046a\u0001\u0000\u0000\u0000\u046a\u046b\u0006"+ + "k\f\u0000\u046b\u00e8\u0001\u0000\u0000\u0000\u046c\u046d\u0003G\u001b"+ + "\u0000\u046d\u046e\u0001\u0000\u0000\u0000\u046e\u046f\u0006l\f\u0000"+ + "\u046f\u00ea\u0001\u0000\u0000\u0000\u0470\u0471\u0003I\u001c\u0000\u0471"+ + "\u0472\u0001\u0000\u0000\u0000\u0472\u0473\u0006m\u0011\u0000\u0473\u0474"+ + "\u0006m\r\u0000\u0474\u00ec\u0001\u0000\u0000\u0000\u0475\u0476\u0003"+ + "u2\u0000\u0476\u0477\u0001\u0000\u0000\u0000\u0477\u0478\u0006n\u0018"+ + "\u0000\u0478\u00ee\u0001\u0000\u0000\u0000\u0479\u047a\u0003q0\u0000\u047a"+ + "\u047b\u0001\u0000\u0000\u0000\u047b\u047c\u0006o\u0014\u0000\u047c\u00f0"+ + "\u0001\u0000\u0000\u0000\u047d\u047e\u0003\u008d>\u0000\u047e\u047f\u0001"+ + "\u0000\u0000\u0000\u047f\u0480\u0006p\u0019\u0000\u0480\u00f2\u0001\u0000"+ + "\u0000\u0000\u0481\u0482\u0003\u00b3Q\u0000\u0482\u0483\u0001\u0000\u0000"+ + "\u0000\u0483\u0484\u0006q\u001a\u0000\u0484\u00f4\u0001\u0000\u0000\u0000"+ + "\u0485\u048a\u0003M\u001e\u0000\u0486\u048a\u0003K\u001d\u0000\u0487\u048a"+ + "\u0003[%\u0000\u0488\u048a\u0003\u00a7K\u0000\u0489\u0485\u0001\u0000"+ + "\u0000\u0000\u0489\u0486\u0001\u0000\u0000\u0000\u0489\u0487\u0001\u0000"+ + "\u0000\u0000\u0489\u0488\u0001\u0000\u0000\u0000\u048a\u00f6\u0001\u0000"+ + "\u0000\u0000\u048b\u048e\u0003M\u001e\u0000\u048c\u048e\u0003\u00a7K\u0000"+ + "\u048d\u048b\u0001\u0000\u0000\u0000\u048d\u048c\u0001\u0000\u0000\u0000"+ + "\u048e\u0492\u0001\u0000\u0000\u0000\u048f\u0491\u0003\u00f5r\u0000\u0490"+ + "\u048f\u0001\u0000\u0000\u0000\u0491\u0494\u0001\u0000\u0000\u0000\u0492"+ + "\u0490\u0001\u0000\u0000\u0000\u0492\u0493\u0001\u0000\u0000\u0000\u0493"+ + "\u049f\u0001\u0000\u0000\u0000\u0494\u0492\u0001\u0000\u0000\u0000\u0495"+ + "\u0498\u0003[%\u0000\u0496\u0498\u0003U\"\u0000\u0497\u0495\u0001\u0000"+ + "\u0000\u0000\u0497\u0496\u0001\u0000\u0000\u0000\u0498\u049a\u0001\u0000"+ + 
"\u0000\u0000\u0499\u049b\u0003\u00f5r\u0000\u049a\u0499\u0001\u0000\u0000"+ + "\u0000\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049a\u0001\u0000\u0000"+ + "\u0000\u049c\u049d\u0001\u0000\u0000\u0000\u049d\u049f\u0001\u0000\u0000"+ + "\u0000\u049e\u048d\u0001\u0000\u0000\u0000\u049e\u0497\u0001\u0000\u0000"+ + "\u0000\u049f\u00f8\u0001\u0000\u0000\u0000\u04a0\u04a3\u0003\u00f7s\u0000"+ + "\u04a1\u04a3\u0003\u00bbU\u0000\u04a2\u04a0\u0001\u0000\u0000\u0000\u04a2"+ + "\u04a1\u0001\u0000\u0000\u0000\u04a3\u04a4\u0001\u0000\u0000\u0000\u04a4"+ + "\u04a2\u0001\u0000\u0000\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ + "\u00fa\u0001\u0000\u0000\u0000\u04a6\u04a7\u0003C\u0019\u0000\u04a7\u04a8"+ + "\u0001\u0000\u0000\u0000\u04a8\u04a9\u0006u\f\u0000\u04a9\u00fc\u0001"+ + "\u0000\u0000\u0000\u04aa\u04ab\u0003E\u001a\u0000\u04ab\u04ac\u0001\u0000"+ + "\u0000\u0000\u04ac\u04ad\u0006v\f\u0000\u04ad\u00fe\u0001\u0000\u0000"+ + "\u0000\u04ae\u04af\u0003G\u001b\u0000\u04af\u04b0\u0001\u0000\u0000\u0000"+ + "\u04b0\u04b1\u0006w\f\u0000\u04b1\u0100\u0001\u0000\u0000\u0000\u04b2"+ + "\u04b3\u0003I\u001c\u0000\u04b3\u04b4\u0001\u0000\u0000\u0000\u04b4\u04b5"+ + "\u0006x\u0011\u0000\u04b5\u04b6\u0006x\r\u0000\u04b6\u0102\u0001\u0000"+ + "\u0000\u0000\u04b7\u04b8\u0003k-\u0000\u04b8\u04b9\u0001\u0000\u0000\u0000"+ + "\u04b9\u04ba\u0006y\u0015\u0000\u04ba\u0104\u0001\u0000\u0000\u0000\u04bb"+ + "\u04bc\u0003q0\u0000\u04bc\u04bd\u0001\u0000\u0000\u0000\u04bd\u04be\u0006"+ + "z\u0014\u0000\u04be\u0106\u0001\u0000\u0000\u0000\u04bf\u04c0\u0003u2"+ + "\u0000\u04c0\u04c1\u0001\u0000\u0000\u0000\u04c1\u04c2\u0006{\u0018\u0000"+ + "\u04c2\u0108\u0001\u0000\u0000\u0000\u04c3\u04c4\u0003\u008d>\u0000\u04c4"+ + "\u04c5\u0001\u0000\u0000\u0000\u04c5\u04c6\u0006|\u0019\u0000\u04c6\u010a"+ + "\u0001\u0000\u0000\u0000\u04c7\u04c8\u0003\u00b3Q\u0000\u04c8\u04c9\u0001"+ + "\u0000\u0000\u0000\u04c9\u04ca\u0006}\u001a\u0000\u04ca\u010c\u0001\u0000"+ + "\u0000\u0000\u04cb\u04cc\u0007\f\u0000\u0000\u04cc\u04cd\u0007\u0002\u0000"+ + "\u0000\u04cd\u010e\u0001\u0000\u0000\u0000\u04ce\u04cf\u0003\u00f9t\u0000"+ + "\u04cf\u04d0\u0001\u0000\u0000\u0000\u04d0\u04d1\u0006\u007f\u001b\u0000"+ + "\u04d1\u0110\u0001\u0000\u0000\u0000\u04d2\u04d3\u0003C\u0019\u0000\u04d3"+ + "\u04d4\u0001\u0000\u0000\u0000\u04d4\u04d5\u0006\u0080\f\u0000\u04d5\u0112"+ + "\u0001\u0000\u0000\u0000\u04d6\u04d7\u0003E\u001a\u0000\u04d7\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d8\u04d9\u0006\u0081\f\u0000\u04d9\u0114\u0001\u0000"+ + "\u0000\u0000\u04da\u04db\u0003G\u001b\u0000\u04db\u04dc\u0001\u0000\u0000"+ + "\u0000\u04dc\u04dd\u0006\u0082\f\u0000\u04dd\u0116\u0001\u0000\u0000\u0000"+ + "\u04de\u04df\u0003I\u001c\u0000\u04df\u04e0\u0001\u0000\u0000\u0000\u04e0"+ + "\u04e1\u0006\u0083\u0011\u0000\u04e1\u04e2\u0006\u0083\r\u0000\u04e2\u0118"+ + "\u0001\u0000\u0000\u0000\u04e3\u04e4\u0003\u00b5R\u0000\u04e4\u04e5\u0001"+ + "\u0000\u0000\u0000\u04e5\u04e6\u0006\u0084\u000f\u0000\u04e6\u04e7\u0006"+ + "\u0084\u001c\u0000\u04e7\u011a\u0001\u0000\u0000\u0000\u04e8\u04e9\u0007"+ + "\u0007\u0000\u0000\u04e9\u04ea\u0007\t\u0000\u0000\u04ea\u04eb\u0001\u0000"+ + "\u0000\u0000\u04eb\u04ec\u0006\u0085\u001d\u0000\u04ec\u011c\u0001\u0000"+ + "\u0000\u0000\u04ed\u04ee\u0007\u0013\u0000\u0000\u04ee\u04ef\u0007\u0001"+ + "\u0000\u0000\u04ef\u04f0\u0007\u0005\u0000\u0000\u04f0\u04f1\u0007\n\u0000"+ + "\u0000\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3\u0006\u0086\u001d"+ + "\u0000\u04f3\u011e\u0001\u0000\u0000\u0000\u04f4\u04f5\b\"\u0000\u0000"+ + 
"\u04f5\u0120\u0001\u0000\u0000\u0000\u04f6\u04f8\u0003\u011f\u0087\u0000"+ + "\u04f7\u04f6\u0001\u0000\u0000\u0000\u04f8\u04f9\u0001\u0000\u0000\u0000"+ + "\u04f9\u04f7\u0001\u0000\u0000\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000"+ + "\u04fa\u04fb\u0001\u0000\u0000\u0000\u04fb\u04fc\u0003o/\u0000\u04fc\u04fe"+ + "\u0001\u0000\u0000\u0000\u04fd\u04f7\u0001\u0000\u0000\u0000\u04fd\u04fe"+ + "\u0001\u0000\u0000\u0000\u04fe\u0500\u0001\u0000\u0000\u0000\u04ff\u0501"+ + "\u0003\u011f\u0087\u0000\u0500\u04ff\u0001\u0000\u0000\u0000\u0501\u0502"+ + "\u0001\u0000\u0000\u0000\u0502\u0500\u0001\u0000\u0000\u0000\u0502\u0503"+ + "\u0001\u0000\u0000\u0000\u0503\u0122\u0001\u0000\u0000\u0000\u0504\u0505"+ + "\u0003\u0121\u0088\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507"+ + "\u0006\u0089\u001e\u0000\u0507\u0124\u0001\u0000\u0000\u0000\u0508\u0509"+ + "\u0003C\u0019\u0000\u0509\u050a\u0001\u0000\u0000\u0000\u050a\u050b\u0006"+ + "\u008a\f\u0000\u050b\u0126\u0001\u0000\u0000\u0000\u050c\u050d\u0003E"+ + "\u001a\u0000\u050d\u050e\u0001\u0000\u0000\u0000\u050e\u050f\u0006\u008b"+ + "\f\u0000\u050f\u0128\u0001\u0000\u0000\u0000\u0510\u0511\u0003G\u001b"+ + "\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006\u008c\f\u0000"+ + "\u0513\u012a\u0001\u0000\u0000\u0000\u0514\u0515\u0003I\u001c\u0000\u0515"+ + "\u0516\u0001\u0000\u0000\u0000\u0516\u0517\u0006\u008d\u0011\u0000\u0517"+ + "\u0518\u0006\u008d\r\u0000\u0518\u0519\u0006\u008d\r\u0000\u0519\u012c"+ + "\u0001\u0000\u0000\u0000\u051a\u051b\u0003k-\u0000\u051b\u051c\u0001\u0000"+ + "\u0000\u0000\u051c\u051d\u0006\u008e\u0015\u0000\u051d\u012e\u0001\u0000"+ + "\u0000\u0000\u051e\u051f\u0003q0\u0000\u051f\u0520\u0001\u0000\u0000\u0000"+ + "\u0520\u0521\u0006\u008f\u0014\u0000\u0521\u0130\u0001\u0000\u0000\u0000"+ + "\u0522\u0523\u0003u2\u0000\u0523\u0524\u0001\u0000\u0000\u0000\u0524\u0525"+ + "\u0006\u0090\u0018\u0000\u0525\u0132\u0001\u0000\u0000\u0000\u0526\u0527"+ + "\u0003\u011d\u0086\u0000\u0527\u0528\u0001\u0000\u0000\u0000\u0528\u0529"+ + "\u0006\u0091\u001f\u0000\u0529\u0134\u0001\u0000\u0000\u0000\u052a\u052b"+ + "\u0003\u00f9t\u0000\u052b\u052c\u0001\u0000\u0000\u0000\u052c\u052d\u0006"+ + "\u0092\u001b\u0000\u052d\u0136\u0001\u0000\u0000\u0000\u052e\u052f\u0003"+ + "\u00bdV\u0000\u052f\u0530\u0001\u0000\u0000\u0000\u0530\u0531\u0006\u0093"+ + " \u0000\u0531\u0138\u0001\u0000\u0000\u0000\u0532\u0533\u0003\u008d>\u0000"+ + "\u0533\u0534\u0001\u0000\u0000\u0000\u0534\u0535\u0006\u0094\u0019\u0000"+ + "\u0535\u013a\u0001\u0000\u0000\u0000\u0536\u0537\u0003\u00b3Q\u0000\u0537"+ + "\u0538\u0001\u0000\u0000\u0000\u0538\u0539\u0006\u0095\u001a\u0000\u0539"+ + "\u013c\u0001\u0000\u0000\u0000\u053a\u053b\u0003C\u0019\u0000\u053b\u053c"+ + "\u0001\u0000\u0000\u0000\u053c\u053d\u0006\u0096\f\u0000\u053d\u013e\u0001"+ + "\u0000\u0000\u0000\u053e\u053f\u0003E\u001a\u0000\u053f\u0540\u0001\u0000"+ + "\u0000\u0000\u0540\u0541\u0006\u0097\f\u0000\u0541\u0140\u0001\u0000\u0000"+ + "\u0000\u0542\u0543\u0003G\u001b\u0000\u0543\u0544\u0001\u0000\u0000\u0000"+ + "\u0544\u0545\u0006\u0098\f\u0000\u0545\u0142\u0001\u0000\u0000\u0000\u0546"+ + "\u0547\u0003I\u001c\u0000\u0547\u0548\u0001\u0000\u0000\u0000\u0548\u0549"+ + "\u0006\u0099\u0011\u0000\u0549\u054a\u0006\u0099\r\u0000\u054a\u0144\u0001"+ + "\u0000\u0000\u0000\u054b\u054c\u0003u2\u0000\u054c\u054d\u0001\u0000\u0000"+ + "\u0000\u054d\u054e\u0006\u009a\u0018\u0000\u054e\u0146\u0001\u0000\u0000"+ + "\u0000\u054f\u0550\u0003\u008d>\u0000\u0550\u0551\u0001\u0000\u0000\u0000"+ + 
"\u0551\u0552\u0006\u009b\u0019\u0000\u0552\u0148\u0001\u0000\u0000\u0000"+ + "\u0553\u0554\u0003\u00b3Q\u0000\u0554\u0555\u0001\u0000\u0000\u0000\u0555"+ + "\u0556\u0006\u009c\u001a\u0000\u0556\u014a\u0001\u0000\u0000\u0000\u0557"+ + "\u0558\u0003\u00bdV\u0000\u0558\u0559\u0001\u0000\u0000\u0000\u0559\u055a"+ + "\u0006\u009d \u0000\u055a\u014c\u0001\u0000\u0000\u0000\u055b\u055c\u0003"+ + "\u00b9T\u0000\u055c\u055d\u0001\u0000\u0000\u0000\u055d\u055e\u0006\u009e"+ + "!\u0000\u055e\u014e\u0001\u0000\u0000\u0000\u055f\u0560\u0003C\u0019\u0000"+ + "\u0560\u0561\u0001\u0000\u0000\u0000\u0561\u0562\u0006\u009f\f\u0000\u0562"+ + "\u0150\u0001\u0000\u0000\u0000\u0563\u0564\u0003E\u001a\u0000\u0564\u0565"+ + "\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00a0\f\u0000\u0566\u0152\u0001"+ + "\u0000\u0000\u0000\u0567\u0568\u0003G\u001b\u0000\u0568\u0569\u0001\u0000"+ + "\u0000\u0000\u0569\u056a\u0006\u00a1\f\u0000\u056a\u0154\u0001\u0000\u0000"+ + "\u0000\u056b\u056c\u0003I\u001c\u0000\u056c\u056d\u0001\u0000\u0000\u0000"+ + "\u056d\u056e\u0006\u00a2\u0011\u0000\u056e\u056f\u0006\u00a2\r\u0000\u056f"+ + "\u0156\u0001\u0000\u0000\u0000\u0570\u0571\u0007\u0001\u0000\u0000\u0571"+ + "\u0572\u0007\t\u0000\u0000\u0572\u0573\u0007\u000f\u0000\u0000\u0573\u0574"+ + "\u0007\u0007\u0000\u0000\u0574\u0158\u0001\u0000\u0000\u0000\u0575\u0576"+ + "\u0003C\u0019\u0000\u0576\u0577\u0001\u0000\u0000\u0000\u0577\u0578\u0006"+ + "\u00a4\f\u0000\u0578\u015a\u0001\u0000\u0000\u0000\u0579\u057a\u0003E"+ + "\u001a\u0000\u057a\u057b\u0001\u0000\u0000\u0000\u057b\u057c\u0006\u00a5"+ + "\f\u0000\u057c\u015c\u0001\u0000\u0000\u0000\u057d\u057e\u0003G\u001b"+ + "\u0000\u057e\u057f\u0001\u0000\u0000\u0000\u057f\u0580\u0006\u00a6\f\u0000"+ + "\u0580\u015e\u0001\u0000\u0000\u0000\u0581\u0582\u0003\u00b7S\u0000\u0582"+ + "\u0583\u0001\u0000\u0000\u0000\u0583\u0584\u0006\u00a7\u0012\u0000\u0584"+ + "\u0585\u0006\u00a7\r\u0000\u0585\u0160\u0001\u0000\u0000\u0000\u0586\u0587"+ + "\u0003o/\u0000\u0587\u0588\u0001\u0000\u0000\u0000\u0588\u0589\u0006\u00a8"+ + "\u0013\u0000\u0589\u0162\u0001\u0000\u0000\u0000\u058a\u0590\u0003U\""+ + "\u0000\u058b\u0590\u0003K\u001d\u0000\u058c\u0590\u0003u2\u0000\u058d"+ + "\u0590\u0003M\u001e\u0000\u058e\u0590\u0003[%\u0000\u058f\u058a\u0001"+ + "\u0000\u0000\u0000\u058f\u058b\u0001\u0000\u0000\u0000\u058f\u058c\u0001"+ + "\u0000\u0000\u0000\u058f\u058d\u0001\u0000\u0000\u0000\u058f\u058e\u0001"+ + "\u0000\u0000\u0000\u0590\u0591\u0001\u0000\u0000\u0000\u0591\u058f\u0001"+ + "\u0000\u0000\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592\u0164\u0001"+ + "\u0000\u0000\u0000\u0593\u0594\u0003C\u0019\u0000\u0594\u0595\u0001\u0000"+ + "\u0000\u0000\u0595\u0596\u0006\u00aa\f\u0000\u0596\u0166\u0001\u0000\u0000"+ + "\u0000\u0597\u0598\u0003E\u001a\u0000\u0598\u0599\u0001\u0000\u0000\u0000"+ + "\u0599\u059a\u0006\u00ab\f\u0000\u059a\u0168\u0001\u0000\u0000\u0000\u059b"+ + "\u059c\u0003G\u001b\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d\u059e"+ + "\u0006\u00ac\f\u0000\u059e\u016a\u0001\u0000\u0000\u0000\u059f\u05a0\u0003"+ + "I\u001c\u0000\u05a0\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2\u0006\u00ad"+ + "\u0011\u0000\u05a2\u05a3\u0006\u00ad\r\u0000\u05a3\u016c\u0001\u0000\u0000"+ + "\u0000\u05a4\u05a5\u0003o/\u0000\u05a5\u05a6\u0001\u0000\u0000\u0000\u05a6"+ + "\u05a7\u0006\u00ae\u0013\u0000\u05a7\u016e\u0001\u0000\u0000\u0000\u05a8"+ + "\u05a9\u0003q0\u0000\u05a9\u05aa\u0001\u0000\u0000\u0000\u05aa\u05ab\u0006"+ + "\u00af\u0014\u0000\u05ab\u0170\u0001\u0000\u0000\u0000\u05ac\u05ad\u0003"+ + 
"u2\u0000\u05ad\u05ae\u0001\u0000\u0000\u0000\u05ae\u05af\u0006\u00b0\u0018"+ + "\u0000\u05af\u0172\u0001\u0000\u0000\u0000\u05b0\u05b1\u0003\u011b\u0085"+ + "\u0000\u05b1\u05b2\u0001\u0000\u0000\u0000\u05b2\u05b3\u0006\u00b1\"\u0000"+ + "\u05b3\u05b4\u0006\u00b1#\u0000\u05b4\u0174\u0001\u0000\u0000\u0000\u05b5"+ + "\u05b6\u0003\u00dfg\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7\u05b8"+ + "\u0006\u00b2\u0016\u0000\u05b8\u0176\u0001\u0000\u0000\u0000\u05b9\u05ba"+ + "\u0003_\'\u0000\u05ba\u05bb\u0001\u0000\u0000\u0000\u05bb\u05bc\u0006"+ + "\u00b3\u0017\u0000\u05bc\u0178\u0001\u0000\u0000\u0000\u05bd\u05be\u0003"+ + "C\u0019\u0000\u05be\u05bf\u0001\u0000\u0000\u0000\u05bf\u05c0\u0006\u00b4"+ + "\f\u0000\u05c0\u017a\u0001\u0000\u0000\u0000\u05c1\u05c2\u0003E\u001a"+ + "\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3\u05c4\u0006\u00b5\f\u0000"+ + "\u05c4\u017c\u0001\u0000\u0000\u0000\u05c5\u05c6\u0003G\u001b\u0000\u05c6"+ + "\u05c7\u0001\u0000\u0000\u0000\u05c7\u05c8\u0006\u00b6\f\u0000\u05c8\u017e"+ + "\u0001\u0000\u0000\u0000\u05c9\u05ca\u0003I\u001c\u0000\u05ca\u05cb\u0001"+ + "\u0000\u0000\u0000\u05cb\u05cc\u0006\u00b7\u0011\u0000\u05cc\u05cd\u0006"+ + "\u00b7\r\u0000\u05cd\u05ce\u0006\u00b7\r\u0000\u05ce\u0180\u0001\u0000"+ + "\u0000\u0000\u05cf\u05d0\u0003q0\u0000\u05d0\u05d1\u0001\u0000\u0000\u0000"+ + "\u05d1\u05d2\u0006\u00b8\u0014\u0000\u05d2\u0182\u0001\u0000\u0000\u0000"+ + "\u05d3\u05d4\u0003u2\u0000\u05d4\u05d5\u0001\u0000\u0000\u0000\u05d5\u05d6"+ + "\u0006\u00b9\u0018\u0000\u05d6\u0184\u0001\u0000\u0000\u0000\u05d7\u05d8"+ + "\u0003\u00f9t\u0000\u05d8\u05d9\u0001\u0000\u0000\u0000\u05d9\u05da\u0006"+ + "\u00ba\u001b\u0000\u05da\u0186\u0001\u0000\u0000\u0000\u05db\u05dc\u0003"+ + "C\u0019\u0000\u05dc\u05dd\u0001\u0000\u0000\u0000\u05dd\u05de\u0006\u00bb"+ + "\f\u0000\u05de\u0188\u0001\u0000\u0000\u0000\u05df\u05e0\u0003E\u001a"+ + "\u0000\u05e0\u05e1\u0001\u0000\u0000\u0000\u05e1\u05e2\u0006\u00bc\f\u0000"+ + "\u05e2\u018a\u0001\u0000\u0000\u0000\u05e3\u05e4\u0003G\u001b\u0000\u05e4"+ + "\u05e5\u0001\u0000\u0000\u0000\u05e5\u05e6\u0006\u00bd\f\u0000\u05e6\u018c"+ + "\u0001\u0000\u0000\u0000\u05e7\u05e8\u0003I\u001c\u0000\u05e8\u05e9\u0001"+ + "\u0000\u0000\u0000\u05e9\u05ea\u0006\u00be\u0011\u0000\u05ea\u05eb\u0006"+ + "\u00be\r\u0000\u05eb\u018e\u0001\u0000\u0000\u0000\u05ec\u05ed\u0007#"+ + "\u0000\u0000\u05ed\u05ee\u0007\u0007\u0000\u0000\u05ee\u05ef\u0007\u0001"+ + "\u0000\u0000\u05ef\u05f0\u0007\t\u0000\u0000\u05f0\u0190\u0001\u0000\u0000"+ + "\u0000\u05f1\u05f2\u0003\u010d~\u0000\u05f2\u05f3\u0001\u0000\u0000\u0000"+ + "\u05f3\u05f4\u0006\u00c0$\u0000\u05f4\u0192\u0001\u0000\u0000\u0000\u05f5"+ + "\u05f6\u0003\u011b\u0085\u0000\u05f6\u05f7\u0001\u0000\u0000\u0000\u05f7"+ + "\u05f8\u0006\u00c1\"\u0000\u05f8\u05f9\u0006\u00c1\r\u0000\u05f9\u05fa"+ + "\u0006\u00c1\u0000\u0000\u05fa\u0194\u0001\u0000\u0000\u0000\u05fb\u05fc"+ + "\u0007\u0014\u0000\u0000\u05fc\u05fd\u0007\u0002\u0000\u0000\u05fd\u05fe"+ + "\u0007\u0001\u0000\u0000\u05fe\u05ff\u0007\t\u0000\u0000\u05ff\u0600\u0007"+ + "\u0011\u0000\u0000\u0600\u0601\u0001\u0000\u0000\u0000\u0601\u0602\u0006"+ + "\u00c2\r\u0000\u0602\u0603\u0006\u00c2\u0000\u0000\u0603\u0196\u0001\u0000"+ + "\u0000\u0000\u0604\u0605\u0003\u00dfg\u0000\u0605\u0606\u0001\u0000\u0000"+ + "\u0000\u0606\u0607\u0006\u00c3\u0016\u0000\u0607\u0198\u0001\u0000\u0000"+ + "\u0000\u0608\u0609\u0003_\'\u0000\u0609\u060a\u0001\u0000\u0000\u0000"+ + "\u060a\u060b\u0006\u00c4\u0017\u0000\u060b\u019a\u0001\u0000\u0000\u0000"+ + 
"\u060c\u060d\u0003o/\u0000\u060d\u060e\u0001\u0000\u0000\u0000\u060e\u060f"+ + "\u0006\u00c5\u0013\u0000\u060f\u019c\u0001\u0000\u0000\u0000\u0610\u0611"+ + "\u0003\u00b9T\u0000\u0611\u0612\u0001\u0000\u0000\u0000\u0612\u0613\u0006"+ + "\u00c6!\u0000\u0613\u019e\u0001\u0000\u0000\u0000\u0614\u0615\u0003\u00bd"+ + "V\u0000\u0615\u0616\u0001\u0000\u0000\u0000\u0616\u0617\u0006\u00c7 \u0000"+ + "\u0617\u01a0\u0001\u0000\u0000\u0000\u0618\u0619\u0003C\u0019\u0000\u0619"+ + "\u061a\u0001\u0000\u0000\u0000\u061a\u061b\u0006\u00c8\f\u0000\u061b\u01a2"+ + "\u0001\u0000\u0000\u0000\u061c\u061d\u0003E\u001a\u0000\u061d\u061e\u0001"+ + "\u0000\u0000\u0000\u061e\u061f\u0006\u00c9\f\u0000\u061f\u01a4\u0001\u0000"+ + "\u0000\u0000\u0620\u0621\u0003G\u001b\u0000\u0621\u0622\u0001\u0000\u0000"+ + "\u0000\u0622\u0623\u0006\u00ca\f\u0000\u0623\u01a6\u0001\u0000\u0000\u0000"+ + "\u0624\u0625\u0003I\u001c\u0000\u0625\u0626\u0001\u0000\u0000\u0000\u0626"+ + "\u0627\u0006\u00cb\u0011\u0000\u0627\u0628\u0006\u00cb\r\u0000\u0628\u01a8"+ + "\u0001\u0000\u0000\u0000\u0629\u062a\u0003\u00dfg\u0000\u062a\u062b\u0001"+ + "\u0000\u0000\u0000\u062b\u062c\u0006\u00cc\u0016\u0000\u062c\u062d\u0006"+ + "\u00cc\r\u0000\u062d\u062e\u0006\u00cc%\u0000\u062e\u01aa\u0001\u0000"+ + "\u0000\u0000\u062f\u0630\u0003_\'\u0000\u0630\u0631\u0001\u0000\u0000"+ + "\u0000\u0631\u0632\u0006\u00cd\u0017\u0000\u0632\u0633\u0006\u00cd\r\u0000"+ + "\u0633\u0634\u0006\u00cd%\u0000\u0634\u01ac\u0001\u0000\u0000\u0000\u0635"+ + "\u0636\u0003C\u0019\u0000\u0636\u0637\u0001\u0000\u0000\u0000\u0637\u0638"+ + "\u0006\u00ce\f\u0000\u0638\u01ae\u0001\u0000\u0000\u0000\u0639\u063a\u0003"+ + "E\u001a\u0000\u063a\u063b\u0001\u0000\u0000\u0000\u063b\u063c\u0006\u00cf"+ + "\f\u0000\u063c\u01b0\u0001\u0000\u0000\u0000\u063d\u063e\u0003G\u001b"+ + "\u0000\u063e\u063f\u0001\u0000\u0000\u0000\u063f\u0640\u0006\u00d0\f\u0000"+ + "\u0640\u01b2\u0001\u0000\u0000\u0000\u0641\u0642\u0003o/\u0000\u0642\u0643"+ + "\u0001\u0000\u0000\u0000\u0643\u0644\u0006\u00d1\u0013\u0000\u0644\u0645"+ + "\u0006\u00d1\r\u0000\u0645\u0646\u0006\u00d1\u000b\u0000\u0646\u01b4\u0001"+ + "\u0000\u0000\u0000\u0647\u0648\u0003q0\u0000\u0648\u0649\u0001\u0000\u0000"+ + "\u0000\u0649\u064a\u0006\u00d2\u0014\u0000\u064a\u064b\u0006\u00d2\r\u0000"+ + "\u064b\u064c\u0006\u00d2\u000b\u0000\u064c\u01b6\u0001\u0000\u0000\u0000"+ + "\u064d\u064e\u0003C\u0019\u0000\u064e\u064f\u0001\u0000\u0000\u0000\u064f"+ + "\u0650\u0006\u00d3\f\u0000\u0650\u01b8\u0001\u0000\u0000\u0000\u0651\u0652"+ + "\u0003E\u001a\u0000\u0652\u0653\u0001\u0000\u0000\u0000\u0653\u0654\u0006"+ + "\u00d4\f\u0000\u0654\u01ba\u0001\u0000\u0000\u0000\u0655\u0656\u0003G"+ + "\u001b\u0000\u0656\u0657\u0001\u0000\u0000\u0000\u0657\u0658\u0006\u00d5"+ + "\f\u0000\u0658\u01bc\u0001\u0000\u0000\u0000\u0659\u065a\u0003\u00bdV"+ + "\u0000\u065a\u065b\u0001\u0000\u0000\u0000\u065b\u065c\u0006\u00d6\r\u0000"+ + "\u065c\u065d\u0006\u00d6\u0000\u0000\u065d\u065e\u0006\u00d6 \u0000\u065e"+ + "\u01be\u0001\u0000\u0000\u0000\u065f\u0660\u0003\u00b9T\u0000\u0660\u0661"+ + "\u0001\u0000\u0000\u0000\u0661\u0662\u0006\u00d7\r\u0000\u0662\u0663\u0006"+ + "\u00d7\u0000\u0000\u0663\u0664\u0006\u00d7!\u0000\u0664\u01c0\u0001\u0000"+ + "\u0000\u0000\u0665\u0666\u0003e*\u0000\u0666\u0667\u0001\u0000\u0000\u0000"+ + "\u0667\u0668\u0006\u00d8\r\u0000\u0668\u0669\u0006\u00d8\u0000\u0000\u0669"+ + "\u066a\u0006\u00d8&\u0000\u066a\u01c2\u0001\u0000\u0000\u0000\u066b\u066c"+ + "\u0003I\u001c\u0000\u066c\u066d\u0001\u0000\u0000\u0000\u066d\u066e\u0006"+ + 
"\u00d9\u0011\u0000\u066e\u066f\u0006\u00d9\r\u0000\u066f\u01c4\u0001\u0000"+ + "\u0000\u0000\u0670\u0671\u0003I\u001c\u0000\u0671\u0672\u0001\u0000\u0000"+ + "\u0000\u0672\u0673\u0006\u00da\u0011\u0000\u0673\u0674\u0006\u00da\r\u0000"+ + "\u0674\u01c6\u0001\u0000\u0000\u0000\u0675\u0676\u0003\u011b\u0085\u0000"+ + "\u0676\u0677\u0001\u0000\u0000\u0000\u0677\u0678\u0006\u00db\"\u0000\u0678"+ + "\u01c8\u0001\u0000\u0000\u0000\u0679\u067a\u0003\u010d~\u0000\u067a\u067b"+ + "\u0001\u0000\u0000\u0000\u067b\u067c\u0006\u00dc$\u0000\u067c\u01ca\u0001"+ + "\u0000\u0000\u0000\u067d\u067e\u0003u2\u0000\u067e\u067f\u0001\u0000\u0000"+ + "\u0000\u067f\u0680\u0006\u00dd\u0018\u0000\u0680\u01cc\u0001\u0000\u0000"+ + "\u0000\u0681\u0682\u0003q0\u0000\u0682\u0683\u0001\u0000\u0000\u0000\u0683"+ + "\u0684\u0006\u00de\u0014\u0000\u0684\u01ce\u0001\u0000\u0000\u0000\u0685"+ + "\u0686\u0003\u00bdV\u0000\u0686\u0687\u0001\u0000\u0000\u0000\u0687\u0688"+ + "\u0006\u00df \u0000\u0688\u01d0\u0001\u0000\u0000\u0000\u0689\u068a\u0003"+ + "\u00b9T\u0000\u068a\u068b\u0001\u0000\u0000\u0000\u068b\u068c\u0006\u00e0"+ + "!\u0000\u068c\u01d2\u0001\u0000\u0000\u0000\u068d\u068e\u0003C\u0019\u0000"+ + "\u068e\u068f\u0001\u0000\u0000\u0000\u068f\u0690\u0006\u00e1\f\u0000\u0690"+ + "\u01d4\u0001\u0000\u0000\u0000\u0691\u0692\u0003E\u001a\u0000\u0692\u0693"+ + "\u0001\u0000\u0000\u0000\u0693\u0694\u0006\u00e2\f\u0000\u0694\u01d6\u0001"+ + "\u0000\u0000\u0000\u0695\u0696\u0003G\u001b\u0000\u0696\u0697\u0001\u0000"+ + "\u0000\u0000\u0697\u0698\u0006\u00e3\f\u0000\u0698\u01d8\u0001\u0000\u0000"+ + "\u0000C\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ + "\r\u000e\u000f\u0010\u02b5\u02bf\u02c3\u02c6\u02cf\u02d1\u02dc\u02ef\u02f4"+ + "\u02fd\u0304\u0309\u030b\u0316\u031e\u0321\u0323\u0328\u032d\u0333\u033a"+ + "\u033f\u0345\u0348\u0350\u0354\u03d8\u03dd\u03e4\u03e6\u03f6\u03fb\u0400"+ + "\u0402\u0408\u0455\u045a\u0489\u048d\u0492\u0497\u049c\u049e\u04a2\u04a4"+ + "\u04f9\u04fd\u0502\u058f\u0591\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005"+ + "\u0006\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005"+ + "\u0000\u0005\t\u0000\u0005\r\u0000\u0005\u0010\u0000\u0005\u000b\u0000"+ + "\u0005\u000e\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000"+ + "\u0007H\u0000\u0005\u0000\u0000\u0007\u001d\u0000\u0007I\u0000\u0007&"+ + "\u0000\u0007\'\u0000\u0007$\u0000\u0007S\u0000\u0007\u001e\u0000\u0007"+ + ")\u0000\u00075\u0000\u0007G\u0000\u0007W\u0000\u0005\n\u0000\u0005\u0007"+ + "\u0000\u0007a\u0000\u0007`\u0000\u0007K\u0000\u0007J\u0000\u0007_\u0000"+ + "\u0005\f\u0000\u0007[\u0000\u0005\u000f\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index a75d7e985c1d0..2957751d99f6a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -16,7 +16,7 @@ null 'sort' 'stats' 'where' -null +'lookup' null null null @@ -120,6 +120,7 @@ null null null null +'join' 'USING' null null @@ -130,6 +131,9 @@ null null null null +null +null +null token symbolic names: null @@ -149,14 +153,14 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP +DEV_CHANGE_POINT DEV_INLINESTATS DEV_LOOKUP DEV_METRICS -DEV_JOIN 
 DEV_JOIN_FULL
 DEV_JOIN_LEFT
 DEV_JOIN_RIGHT
-DEV_JOIN_LOOKUP
 UNKNOWN_CMD
 LINE_COMMENT
 MULTILINE_COMMENT
@@ -253,6 +257,7 @@ LOOKUP_WS
 LOOKUP_FIELD_LINE_COMMENT
 LOOKUP_FIELD_MULTILINE_COMMENT
 LOOKUP_FIELD_WS
+JOIN
 USING
 JOIN_LINE_COMMENT
 JOIN_MULTILINE_COMMENT
@@ -263,6 +268,9 @@ METRICS_WS
 CLOSING_METRICS_LINE_COMMENT
 CLOSING_METRICS_MULTILINE_COMMENT
 CLOSING_METRICS_WS
+CHANGE_POINT_LINE_COMMENT
+CHANGE_POINT_MULTILINE_COMMENT
+CHANGE_POINT_WS
 
 rule names:
 singleStatement
@@ -331,7 +339,8 @@
 joinCommand
 joinTarget
 joinCondition
 joinPredicate
+changePointCommand
 
 atn:
-[4, 1, 130, 650, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 1, 11, 1, 11, 3, 11, 299, 8, 11, 3, 11, 301, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 311, 8, 13, 10, 13, 12, 13, 314, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 330, 8, 17, 10, 17, 12, 17, 333, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 338, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 346, 8, 19, 10, 19, 12, 19, 349, 9, 19, 1, 19, 3, 19, 352, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 357, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 369, 8, 23, 10, 23, 12, 23, 372, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 378, 8, 24, 10, 24, 12, 24, 381, 9, 24, 1, 24, 3, 24, 384, 8, 24, 1, 24, 1, 24, 3, 24, 388, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 395, 8, 26, 1, 26, 1, 26, 3, 26, 399, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 404, 8, 27, 10, 27, 12, 27, 407, 9, 27, 1, 28, 1, 28,
1, 28, 3, 28, 412, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 417, 8, 29, 10, 29, 12, 29, 420, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 425, 8, 30, 10, 30, 12, 30, 428, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 433, 8, 31, 10, 31, 12, 31, 436, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 443, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 469, 8, 34, 10, 34, 12, 34, 472, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 480, 8, 34, 10, 34, 12, 34, 483, 9, 34, 1, 34, 1, 34, 3, 34, 487, 8, 34, 1, 35, 1, 35, 3, 35, 491, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 496, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 505, 8, 38, 10, 38, 12, 38, 508, 9, 38, 1, 39, 1, 39, 3, 39, 512, 8, 39, 1, 39, 1, 39, 3, 39, 516, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 528, 8, 42, 10, 42, 12, 42, 531, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 541, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 553, 8, 47, 10, 47, 12, 47, 556, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 566, 8, 50, 1, 51, 3, 51, 569, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 574, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 596, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 602, 8, 58, 10, 58, 12, 58, 605, 9, 58, 3, 58, 607, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 612, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 625, 8, 61, 1, 62, 3, 62, 628, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 637, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 643, 8, 64, 10, 64, 12, 64, 646, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 677, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 304, 1, 0, 0, 0, 26, 306, 1, 0, 0, 0, 28, 317, 1, 0, 0, 0, 30, 321, 1, 0, 0, 0, 32, 323, 1, 0, 0, 0, 34, 326, 1, 0, 0, 0, 36, 337, 1, 0, 0, 0, 38, 341, 1, 0, 0, 0, 40, 356, 1, 0, 0, 0, 42, 360, 1, 0, 0, 0, 44, 362, 1, 0, 0, 0, 46, 364, 1, 0, 0, 0, 48, 373, 1, 0, 0, 0, 50, 389, 1, 0, 0, 0, 52, 392, 1, 0, 0, 0, 54, 400, 1, 0, 0, 0, 56, 408, 1, 0, 0, 0, 58, 413, 1, 0, 0, 0, 60, 421, 1, 0, 0, 0, 62, 429, 1, 0, 0, 0, 64, 437, 1, 0, 0, 0, 66, 442, 1, 0, 0, 0, 68, 486, 1, 0, 0, 0, 70, 490, 1, 0, 0, 0, 72, 495, 1, 0, 0, 0, 74, 497, 1, 0, 0, 0, 76, 500, 1, 0, 0, 0, 78, 509, 1, 0, 0, 0, 80, 517, 1, 0, 0, 0, 82, 520, 1, 0, 0, 0, 84, 523, 1, 0, 0, 0, 86, 532, 1, 0, 0, 0, 88, 536, 1, 0, 0, 0, 90, 542, 1, 0, 0, 0, 92, 546, 1, 0, 0, 0, 94, 549, 1, 0, 0, 0, 96, 557, 1, 0, 0, 0, 98, 561, 1, 0, 0, 0, 100, 565, 1, 0, 0, 0, 102, 568, 1, 0, 0, 0, 104, 573, 1, 0, 0, 0, 106, 577, 1, 0, 0, 0, 108, 579, 1, 0, 0, 0, 110, 581, 1, 0, 0, 0, 
112, 584, 1, 0, 0, 0, 114, 588, 1, 0, 0, 0, 116, 591, 1, 0, 0, 0, 118, 611, 1, 0, 0, 0, 120, 615, 1, 0, 0, 0, 122, 620, 1, 0, 0, 0, 124, 627, 1, 0, 0, 0, 126, 633, 1, 0, 0, 0, 128, 638, 1, 0, 0, 0, 130, 647, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 30, 15, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 
18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 1, 0, 0, 258, 263, 3, 18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 30, 15, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 300, 5, 48, 0, 0, 287, 301, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 298, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 297, 5, 39, 0, 0, 297, 299, 3, 26, 13, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 301, 1, 0, 0, 0, 300, 287, 1, 0, 0, 0, 300, 288, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 303, 5, 55, 0, 0, 303, 23, 1, 0, 0, 0, 304, 305, 3, 72, 36, 0, 305, 25, 1, 0, 0, 0, 306, 307, 5, 69, 0, 0, 307, 312, 3, 28, 14, 0, 308, 309, 5, 39, 0, 0, 309, 311, 3, 28, 14, 0, 310, 308, 1, 0, 0, 0, 311, 314, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 315, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 315, 316, 5, 70, 0, 0, 316, 27, 1, 0, 0, 0, 317, 318, 3, 106, 53, 0, 318, 319, 5, 38, 0, 0, 319, 320, 3, 68, 34, 0, 320, 29, 1, 0, 0, 0, 321, 322, 3, 64, 32, 0, 322, 31, 1, 0, 0, 0, 323, 324, 5, 12, 0, 0, 324, 325, 3, 34, 17, 0, 325, 33, 1, 0, 0, 0, 326, 331, 3, 36, 18, 0, 327, 328, 5, 39, 0, 0, 328, 330, 3, 36, 18, 0, 329, 327, 1, 0, 0, 0, 330, 333, 1, 0, 0, 0, 331, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 35, 1, 0, 0, 0, 333, 331, 1, 0, 0, 0, 334, 335, 3, 58, 29, 0, 335, 336, 5, 36, 0, 0, 336, 338, 1, 0, 0, 0, 337, 334, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 3, 10, 5, 0, 340, 37, 1, 0, 0, 0, 341, 342, 5, 6, 0, 0, 342, 347, 3, 40, 20, 0, 343, 344, 5, 39, 0, 0, 344, 346, 3, 40, 20, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 352, 3, 46, 23, 0, 351, 350, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 39, 1, 0, 0, 0, 353, 354, 3, 42, 21, 0, 354, 355, 5, 38, 0, 0, 355, 357, 1, 0, 0, 0, 356, 353, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 3, 44, 22, 0, 359, 41, 1, 0, 0, 0, 360, 361, 5, 83, 0, 0, 361, 43, 1, 0, 0, 0, 362, 363, 7, 2, 0, 0, 363, 45, 1, 0, 0, 0, 364, 365, 5, 82, 0, 0, 365, 370, 5, 83, 0, 0, 366, 367, 5, 39, 0, 0, 367, 369, 5, 83, 0, 0, 368, 366, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 368, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 47, 1, 0, 0, 0, 372, 370, 1, 0, 0, 0, 373, 374, 5, 19, 0, 0, 374, 379, 3, 40, 20, 0, 375, 376, 5, 39, 0, 0, 376, 378, 3, 40, 20, 0, 377, 375, 1, 0, 0, 0, 378, 381, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 382, 384, 3, 54, 27, 0, 383, 382, 1, 0, 0, 0, 383, 384, 1, 0, 0, 
0, 384, 387, 1, 0, 0, 0, 385, 386, 5, 33, 0, 0, 386, 388, 3, 34, 17, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 49, 1, 0, 0, 0, 389, 390, 5, 4, 0, 0, 390, 391, 3, 34, 17, 0, 391, 51, 1, 0, 0, 0, 392, 394, 5, 15, 0, 0, 393, 395, 3, 54, 27, 0, 394, 393, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 397, 5, 33, 0, 0, 397, 399, 3, 34, 17, 0, 398, 396, 1, 0, 0, 0, 398, 399, 1, 0, 0, 0, 399, 53, 1, 0, 0, 0, 400, 405, 3, 56, 28, 0, 401, 402, 5, 39, 0, 0, 402, 404, 3, 56, 28, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 55, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 411, 3, 36, 18, 0, 409, 410, 5, 16, 0, 0, 410, 412, 3, 10, 5, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 57, 1, 0, 0, 0, 413, 418, 3, 72, 36, 0, 414, 415, 5, 41, 0, 0, 415, 417, 3, 72, 36, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 59, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 426, 3, 66, 33, 0, 422, 423, 5, 41, 0, 0, 423, 425, 3, 66, 33, 0, 424, 422, 1, 0, 0, 0, 425, 428, 1, 0, 0, 0, 426, 424, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 61, 1, 0, 0, 0, 428, 426, 1, 0, 0, 0, 429, 434, 3, 60, 30, 0, 430, 431, 5, 39, 0, 0, 431, 433, 3, 60, 30, 0, 432, 430, 1, 0, 0, 0, 433, 436, 1, 0, 0, 0, 434, 432, 1, 0, 0, 0, 434, 435, 1, 0, 0, 0, 435, 63, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 65, 1, 0, 0, 0, 439, 443, 5, 87, 0, 0, 440, 441, 4, 33, 10, 0, 441, 443, 3, 70, 35, 0, 442, 439, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 67, 1, 0, 0, 0, 444, 487, 5, 50, 0, 0, 445, 446, 3, 104, 52, 0, 446, 447, 5, 74, 0, 0, 447, 487, 1, 0, 0, 0, 448, 487, 3, 102, 51, 0, 449, 487, 3, 104, 52, 0, 450, 487, 3, 98, 49, 0, 451, 487, 3, 70, 35, 0, 452, 487, 3, 106, 53, 0, 453, 454, 5, 72, 0, 0, 454, 459, 3, 100, 50, 0, 455, 456, 5, 39, 0, 0, 456, 458, 3, 100, 50, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 73, 0, 0, 463, 487, 1, 0, 0, 0, 464, 465, 5, 72, 0, 0, 465, 470, 3, 98, 49, 0, 466, 467, 5, 39, 0, 0, 467, 469, 3, 98, 49, 0, 468, 466, 1, 0, 0, 0, 469, 472, 1, 0, 0, 0, 470, 468, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 473, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 473, 474, 5, 73, 0, 0, 474, 487, 1, 0, 0, 0, 475, 476, 5, 72, 0, 0, 476, 481, 3, 106, 53, 0, 477, 478, 5, 39, 0, 0, 478, 480, 3, 106, 53, 0, 479, 477, 1, 0, 0, 0, 480, 483, 1, 0, 0, 0, 481, 479, 1, 0, 0, 0, 481, 482, 1, 0, 0, 0, 482, 484, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 484, 485, 5, 73, 0, 0, 485, 487, 1, 0, 0, 0, 486, 444, 1, 0, 0, 0, 486, 445, 1, 0, 0, 0, 486, 448, 1, 0, 0, 0, 486, 449, 1, 0, 0, 0, 486, 450, 1, 0, 0, 0, 486, 451, 1, 0, 0, 0, 486, 452, 1, 0, 0, 0, 486, 453, 1, 0, 0, 0, 486, 464, 1, 0, 0, 0, 486, 475, 1, 0, 0, 0, 487, 69, 1, 0, 0, 0, 488, 491, 5, 53, 0, 0, 489, 491, 5, 71, 0, 0, 490, 488, 1, 0, 0, 0, 490, 489, 1, 0, 0, 0, 491, 71, 1, 0, 0, 0, 492, 496, 3, 64, 32, 0, 493, 494, 4, 36, 11, 0, 494, 496, 3, 70, 35, 0, 495, 492, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 73, 1, 0, 0, 0, 497, 498, 5, 9, 0, 0, 498, 499, 5, 31, 0, 0, 499, 75, 1, 0, 0, 0, 500, 501, 5, 14, 0, 0, 501, 506, 3, 78, 39, 0, 502, 503, 5, 39, 0, 0, 503, 505, 3, 78, 39, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 77, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 511, 3, 10, 5, 0, 510, 512, 7, 4, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 5, 51, 0, 0, 514, 516, 7, 5, 0, 0, 515, 513, 1, 0, 0, 0, 515, 516, 1, 
0, 0, 0, 516, 79, 1, 0, 0, 0, 517, 518, 5, 8, 0, 0, 518, 519, 3, 62, 31, 0, 519, 81, 1, 0, 0, 0, 520, 521, 5, 2, 0, 0, 521, 522, 3, 62, 31, 0, 522, 83, 1, 0, 0, 0, 523, 524, 5, 11, 0, 0, 524, 529, 3, 86, 43, 0, 525, 526, 5, 39, 0, 0, 526, 528, 3, 86, 43, 0, 527, 525, 1, 0, 0, 0, 528, 531, 1, 0, 0, 0, 529, 527, 1, 0, 0, 0, 529, 530, 1, 0, 0, 0, 530, 85, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 532, 533, 3, 60, 30, 0, 533, 534, 5, 91, 0, 0, 534, 535, 3, 60, 30, 0, 535, 87, 1, 0, 0, 0, 536, 537, 5, 1, 0, 0, 537, 538, 3, 20, 10, 0, 538, 540, 3, 106, 53, 0, 539, 541, 3, 94, 47, 0, 540, 539, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 89, 1, 0, 0, 0, 542, 543, 5, 7, 0, 0, 543, 544, 3, 20, 10, 0, 544, 545, 3, 106, 53, 0, 545, 91, 1, 0, 0, 0, 546, 547, 5, 10, 0, 0, 547, 548, 3, 58, 29, 0, 548, 93, 1, 0, 0, 0, 549, 554, 3, 96, 48, 0, 550, 551, 5, 39, 0, 0, 551, 553, 3, 96, 48, 0, 552, 550, 1, 0, 0, 0, 553, 556, 1, 0, 0, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 95, 1, 0, 0, 0, 556, 554, 1, 0, 0, 0, 557, 558, 3, 64, 32, 0, 558, 559, 5, 36, 0, 0, 559, 560, 3, 68, 34, 0, 560, 97, 1, 0, 0, 0, 561, 562, 7, 6, 0, 0, 562, 99, 1, 0, 0, 0, 563, 566, 3, 102, 51, 0, 564, 566, 3, 104, 52, 0, 565, 563, 1, 0, 0, 0, 565, 564, 1, 0, 0, 0, 566, 101, 1, 0, 0, 0, 567, 569, 7, 0, 0, 0, 568, 567, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 5, 32, 0, 0, 571, 103, 1, 0, 0, 0, 572, 574, 7, 0, 0, 0, 573, 572, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 5, 31, 0, 0, 576, 105, 1, 0, 0, 0, 577, 578, 5, 30, 0, 0, 578, 107, 1, 0, 0, 0, 579, 580, 7, 7, 0, 0, 580, 109, 1, 0, 0, 0, 581, 582, 5, 5, 0, 0, 582, 583, 3, 112, 56, 0, 583, 111, 1, 0, 0, 0, 584, 585, 5, 72, 0, 0, 585, 586, 3, 2, 1, 0, 586, 587, 5, 73, 0, 0, 587, 113, 1, 0, 0, 0, 588, 589, 5, 13, 0, 0, 589, 590, 5, 107, 0, 0, 590, 115, 1, 0, 0, 0, 591, 592, 5, 3, 0, 0, 592, 595, 5, 97, 0, 0, 593, 594, 5, 95, 0, 0, 594, 596, 3, 60, 30, 0, 595, 593, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 606, 1, 0, 0, 0, 597, 598, 5, 96, 0, 0, 598, 603, 3, 118, 59, 0, 599, 600, 5, 39, 0, 0, 600, 602, 3, 118, 59, 0, 601, 599, 1, 0, 0, 0, 602, 605, 1, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 607, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 606, 597, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 117, 1, 0, 0, 0, 608, 609, 3, 60, 30, 0, 609, 610, 5, 36, 0, 0, 610, 612, 1, 0, 0, 0, 611, 608, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 3, 60, 30, 0, 614, 119, 1, 0, 0, 0, 615, 616, 5, 18, 0, 0, 616, 617, 3, 40, 20, 0, 617, 618, 5, 95, 0, 0, 618, 619, 3, 62, 31, 0, 619, 121, 1, 0, 0, 0, 620, 621, 5, 17, 0, 0, 621, 624, 3, 54, 27, 0, 622, 623, 5, 33, 0, 0, 623, 625, 3, 34, 17, 0, 624, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 123, 1, 0, 0, 0, 626, 628, 7, 8, 0, 0, 627, 626, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 5, 20, 0, 0, 630, 631, 3, 126, 63, 0, 631, 632, 3, 128, 64, 0, 632, 125, 1, 0, 0, 0, 633, 636, 3, 40, 20, 0, 634, 635, 5, 91, 0, 0, 635, 637, 3, 64, 32, 0, 636, 634, 1, 0, 0, 0, 636, 637, 1, 0, 0, 0, 637, 127, 1, 0, 0, 0, 638, 639, 5, 95, 0, 0, 639, 644, 3, 130, 65, 0, 640, 641, 5, 39, 0, 0, 641, 643, 3, 130, 65, 0, 642, 640, 1, 0, 0, 0, 643, 646, 1, 0, 0, 0, 644, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 129, 1, 0, 0, 0, 646, 644, 1, 0, 0, 0, 647, 648, 3, 16, 8, 0, 648, 131, 1, 0, 0, 0, 63, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 238, 248, 254, 262, 264, 275, 282, 293, 298, 300, 312, 331, 337, 347, 351, 356, 370, 379, 383, 387, 394, 398, 405, 411, 418, 426, 434, 442, 459, 470, 
481, 486, 490, 495, 506, 511, 515, 529, 540, 554, 565, 568, 573, 595, 603, 606, 611, 624, 627, 636, 644] \ No newline at end of file +[4, 1, 134, 659, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 144, 8, 1, 10, 1, 12, 1, 147, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 155, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 176, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 188, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 195, 8, 5, 10, 5, 12, 5, 198, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 205, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 210, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 218, 8, 5, 10, 5, 12, 5, 221, 9, 5, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 232, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 237, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 242, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 252, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 258, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 266, 8, 9, 10, 9, 12, 9, 269, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 279, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 284, 8, 10, 10, 10, 12, 10, 287, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 295, 8, 11, 10, 11, 12, 11, 298, 9, 11, 1, 11, 1, 11, 3, 11, 302, 8, 11, 3, 11, 304, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 314, 8, 13, 10, 13, 12, 13, 317, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 333, 8, 17, 10, 17, 12, 17, 336, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 341, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 349, 8, 19, 10, 19, 12, 19, 352, 9, 19, 1, 19, 3, 19, 355, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 360, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 372, 8, 23, 10, 23, 12, 23, 375, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 381, 8, 24, 10, 24, 12, 24, 384, 9, 24, 1, 24, 3, 24, 387, 8, 24, 1, 24, 1, 24, 3, 24, 391, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 398, 8, 26, 1, 26, 1, 26, 3, 26, 402, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 407, 8, 27, 10, 27, 12, 27, 410, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 415, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 420, 8, 29, 10, 29, 12, 29, 423, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 428, 8, 30, 10, 30, 12, 30, 431, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 436, 8, 31, 10, 31, 12, 31, 439, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 445, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 460, 8, 34, 10, 34, 12, 34, 463, 9, 34, 1, 
34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 471, 8, 34, 10, 34, 12, 34, 474, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 482, 8, 34, 10, 34, 12, 34, 485, 9, 34, 1, 34, 1, 34, 3, 34, 489, 8, 34, 1, 35, 1, 35, 3, 35, 493, 8, 35, 1, 36, 1, 36, 3, 36, 497, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 506, 8, 38, 10, 38, 12, 38, 509, 9, 38, 1, 39, 1, 39, 3, 39, 513, 8, 39, 1, 39, 1, 39, 3, 39, 517, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 529, 8, 42, 10, 42, 12, 42, 532, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 542, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 554, 8, 47, 10, 47, 12, 47, 557, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 567, 8, 50, 1, 51, 3, 51, 570, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 575, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 597, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 603, 8, 58, 10, 58, 12, 58, 606, 9, 58, 3, 58, 608, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 613, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 626, 8, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 639, 8, 64, 10, 64, 12, 64, 642, 9, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 650, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 657, 8, 66, 1, 66, 0, 4, 2, 10, 18, 20, 67, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 2, 0, 17, 17, 23, 24, 686, 0, 134, 1, 0, 0, 0, 2, 137, 1, 0, 0, 0, 4, 154, 1, 0, 0, 0, 6, 175, 1, 0, 0, 0, 8, 177, 1, 0, 0, 0, 10, 209, 1, 0, 0, 0, 12, 236, 1, 0, 0, 0, 14, 238, 1, 0, 0, 0, 16, 251, 1, 0, 0, 0, 18, 257, 1, 0, 0, 0, 20, 278, 1, 0, 0, 0, 22, 288, 1, 0, 0, 0, 24, 307, 1, 0, 0, 0, 26, 309, 1, 0, 0, 0, 28, 320, 1, 0, 0, 0, 30, 324, 1, 0, 0, 0, 32, 326, 1, 0, 0, 0, 34, 329, 1, 0, 0, 0, 36, 340, 1, 0, 0, 0, 38, 344, 1, 0, 0, 0, 40, 359, 1, 0, 0, 0, 42, 363, 1, 0, 0, 0, 44, 365, 1, 0, 0, 0, 46, 367, 1, 0, 0, 0, 48, 376, 1, 0, 0, 0, 50, 392, 1, 0, 0, 0, 52, 395, 1, 0, 0, 0, 54, 403, 1, 0, 0, 0, 56, 411, 1, 0, 0, 0, 58, 416, 1, 0, 0, 0, 60, 424, 1, 0, 0, 0, 62, 432, 1, 0, 0, 0, 64, 440, 1, 0, 0, 0, 66, 444, 1, 0, 0, 0, 68, 488, 1, 0, 0, 0, 70, 492, 1, 0, 0, 0, 72, 496, 1, 0, 0, 0, 74, 498, 1, 0, 0, 0, 76, 501, 1, 0, 0, 0, 78, 510, 1, 0, 0, 0, 80, 518, 1, 0, 0, 0, 82, 521, 1, 0, 0, 0, 84, 524, 1, 0, 0, 0, 86, 533, 1, 0, 0, 0, 88, 537, 1, 0, 0, 0, 90, 543, 1, 0, 0, 0, 92, 547, 1, 0, 0, 0, 94, 550, 1, 0, 0, 0, 96, 558, 1, 0, 0, 0, 98, 562, 1, 0, 0, 0, 100, 566, 1, 0, 0, 0, 102, 569, 1, 0, 0, 0, 104, 574, 1, 0, 0, 0, 106, 578, 1, 0, 0, 0, 108, 580, 1, 0, 0, 0, 110, 582, 1, 0, 0, 0, 112, 585, 1, 0, 0, 0, 114, 589, 1, 0, 0, 0, 116, 592, 1, 0, 0, 0, 118, 612, 1, 0, 0, 0, 120, 616, 1, 0, 0, 0, 122, 621, 1, 0, 0, 0, 124, 627, 1, 0, 0, 0, 126, 632, 1, 0, 0, 0, 128, 634, 1, 0, 0, 0, 130, 643, 1, 0, 0, 0, 132, 645, 1, 0, 0, 0, 134, 135, 3, 2, 1, 0, 135, 136, 5, 0, 0, 1, 136, 1, 1, 0, 0, 0, 137, 138, 6, 1, -1, 0, 138, 139, 3, 4, 2, 0, 139, 145, 1, 0, 
0, 0, 140, 141, 10, 1, 0, 0, 141, 142, 5, 29, 0, 0, 142, 144, 3, 6, 3, 0, 143, 140, 1, 0, 0, 0, 144, 147, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 145, 146, 1, 0, 0, 0, 146, 3, 1, 0, 0, 0, 147, 145, 1, 0, 0, 0, 148, 155, 3, 110, 55, 0, 149, 155, 3, 38, 19, 0, 150, 155, 3, 32, 16, 0, 151, 155, 3, 114, 57, 0, 152, 153, 4, 2, 1, 0, 153, 155, 3, 48, 24, 0, 154, 148, 1, 0, 0, 0, 154, 149, 1, 0, 0, 0, 154, 150, 1, 0, 0, 0, 154, 151, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 155, 5, 1, 0, 0, 0, 156, 176, 3, 50, 25, 0, 157, 176, 3, 8, 4, 0, 158, 176, 3, 80, 40, 0, 159, 176, 3, 74, 37, 0, 160, 176, 3, 52, 26, 0, 161, 176, 3, 76, 38, 0, 162, 176, 3, 82, 41, 0, 163, 176, 3, 84, 42, 0, 164, 176, 3, 88, 44, 0, 165, 176, 3, 90, 45, 0, 166, 176, 3, 116, 58, 0, 167, 176, 3, 92, 46, 0, 168, 176, 3, 124, 62, 0, 169, 170, 4, 3, 2, 0, 170, 176, 3, 122, 61, 0, 171, 172, 4, 3, 3, 0, 172, 176, 3, 120, 60, 0, 173, 174, 4, 3, 4, 0, 174, 176, 3, 132, 66, 0, 175, 156, 1, 0, 0, 0, 175, 157, 1, 0, 0, 0, 175, 158, 1, 0, 0, 0, 175, 159, 1, 0, 0, 0, 175, 160, 1, 0, 0, 0, 175, 161, 1, 0, 0, 0, 175, 162, 1, 0, 0, 0, 175, 163, 1, 0, 0, 0, 175, 164, 1, 0, 0, 0, 175, 165, 1, 0, 0, 0, 175, 166, 1, 0, 0, 0, 175, 167, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 175, 169, 1, 0, 0, 0, 175, 171, 1, 0, 0, 0, 175, 173, 1, 0, 0, 0, 176, 7, 1, 0, 0, 0, 177, 178, 5, 16, 0, 0, 178, 179, 3, 10, 5, 0, 179, 9, 1, 0, 0, 0, 180, 181, 6, 5, -1, 0, 181, 182, 5, 49, 0, 0, 182, 210, 3, 10, 5, 8, 183, 210, 3, 16, 8, 0, 184, 210, 3, 12, 6, 0, 185, 187, 3, 16, 8, 0, 186, 188, 5, 49, 0, 0, 187, 186, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 5, 44, 0, 0, 190, 191, 5, 48, 0, 0, 191, 196, 3, 16, 8, 0, 192, 193, 5, 39, 0, 0, 193, 195, 3, 16, 8, 0, 194, 192, 1, 0, 0, 0, 195, 198, 1, 0, 0, 0, 196, 194, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 199, 1, 0, 0, 0, 198, 196, 1, 0, 0, 0, 199, 200, 5, 55, 0, 0, 200, 210, 1, 0, 0, 0, 201, 202, 3, 16, 8, 0, 202, 204, 5, 45, 0, 0, 203, 205, 5, 49, 0, 0, 204, 203, 1, 0, 0, 0, 204, 205, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 207, 5, 50, 0, 0, 207, 210, 1, 0, 0, 0, 208, 210, 3, 14, 7, 0, 209, 180, 1, 0, 0, 0, 209, 183, 1, 0, 0, 0, 209, 184, 1, 0, 0, 0, 209, 185, 1, 0, 0, 0, 209, 201, 1, 0, 0, 0, 209, 208, 1, 0, 0, 0, 210, 219, 1, 0, 0, 0, 211, 212, 10, 5, 0, 0, 212, 213, 5, 34, 0, 0, 213, 218, 3, 10, 5, 6, 214, 215, 10, 4, 0, 0, 215, 216, 5, 52, 0, 0, 216, 218, 3, 10, 5, 5, 217, 211, 1, 0, 0, 0, 217, 214, 1, 0, 0, 0, 218, 221, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 11, 1, 0, 0, 0, 221, 219, 1, 0, 0, 0, 222, 224, 3, 16, 8, 0, 223, 225, 5, 49, 0, 0, 224, 223, 1, 0, 0, 0, 224, 225, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 227, 5, 47, 0, 0, 227, 228, 3, 106, 53, 0, 228, 237, 1, 0, 0, 0, 229, 231, 3, 16, 8, 0, 230, 232, 5, 49, 0, 0, 231, 230, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 234, 5, 54, 0, 0, 234, 235, 3, 106, 53, 0, 235, 237, 1, 0, 0, 0, 236, 222, 1, 0, 0, 0, 236, 229, 1, 0, 0, 0, 237, 13, 1, 0, 0, 0, 238, 241, 3, 58, 29, 0, 239, 240, 5, 37, 0, 0, 240, 242, 3, 30, 15, 0, 241, 239, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 5, 38, 0, 0, 244, 245, 3, 68, 34, 0, 245, 15, 1, 0, 0, 0, 246, 252, 3, 18, 9, 0, 247, 248, 3, 18, 9, 0, 248, 249, 3, 108, 54, 0, 249, 250, 3, 18, 9, 0, 250, 252, 1, 0, 0, 0, 251, 246, 1, 0, 0, 0, 251, 247, 1, 0, 0, 0, 252, 17, 1, 0, 0, 0, 253, 254, 6, 9, -1, 0, 254, 258, 3, 20, 10, 0, 255, 256, 7, 0, 0, 0, 256, 258, 3, 18, 9, 3, 257, 253, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 258, 267, 1, 0, 0, 0, 259, 260, 10, 2, 0, 0, 260, 261, 7, 1, 0, 0, 
261, 266, 3, 18, 9, 3, 262, 263, 10, 1, 0, 0, 263, 264, 7, 0, 0, 0, 264, 266, 3, 18, 9, 2, 265, 259, 1, 0, 0, 0, 265, 262, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 19, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 270, 271, 6, 10, -1, 0, 271, 279, 3, 68, 34, 0, 272, 279, 3, 58, 29, 0, 273, 279, 3, 22, 11, 0, 274, 275, 5, 48, 0, 0, 275, 276, 3, 10, 5, 0, 276, 277, 5, 55, 0, 0, 277, 279, 1, 0, 0, 0, 278, 270, 1, 0, 0, 0, 278, 272, 1, 0, 0, 0, 278, 273, 1, 0, 0, 0, 278, 274, 1, 0, 0, 0, 279, 285, 1, 0, 0, 0, 280, 281, 10, 1, 0, 0, 281, 282, 5, 37, 0, 0, 282, 284, 3, 30, 15, 0, 283, 280, 1, 0, 0, 0, 284, 287, 1, 0, 0, 0, 285, 283, 1, 0, 0, 0, 285, 286, 1, 0, 0, 0, 286, 21, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 288, 289, 3, 24, 12, 0, 289, 303, 5, 48, 0, 0, 290, 304, 5, 66, 0, 0, 291, 296, 3, 10, 5, 0, 292, 293, 5, 39, 0, 0, 293, 295, 3, 10, 5, 0, 294, 292, 1, 0, 0, 0, 295, 298, 1, 0, 0, 0, 296, 294, 1, 0, 0, 0, 296, 297, 1, 0, 0, 0, 297, 301, 1, 0, 0, 0, 298, 296, 1, 0, 0, 0, 299, 300, 5, 39, 0, 0, 300, 302, 3, 26, 13, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 304, 1, 0, 0, 0, 303, 290, 1, 0, 0, 0, 303, 291, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, 5, 55, 0, 0, 306, 23, 1, 0, 0, 0, 307, 308, 3, 72, 36, 0, 308, 25, 1, 0, 0, 0, 309, 310, 5, 69, 0, 0, 310, 315, 3, 28, 14, 0, 311, 312, 5, 39, 0, 0, 312, 314, 3, 28, 14, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 318, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 319, 5, 70, 0, 0, 319, 27, 1, 0, 0, 0, 320, 321, 3, 106, 53, 0, 321, 322, 5, 38, 0, 0, 322, 323, 3, 68, 34, 0, 323, 29, 1, 0, 0, 0, 324, 325, 3, 64, 32, 0, 325, 31, 1, 0, 0, 0, 326, 327, 5, 12, 0, 0, 327, 328, 3, 34, 17, 0, 328, 33, 1, 0, 0, 0, 329, 334, 3, 36, 18, 0, 330, 331, 5, 39, 0, 0, 331, 333, 3, 36, 18, 0, 332, 330, 1, 0, 0, 0, 333, 336, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 35, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337, 338, 3, 58, 29, 0, 338, 339, 5, 36, 0, 0, 339, 341, 1, 0, 0, 0, 340, 337, 1, 0, 0, 0, 340, 341, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 343, 3, 10, 5, 0, 343, 37, 1, 0, 0, 0, 344, 345, 5, 6, 0, 0, 345, 350, 3, 40, 20, 0, 346, 347, 5, 39, 0, 0, 347, 349, 3, 40, 20, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 354, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 355, 3, 46, 23, 0, 354, 353, 1, 0, 0, 0, 354, 355, 1, 0, 0, 0, 355, 39, 1, 0, 0, 0, 356, 357, 3, 42, 21, 0, 357, 358, 5, 38, 0, 0, 358, 360, 1, 0, 0, 0, 359, 356, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 361, 1, 0, 0, 0, 361, 362, 3, 44, 22, 0, 362, 41, 1, 0, 0, 0, 363, 364, 7, 2, 0, 0, 364, 43, 1, 0, 0, 0, 365, 366, 7, 2, 0, 0, 366, 45, 1, 0, 0, 0, 367, 368, 5, 82, 0, 0, 368, 373, 5, 83, 0, 0, 369, 370, 5, 39, 0, 0, 370, 372, 5, 83, 0, 0, 371, 369, 1, 0, 0, 0, 372, 375, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 47, 1, 0, 0, 0, 375, 373, 1, 0, 0, 0, 376, 377, 5, 21, 0, 0, 377, 382, 3, 40, 20, 0, 378, 379, 5, 39, 0, 0, 379, 381, 3, 40, 20, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 3, 54, 27, 0, 386, 385, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 390, 1, 0, 0, 0, 388, 389, 5, 33, 0, 0, 389, 391, 3, 34, 17, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 49, 1, 0, 0, 0, 392, 393, 5, 4, 0, 0, 393, 394, 3, 34, 17, 0, 394, 51, 1, 0, 0, 0, 395, 397, 5, 15, 0, 0, 396, 398, 3, 54, 27, 0, 397, 396, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 
401, 1, 0, 0, 0, 399, 400, 5, 33, 0, 0, 400, 402, 3, 34, 17, 0, 401, 399, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 53, 1, 0, 0, 0, 403, 408, 3, 56, 28, 0, 404, 405, 5, 39, 0, 0, 405, 407, 3, 56, 28, 0, 406, 404, 1, 0, 0, 0, 407, 410, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 408, 409, 1, 0, 0, 0, 409, 55, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 411, 414, 3, 36, 18, 0, 412, 413, 5, 16, 0, 0, 413, 415, 3, 10, 5, 0, 414, 412, 1, 0, 0, 0, 414, 415, 1, 0, 0, 0, 415, 57, 1, 0, 0, 0, 416, 421, 3, 72, 36, 0, 417, 418, 5, 41, 0, 0, 418, 420, 3, 72, 36, 0, 419, 417, 1, 0, 0, 0, 420, 423, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 59, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 424, 429, 3, 66, 33, 0, 425, 426, 5, 41, 0, 0, 426, 428, 3, 66, 33, 0, 427, 425, 1, 0, 0, 0, 428, 431, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 429, 430, 1, 0, 0, 0, 430, 61, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 437, 3, 60, 30, 0, 433, 434, 5, 39, 0, 0, 434, 436, 3, 60, 30, 0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 63, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 441, 7, 3, 0, 0, 441, 65, 1, 0, 0, 0, 442, 445, 5, 87, 0, 0, 443, 445, 3, 70, 35, 0, 444, 442, 1, 0, 0, 0, 444, 443, 1, 0, 0, 0, 445, 67, 1, 0, 0, 0, 446, 489, 5, 50, 0, 0, 447, 448, 3, 104, 52, 0, 448, 449, 5, 74, 0, 0, 449, 489, 1, 0, 0, 0, 450, 489, 3, 102, 51, 0, 451, 489, 3, 104, 52, 0, 452, 489, 3, 98, 49, 0, 453, 489, 3, 70, 35, 0, 454, 489, 3, 106, 53, 0, 455, 456, 5, 72, 0, 0, 456, 461, 3, 100, 50, 0, 457, 458, 5, 39, 0, 0, 458, 460, 3, 100, 50, 0, 459, 457, 1, 0, 0, 0, 460, 463, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 464, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 464, 465, 5, 73, 0, 0, 465, 489, 1, 0, 0, 0, 466, 467, 5, 72, 0, 0, 467, 472, 3, 98, 49, 0, 468, 469, 5, 39, 0, 0, 469, 471, 3, 98, 49, 0, 470, 468, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 475, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 476, 5, 73, 0, 0, 476, 489, 1, 0, 0, 0, 477, 478, 5, 72, 0, 0, 478, 483, 3, 106, 53, 0, 479, 480, 5, 39, 0, 0, 480, 482, 3, 106, 53, 0, 481, 479, 1, 0, 0, 0, 482, 485, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 486, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 486, 487, 5, 73, 0, 0, 487, 489, 1, 0, 0, 0, 488, 446, 1, 0, 0, 0, 488, 447, 1, 0, 0, 0, 488, 450, 1, 0, 0, 0, 488, 451, 1, 0, 0, 0, 488, 452, 1, 0, 0, 0, 488, 453, 1, 0, 0, 0, 488, 454, 1, 0, 0, 0, 488, 455, 1, 0, 0, 0, 488, 466, 1, 0, 0, 0, 488, 477, 1, 0, 0, 0, 489, 69, 1, 0, 0, 0, 490, 493, 5, 53, 0, 0, 491, 493, 5, 71, 0, 0, 492, 490, 1, 0, 0, 0, 492, 491, 1, 0, 0, 0, 493, 71, 1, 0, 0, 0, 494, 497, 3, 64, 32, 0, 495, 497, 3, 70, 35, 0, 496, 494, 1, 0, 0, 0, 496, 495, 1, 0, 0, 0, 497, 73, 1, 0, 0, 0, 498, 499, 5, 9, 0, 0, 499, 500, 5, 31, 0, 0, 500, 75, 1, 0, 0, 0, 501, 502, 5, 14, 0, 0, 502, 507, 3, 78, 39, 0, 503, 504, 5, 39, 0, 0, 504, 506, 3, 78, 39, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 77, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 512, 3, 10, 5, 0, 511, 513, 7, 4, 0, 0, 512, 511, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 515, 5, 51, 0, 0, 515, 517, 7, 5, 0, 0, 516, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 79, 1, 0, 0, 0, 518, 519, 5, 8, 0, 0, 519, 520, 3, 62, 31, 0, 520, 81, 1, 0, 0, 0, 521, 522, 5, 2, 0, 0, 522, 523, 3, 62, 31, 0, 523, 83, 1, 0, 0, 0, 524, 525, 5, 11, 0, 0, 525, 530, 3, 86, 43, 0, 526, 527, 5, 39, 0, 0, 527, 529, 3, 86, 43, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 
531, 85, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 60, 30, 0, 534, 535, 5, 91, 0, 0, 535, 536, 3, 60, 30, 0, 536, 87, 1, 0, 0, 0, 537, 538, 5, 1, 0, 0, 538, 539, 3, 20, 10, 0, 539, 541, 3, 106, 53, 0, 540, 542, 3, 94, 47, 0, 541, 540, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 89, 1, 0, 0, 0, 543, 544, 5, 7, 0, 0, 544, 545, 3, 20, 10, 0, 545, 546, 3, 106, 53, 0, 546, 91, 1, 0, 0, 0, 547, 548, 5, 10, 0, 0, 548, 549, 3, 58, 29, 0, 549, 93, 1, 0, 0, 0, 550, 555, 3, 96, 48, 0, 551, 552, 5, 39, 0, 0, 552, 554, 3, 96, 48, 0, 553, 551, 1, 0, 0, 0, 554, 557, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 95, 1, 0, 0, 0, 557, 555, 1, 0, 0, 0, 558, 559, 3, 64, 32, 0, 559, 560, 5, 36, 0, 0, 560, 561, 3, 68, 34, 0, 561, 97, 1, 0, 0, 0, 562, 563, 7, 6, 0, 0, 563, 99, 1, 0, 0, 0, 564, 567, 3, 102, 51, 0, 565, 567, 3, 104, 52, 0, 566, 564, 1, 0, 0, 0, 566, 565, 1, 0, 0, 0, 567, 101, 1, 0, 0, 0, 568, 570, 7, 0, 0, 0, 569, 568, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 5, 32, 0, 0, 572, 103, 1, 0, 0, 0, 573, 575, 7, 0, 0, 0, 574, 573, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 5, 31, 0, 0, 577, 105, 1, 0, 0, 0, 578, 579, 5, 30, 0, 0, 579, 107, 1, 0, 0, 0, 580, 581, 7, 7, 0, 0, 581, 109, 1, 0, 0, 0, 582, 583, 5, 5, 0, 0, 583, 584, 3, 112, 56, 0, 584, 111, 1, 0, 0, 0, 585, 586, 5, 72, 0, 0, 586, 587, 3, 2, 1, 0, 587, 588, 5, 73, 0, 0, 588, 113, 1, 0, 0, 0, 589, 590, 5, 13, 0, 0, 590, 591, 5, 107, 0, 0, 591, 115, 1, 0, 0, 0, 592, 593, 5, 3, 0, 0, 593, 596, 5, 97, 0, 0, 594, 595, 5, 95, 0, 0, 595, 597, 3, 60, 30, 0, 596, 594, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 607, 1, 0, 0, 0, 598, 599, 5, 96, 0, 0, 599, 604, 3, 118, 59, 0, 600, 601, 5, 39, 0, 0, 601, 603, 3, 118, 59, 0, 602, 600, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 608, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 598, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 117, 1, 0, 0, 0, 609, 610, 3, 60, 30, 0, 610, 611, 5, 36, 0, 0, 611, 613, 1, 0, 0, 0, 612, 609, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 3, 60, 30, 0, 615, 119, 1, 0, 0, 0, 616, 617, 5, 20, 0, 0, 617, 618, 3, 40, 20, 0, 618, 619, 5, 95, 0, 0, 619, 620, 3, 62, 31, 0, 620, 121, 1, 0, 0, 0, 621, 622, 5, 19, 0, 0, 622, 625, 3, 54, 27, 0, 623, 624, 5, 33, 0, 0, 624, 626, 3, 34, 17, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 123, 1, 0, 0, 0, 627, 628, 7, 8, 0, 0, 628, 629, 5, 121, 0, 0, 629, 630, 3, 126, 63, 0, 630, 631, 3, 128, 64, 0, 631, 125, 1, 0, 0, 0, 632, 633, 3, 40, 20, 0, 633, 127, 1, 0, 0, 0, 634, 635, 5, 95, 0, 0, 635, 640, 3, 130, 65, 0, 636, 637, 5, 39, 0, 0, 637, 639, 3, 130, 65, 0, 638, 636, 1, 0, 0, 0, 639, 642, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 129, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 643, 644, 3, 16, 8, 0, 644, 131, 1, 0, 0, 0, 645, 646, 5, 18, 0, 0, 646, 649, 3, 58, 29, 0, 647, 648, 5, 95, 0, 0, 648, 650, 3, 58, 29, 0, 649, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 656, 1, 0, 0, 0, 651, 652, 5, 91, 0, 0, 652, 653, 3, 58, 29, 0, 653, 654, 5, 39, 0, 0, 654, 655, 3, 58, 29, 0, 655, 657, 1, 0, 0, 0, 656, 651, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 133, 1, 0, 0, 0, 63, 145, 154, 175, 187, 196, 204, 209, 217, 219, 224, 231, 236, 241, 251, 257, 265, 267, 278, 285, 296, 301, 303, 315, 334, 340, 350, 354, 359, 373, 382, 386, 390, 397, 401, 408, 414, 421, 429, 437, 444, 461, 472, 483, 488, 492, 496, 507, 512, 516, 530, 541, 555, 566, 569, 574, 596, 604, 607, 612, 625, 640, 649, 656] \ No newline at end of file diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 1e0a636d67182..3691f23744088 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -27,8 +27,8 @@ public class EsqlBaseParser extends ParserConfig { public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, - DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, + WHERE=16, JOIN_LOOKUP=17, DEV_CHANGE_POINT=18, DEV_INLINESTATS=19, DEV_LOOKUP=20, + DEV_METRICS=21, DEV_JOIN_FULL=22, DEV_JOIN_LEFT=23, DEV_JOIN_RIGHT=24, UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, @@ -48,10 +48,11 @@ public class EsqlBaseParser extends ParserConfig { SHOW_MULTILINE_COMMENT=109, SHOW_WS=110, SETTING=111, SETTING_LINE_COMMENT=112, SETTTING_MULTILINE_COMMENT=113, SETTING_WS=114, LOOKUP_LINE_COMMENT=115, LOOKUP_MULTILINE_COMMENT=116, LOOKUP_WS=117, LOOKUP_FIELD_LINE_COMMENT=118, - LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, USING=121, JOIN_LINE_COMMENT=122, - JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, METRICS_MULTILINE_COMMENT=126, - METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, CLOSING_METRICS_MULTILINE_COMMENT=129, - CLOSING_METRICS_WS=130; + LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, JOIN=121, USING=122, + JOIN_LINE_COMMENT=123, JOIN_MULTILINE_COMMENT=124, JOIN_WS=125, METRICS_LINE_COMMENT=126, + METRICS_MULTILINE_COMMENT=127, METRICS_WS=128, CLOSING_METRICS_LINE_COMMENT=129, + CLOSING_METRICS_MULTILINE_COMMENT=130, CLOSING_METRICS_WS=131, CHANGE_POINT_LINE_COMMENT=132, + CHANGE_POINT_MULTILINE_COMMENT=133, CHANGE_POINT_WS=134; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ -73,7 +74,7 @@ public class EsqlBaseParser extends ParserConfig { RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, RULE_inlinestatsCommand = 61, RULE_joinCommand = 62, RULE_joinTarget = 63, - RULE_joinCondition = 64, RULE_joinPredicate = 65; + RULE_joinCondition = 64, RULE_joinPredicate = 65, RULE_changePointCommand = 66; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", @@ -89,7 +90,8 @@ private static String[] makeRuleNames() { "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", - "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate" + "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate", + "changePointCommand" }; } public static final String[] ruleNames = makeRuleNames(); @@ -98,17 
+100,17 @@ private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", - "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", - "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'{'", "'}'", - null, null, "']'", null, null, null, null, null, null, null, null, "'metadata'", - null, null, null, null, null, null, null, null, "'as'", null, null, null, - "'on'", "'with'", null, null, null, null, null, null, null, null, null, - null, "'info'", null, null, null, null, null, null, null, null, null, - null, null, null, null, "'USING'" + "'sort'", "'stats'", "'where'", "'lookup'", null, null, null, null, null, + null, null, null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", + "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", + "'{'", "'}'", null, null, "']'", null, null, null, null, null, null, + null, null, "'metadata'", null, null, null, null, null, null, null, null, + "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, + null, null, null, null, null, "'info'", null, null, null, null, null, + null, null, null, null, null, null, null, null, "'join'", "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -116,13 +118,13 @@ private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", - "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", - "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", - "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "WHERE", "JOIN_LOOKUP", "DEV_CHANGE_POINT", "DEV_INLINESTATS", "DEV_LOOKUP", + "DEV_METRICS", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", + "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", + "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", @@ -137,9 +139,10 @@ private static String[] makeSymbolicNames() { "SHOW_WS", "SETTING", 
"SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", - "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" + "JOIN", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", + "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", + "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS", "CHANGE_POINT_LINE_COMMENT", + "CHANGE_POINT_MULTILINE_COMMENT", "CHANGE_POINT_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -226,9 +229,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(132); + setState(134); query(0); - setState(133); + setState(135); match(EOF); } } @@ -324,11 +327,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(136); + setState(138); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(143); + setState(145); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -339,16 +342,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(138); + setState(140); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(139); + setState(141); match(PIPE); - setState(140); + setState(142); processingCommand(); } } } - setState(145); + setState(147); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -406,43 +409,43 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(152); + setState(154); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(146); + setState(148); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(147); + setState(149); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(148); + setState(150); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(149); + setState(151); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(150); + setState(152); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(151); + setState(153); metricsCommand(); } break; @@ -497,14 +500,17 @@ public EnrichCommandContext enrichCommand() { public MvExpandCommandContext mvExpandCommand() { return getRuleContext(MvExpandCommandContext.class,0); } + public JoinCommandContext joinCommand() { + return getRuleContext(JoinCommandContext.class,0); + } public InlinestatsCommandContext inlinestatsCommand() { return getRuleContext(InlinestatsCommandContext.class,0); } public LookupCommandContext lookupCommand() { return getRuleContext(LookupCommandContext.class,0); } - public JoinCommandContext joinCommand() { - 
return getRuleContext(JoinCommandContext.class,0); + public ChangePointCommandContext changePointCommand() { + return getRuleContext(ChangePointCommandContext.class,0); } @SuppressWarnings("this-escape") public ProcessingCommandContext(ParserRuleContext parent, int invokingState) { @@ -530,118 +536,125 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(172); + setState(175); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(154); + setState(156); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(155); + setState(157); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(156); + setState(158); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(157); + setState(159); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(158); + setState(160); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(159); + setState(161); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(160); + setState(162); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(161); + setState(163); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(162); + setState(164); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(163); + setState(165); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(164); + setState(166); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(165); + setState(167); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(166); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(167); - inlinestatsCommand(); + setState(168); + joinCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(168); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); setState(169); - lookupCommand(); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(170); + inlinestatsCommand(); } break; case 15: enterOuterAlt(_localctx, 15); { - setState(170); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); setState(171); - joinCommand(); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(172); + lookupCommand(); + } + break; + case 16: + enterOuterAlt(_localctx, 16); + { + setState(173); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(174); + changePointCommand(); } break; } @@ -689,9 +702,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(174); + setState(177); match(WHERE); - setState(175); + setState(178); booleanExpression(0); } } @@ -907,7 +920,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(206); + setState(209); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -916,9 +929,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc 
_ctx = _localctx; _prevctx = _localctx; - setState(178); + setState(181); match(NOT); - setState(179); + setState(182); booleanExpression(8); } break; @@ -927,7 +940,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(180); + setState(183); valueExpression(); } break; @@ -936,7 +949,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(181); + setState(184); regexBooleanExpression(); } break; @@ -945,41 +958,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(182); + setState(185); valueExpression(); - setState(184); + setState(187); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(183); + setState(186); match(NOT); } } - setState(186); + setState(189); match(IN); - setState(187); + setState(190); match(LP); - setState(188); + setState(191); valueExpression(); - setState(193); + setState(196); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(189); + setState(192); match(COMMA); - setState(190); + setState(193); valueExpression(); } } - setState(195); + setState(198); _errHandler.sync(this); _la = _input.LA(1); } - setState(196); + setState(199); match(RP); } break; @@ -988,21 +1001,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(198); + setState(201); valueExpression(); - setState(199); + setState(202); match(IS); - setState(201); + setState(204); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(200); + setState(203); match(NOT); } } - setState(203); + setState(206); match(NULL); } break; @@ -1011,13 +1024,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(205); + setState(208); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(216); + setState(219); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1025,7 +1038,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(214); + setState(217); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1033,11 +1046,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(208); + setState(211); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(209); + setState(212); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(210); + setState(213); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1046,18 +1059,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws 
RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(211); + setState(214); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(212); + setState(215); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(213); + setState(216); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(218); + setState(221); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1112,48 +1125,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(233); + setState(236); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(219); + setState(222); valueExpression(); - setState(221); + setState(224); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(220); + setState(223); match(NOT); } } - setState(223); + setState(226); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(224); + setState(227); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(226); + setState(229); valueExpression(); - setState(228); + setState(231); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(227); + setState(230); match(NOT); } } - setState(230); + setState(233); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(231); + setState(234); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1213,23 +1226,23 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(235); - ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); setState(238); + ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); + setState(241); _errHandler.sync(this); _la = _input.LA(1); if (_la==CAST_OP) { { - setState(236); + setState(239); match(CAST_OP); - setState(237); + setState(240); ((MatchBooleanExpressionContext)_localctx).fieldType = dataType(); } } - setState(240); + setState(243); match(COLON); - setState(241); + setState(244); ((MatchBooleanExpressionContext)_localctx).matchQuery = constant(); } } @@ -1313,14 +1326,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(248); + setState(251); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(243); + setState(246); operatorExpression(0); } break; @@ -1328,11 +1341,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(244); + setState(247); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(245); + setState(248); comparisonOperator(); - setState(246); + setState(249); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1457,7 +1470,7 @@ 
private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(254); + setState(257); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1466,7 +1479,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(251); + setState(254); primaryExpression(0); } break; @@ -1475,7 +1488,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(252); + setState(255); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1486,13 +1499,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(253); + setState(256); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(264); + setState(267); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1500,7 +1513,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(262); + setState(265); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: @@ -1508,9 +1521,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(256); + setState(259); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(257); + setState(260); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { @@ -1521,7 +1534,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(258); + setState(261); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1530,9 +1543,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(259); + setState(262); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(260); + setState(263); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1543,14 +1556,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(261); + setState(264); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(266); + setState(269); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); } @@ -1708,7 +1721,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; 
enterOuterAlt(_localctx, 1); { - setState(275); + setState(278); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,17,_ctx) ) { case 1: @@ -1717,7 +1730,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(268); + setState(271); constant(); } break; @@ -1726,7 +1739,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(269); + setState(272); qualifiedName(); } break; @@ -1735,7 +1748,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(270); + setState(273); functionExpression(); } break; @@ -1744,17 +1757,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(271); + setState(274); match(LP); - setState(272); + setState(275); booleanExpression(0); - setState(273); + setState(276); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(282); + setState(285); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1765,16 +1778,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(277); + setState(280); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(278); + setState(281); match(CAST_OP); - setState(279); + setState(282); dataType(); } } } - setState(284); + setState(287); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); } @@ -1840,50 +1853,64 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx int _alt; enterOuterAlt(_localctx, 1); { - setState(285); + setState(288); functionName(); - setState(286); + setState(289); match(LP); - setState(300); + setState(303); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { - case 1: + switch (_input.LA(1)) { + case ASTERISK: { - setState(287); + setState(290); match(ASTERISK); } break; - case 2: + case QUOTED_STRING: + case INTEGER_LITERAL: + case DECIMAL_LITERAL: + case FALSE: + case LP: + case NOT: + case NULL: + case PARAM: + case TRUE: + case PLUS: + case MINUS: + case NAMED_OR_POSITIONAL_PARAM: + case OPENING_BRACKET: + case UNQUOTED_IDENTIFIER: + case QUOTED_IDENTIFIER: { { - setState(288); + setState(291); booleanExpression(0); - setState(293); + setState(296); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(289); + setState(292); match(COMMA); - setState(290); + setState(293); booleanExpression(0); } } } - setState(295); + setState(298); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } - setState(298); + setState(301); _errHandler.sync(this); _la = _input.LA(1); if (_la==COMMA) { { - setState(296); + setState(299); match(COMMA); - setState(297); + setState(300); mapExpression(); } } @@ -1891,8 +1918,12 @@ 
public final FunctionExpressionContext functionExpression() throws RecognitionEx } } break; + case RP: + break; + default: + break; } - setState(302); + setState(305); match(RP); } } @@ -1938,7 +1969,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(304); + setState(307); identifierOrParameter(); } } @@ -1994,27 +2025,27 @@ public final MapExpressionContext mapExpression() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(306); + setState(309); match(LEFT_BRACES); - setState(307); + setState(310); entryExpression(); - setState(312); + setState(315); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(308); + setState(311); match(COMMA); - setState(309); + setState(312); entryExpression(); } } - setState(314); + setState(317); _errHandler.sync(this); _la = _input.LA(1); } - setState(315); + setState(318); match(RIGHT_BRACES); } } @@ -2066,11 +2097,11 @@ public final EntryExpressionContext entryExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(317); + setState(320); ((EntryExpressionContext)_localctx).key = string(); - setState(318); + setState(321); match(COLON); - setState(319); + setState(322); ((EntryExpressionContext)_localctx).value = constant(); } } @@ -2128,7 +2159,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(321); + setState(324); identifier(); } } @@ -2175,9 +2206,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(323); + setState(326); match(ROW); - setState(324); + setState(327); fields(); } } @@ -2231,23 +2262,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(326); + setState(329); field(); - setState(331); + setState(334); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(327); + setState(330); match(COMMA); - setState(328); + setState(331); field(); } } } - setState(333); + setState(336); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } @@ -2299,19 +2330,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(337); + setState(340); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(334); + setState(337); qualifiedName(); - setState(335); + setState(338); match(ASSIGN); } break; } - setState(339); + setState(342); booleanExpression(0); } } @@ -2369,34 +2400,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(341); + setState(344); match(FROM); - setState(342); + setState(345); indexPattern(); - setState(347); + setState(350); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(343); + setState(346); match(COMMA); - setState(344); + setState(347); indexPattern(); } } } - setState(349); + setState(352); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } - setState(351); + setState(354); _errHandler.sync(this); 
switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(350); + setState(353); metadata(); } break; @@ -2449,19 +2480,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(356); + setState(359); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(353); + setState(356); clusterString(); - setState(354); + setState(357); match(COLON); } break; } - setState(358); + setState(361); indexString(); } } @@ -2479,6 +2510,7 @@ public final IndexPatternContext indexPattern() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class ClusterStringContext extends ParserRuleContext { public TerminalNode UNQUOTED_SOURCE() { return getToken(EsqlBaseParser.UNQUOTED_SOURCE, 0); } + public TerminalNode QUOTED_STRING() { return getToken(EsqlBaseParser.QUOTED_STRING, 0); } @SuppressWarnings("this-escape") public ClusterStringContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2502,11 +2534,20 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final ClusterStringContext clusterString() throws RecognitionException { ClusterStringContext _localctx = new ClusterStringContext(_ctx, getState()); enterRule(_localctx, 42, RULE_clusterString); + int _la; try { enterOuterAlt(_localctx, 1); { - setState(360); - match(UNQUOTED_SOURCE); + setState(363); + _la = _input.LA(1); + if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } } } catch (RecognitionException re) { @@ -2551,7 +2592,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(362); + setState(365); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2612,25 +2653,25 @@ public final MetadataContext metadata() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(364); + setState(367); match(METADATA); - setState(365); + setState(368); match(UNQUOTED_SOURCE); - setState(370); + setState(373); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(366); + setState(369); match(COMMA); - setState(367); + setState(370); match(UNQUOTED_SOURCE); } } } - setState(372); + setState(375); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2696,46 +2737,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(373); + setState(376); match(DEV_METRICS); - setState(374); + setState(377); indexPattern(); - setState(379); + setState(382); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(375); + setState(378); match(COMMA); - setState(376); + setState(379); indexPattern(); } } } - setState(381); + setState(384); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } - setState(383); + setState(386); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(382); + setState(385);
((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(387); + setState(390); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(385); + setState(388); match(BY); - setState(386); + setState(389); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2785,9 +2826,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(389); + setState(392); match(EVAL); - setState(390); + setState(393); fields(); } } @@ -2840,26 +2881,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(392); + setState(395); match(STATS); - setState(394); + setState(397); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(393); + setState(396); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(398); + setState(401); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(396); + setState(399); match(BY); - setState(397); + setState(400); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2916,23 +2957,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(400); + setState(403); aggField(); - setState(405); + setState(408); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(401); + setState(404); match(COMMA); - setState(402); + setState(405); aggField(); } } } - setState(407); + setState(410); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -2984,16 +3025,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(408); - field(); setState(411); + field(); + setState(414); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(409); + setState(412); match(WHERE); - setState(410); + setState(413); booleanExpression(0); } break; @@ -3050,23 +3091,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(413); + setState(416); identifierOrParameter(); - setState(418); + setState(421); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(414); + setState(417); match(DOT); - setState(415); + setState(418); identifierOrParameter(); } } } - setState(420); + setState(423); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3122,23 +3163,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(421); + setState(424); identifierPattern(); - setState(426); + setState(429); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(422); + setState(425); match(DOT); - setState(423); + setState(426); identifierPattern(); } } } - setState(428); + setState(431); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3194,23 +3235,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(429); + setState(432); qualifiedNamePattern(); - setState(434); + setState(437); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(430); + setState(433); match(COMMA); - setState(431); + setState(434); qualifiedNamePattern(); } } } - setState(436); + setState(439); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -3258,7 +3299,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(437); + setState(440); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3311,25 +3352,26 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(442); + setState(444); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { - case 1: + switch (_input.LA(1)) { + case ID_PATTERN: enterOuterAlt(_localctx, 1); { - setState(439); + setState(442); match(ID_PATTERN); } break; - case 2: + case PARAM: + case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(440); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(441); + setState(443); parameter(); } break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -3599,14 +3641,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(486); + setState(488); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(444); + setState(446); match(NULL); } break; @@ -3614,9 +3656,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(445); + setState(447); integerValue(); - setState(446); + setState(448); match(UNQUOTED_IDENTIFIER); } break; @@ -3624,7 +3666,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(448); + setState(450); decimalValue(); } break; @@ -3632,7 +3674,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(449); + setState(451); integerValue(); } break; @@ -3640,7 +3682,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(450); + setState(452); booleanValue(); } break; @@ -3648,7 +3690,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(451); + setState(453); parameter(); } break; @@ -3656,7 +3698,7 @@ public final ConstantContext constant() throws 
RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(452); + setState(454); string(); } break; @@ -3664,27 +3706,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(453); + setState(455); match(OPENING_BRACKET); - setState(454); + setState(456); numericValue(); - setState(459); + setState(461); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(455); + setState(457); match(COMMA); - setState(456); + setState(458); numericValue(); } } - setState(461); + setState(463); _errHandler.sync(this); _la = _input.LA(1); } - setState(462); + setState(464); match(CLOSING_BRACKET); } break; @@ -3692,27 +3734,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(464); + setState(466); match(OPENING_BRACKET); - setState(465); + setState(467); booleanValue(); - setState(470); + setState(472); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(466); + setState(468); match(COMMA); - setState(467); + setState(469); booleanValue(); } } - setState(472); + setState(474); _errHandler.sync(this); _la = _input.LA(1); } - setState(473); + setState(475); match(CLOSING_BRACKET); } break; @@ -3720,27 +3762,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(475); + setState(477); match(OPENING_BRACKET); - setState(476); + setState(478); string(); - setState(481); + setState(483); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(477); + setState(479); match(COMMA); - setState(478); + setState(480); string(); } } - setState(483); + setState(485); _errHandler.sync(this); _la = _input.LA(1); } - setState(484); + setState(486); match(CLOSING_BRACKET); } break; @@ -3814,14 +3856,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(490); + setState(492); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(488); + setState(490); match(PARAM); } break; @@ -3829,7 +3871,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(489); + setState(491); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3880,25 +3922,27 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(495); + setState(496); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { - case 1: + switch (_input.LA(1)) { + case UNQUOTED_IDENTIFIER: + case QUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(492); + setState(494); identifier(); } break; - case 2: + case PARAM: + case NAMED_OR_POSITIONAL_PARAM: enterOuterAlt(_localctx, 2); { - setState(493); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(494); + 
setState(495); parameter(); } break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -3942,9 +3986,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(497); - match(LIMIT); setState(498); + match(LIMIT); + setState(499); match(INTEGER_LITERAL); } } @@ -3999,25 +4043,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(500); - match(SORT); setState(501); + match(SORT); + setState(502); orderExpression(); - setState(506); + setState(507); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(502); - match(COMMA); setState(503); + match(COMMA); + setState(504); orderExpression(); } } } - setState(508); + setState(509); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } @@ -4073,14 +4117,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(509); + setState(510); booleanExpression(0); - setState(511); + setState(512); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(510); + setState(511); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4094,14 +4138,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(515); + setState(516); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(513); - match(NULLS); setState(514); + match(NULLS); + setState(515); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4160,9 +4204,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(517); - match(KEEP); setState(518); + match(KEEP); + setState(519); qualifiedNamePatterns(); } } @@ -4209,9 +4253,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(520); - match(DROP); setState(521); + match(DROP); + setState(522); qualifiedNamePatterns(); } } @@ -4266,25 +4310,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(523); - match(RENAME); setState(524); + match(RENAME); + setState(525); renameClause(); - setState(529); + setState(530); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(525); - match(COMMA); setState(526); + match(COMMA); + setState(527); renameClause(); } } } - setState(531); + setState(532); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4338,11 +4382,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(532); - ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); setState(533); - match(AS); + ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); setState(534); + match(AS); + setState(535); ((RenameClauseContext)_localctx).newName = 
qualifiedNamePattern(); } } @@ -4395,18 +4439,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(536); - match(DISSECT); setState(537); - primaryExpression(0); + match(DISSECT); setState(538); + primaryExpression(0); + setState(539); string(); - setState(540); + setState(541); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(539); + setState(540); commandOptions(); } break; @@ -4459,11 +4503,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(542); - match(GROK); setState(543); - primaryExpression(0); + match(GROK); setState(544); + primaryExpression(0); + setState(545); string(); } } @@ -4510,9 +4554,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(546); - match(MV_EXPAND); setState(547); + match(MV_EXPAND); + setState(548); qualifiedName(); } } @@ -4566,23 +4610,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(549); + setState(550); commandOption(); - setState(554); + setState(555); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(550); - match(COMMA); setState(551); + match(COMMA); + setState(552); commandOption(); } } } - setState(556); + setState(557); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); } @@ -4634,11 +4678,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(557); - identifier(); setState(558); - match(ASSIGN); + identifier(); setState(559); + match(ASSIGN); + setState(560); constant(); } } @@ -4684,7 +4728,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(561); + setState(562); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4739,20 +4783,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(565); + setState(566); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(563); + setState(564); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(564); + setState(565); integerValue(); } break; @@ -4801,12 +4845,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(568); + setState(569); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(567); + setState(568); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4819,7 +4863,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(570); + setState(571); match(DECIMAL_LITERAL); } } @@ -4866,12 +4910,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(573); + setState(574); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS 
|| _la==MINUS) { { - setState(572); + setState(573); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4884,7 +4928,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(575); + setState(576); match(INTEGER_LITERAL); } } @@ -4928,7 +4972,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(577); + setState(578); match(QUOTED_STRING); } } @@ -4978,7 +5022,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(579); + setState(580); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); @@ -5033,9 +5077,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(581); - match(EXPLAIN); setState(582); + match(EXPLAIN); + setState(583); subqueryExpression(); } } @@ -5083,11 +5127,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(584); - match(OPENING_BRACKET); setState(585); - query(0); + match(OPENING_BRACKET); setState(586); + query(0); + setState(587); match(CLOSING_BRACKET); } } @@ -5144,9 +5188,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(588); - match(SHOW); setState(589); + match(SHOW); + setState(590); match(INFO); } } @@ -5209,46 +5253,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(591); - match(ENRICH); setState(592); + match(ENRICH); + setState(593); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(595); + setState(596); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(593); - match(ON); setState(594); + match(ON); + setState(595); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(606); + setState(607); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(597); - match(WITH); setState(598); + match(WITH); + setState(599); enrichWithClause(); - setState(603); + setState(604); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,56,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(599); - match(COMMA); setState(600); + match(COMMA); + setState(601); enrichWithClause(); } } } - setState(605); + setState(606); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,56,_ctx); } @@ -5305,19 +5349,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(611); + setState(612); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(608); - ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); setState(609); + ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); + setState(610); match(ASSIGN); } break; } - setState(613); + setState(614); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5370,13 +5414,13 @@ public final LookupCommandContext 
lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(615); - match(DEV_LOOKUP); setState(616); - ((LookupCommandContext)_localctx).tableName = indexPattern(); + match(DEV_LOOKUP); setState(617); - match(ON); + ((LookupCommandContext)_localctx).tableName = indexPattern(); setState(618); + match(ON); + setState(619); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5429,18 +5473,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(620); - match(DEV_INLINESTATS); setState(621); + match(DEV_INLINESTATS); + setState(622); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(624); + setState(625); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: { - setState(622); - match(BY); setState(623); + match(BY); + setState(624); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5461,14 +5505,14 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx @SuppressWarnings("CheckReturnValue") public static class JoinCommandContext extends ParserRuleContext { public Token type; - public TerminalNode DEV_JOIN() { return getToken(EsqlBaseParser.DEV_JOIN, 0); } + public TerminalNode JOIN() { return getToken(EsqlBaseParser.JOIN, 0); } public JoinTargetContext joinTarget() { return getRuleContext(JoinTargetContext.class,0); } public JoinConditionContext joinCondition() { return getRuleContext(JoinConditionContext.class,0); } - public TerminalNode DEV_JOIN_LOOKUP() { return getToken(EsqlBaseParser.DEV_JOIN_LOOKUP, 0); } + public TerminalNode JOIN_LOOKUP() { return getToken(EsqlBaseParser.JOIN_LOOKUP, 0); } public TerminalNode DEV_JOIN_LEFT() { return getToken(EsqlBaseParser.DEV_JOIN_LEFT, 0); } public TerminalNode DEV_JOIN_RIGHT() { return getToken(EsqlBaseParser.DEV_JOIN_RIGHT, 0); } @SuppressWarnings("this-escape") @@ -5499,29 +5543,21 @@ public final JoinCommandContext joinCommand() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(627); - _errHandler.sync(this); + ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { - { - setState(626); - ((JoinCommandContext)_localctx).type = _input.LT(1); - _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { - ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } - } + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 25296896L) != 0)) ) { + ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this); } - + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + setState(628); + match(JOIN); setState(629); - match(DEV_JOIN); - setState(630); joinTarget(); - setState(631); + setState(630); joinCondition(); } } @@ -5539,14 +5575,9 @@ public final JoinCommandContext joinCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class JoinTargetContext extends ParserRuleContext { public IndexPatternContext index; - public IdentifierContext alias; public IndexPatternContext indexPattern() { return getRuleContext(IndexPatternContext.class,0); } - public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); } - public 
IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } @SuppressWarnings("this-escape") public JoinTargetContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -5570,24 +5601,11 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final JoinTargetContext joinTarget() throws RecognitionException { JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState()); enterRule(_localctx, 126, RULE_joinTarget); - int _la; try { enterOuterAlt(_localctx, 1); { - setState(633); + setState(632); ((JoinTargetContext)_localctx).index = indexPattern(); - setState(636); - _errHandler.sync(this); - _la = _input.LA(1); - if (_la==AS) { - { - setState(634); - match(AS); - setState(635); - ((JoinTargetContext)_localctx).alias = identifier(); - } - } - } } catch (RecognitionException re) { @@ -5641,27 +5659,27 @@ public final JoinConditionContext joinCondition() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(638); + setState(634); match(ON); - setState(639); + setState(635); joinPredicate(); - setState(644); + setState(640); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,62,_ctx); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(640); + setState(636); match(COMMA); - setState(641); + setState(637); joinPredicate(); } } } - setState(646); + setState(642); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,62,_ctx); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); } } } @@ -5707,7 +5725,7 @@ public final JoinPredicateContext joinPredicate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(647); + setState(643); valueExpression(); } } @@ -5722,6 +5740,93 @@ public final JoinPredicateContext joinPredicate() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class ChangePointCommandContext extends ParserRuleContext { + public QualifiedNameContext value; + public QualifiedNameContext key; + public QualifiedNameContext targetType; + public QualifiedNameContext targetPvalue; + public TerminalNode DEV_CHANGE_POINT() { return getToken(EsqlBaseParser.DEV_CHANGE_POINT, 0); } + public List<QualifiedNameContext> qualifiedName() { + return getRuleContexts(QualifiedNameContext.class); + } + public QualifiedNameContext qualifiedName(int i) { + return getRuleContext(QualifiedNameContext.class,i); + } + public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); } + public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); } + public TerminalNode COMMA() { return getToken(EsqlBaseParser.COMMA, 0); } + @SuppressWarnings("this-escape") + public ChangePointCommandContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_changePointCommand; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterChangePointCommand(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitChangePointCommand(this); + } + @Override + public <T> T accept(ParseTreeVisitor<? extends T> visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitChangePointCommand(this); + else
return visitor.visitChildren(this); + } + } + + public final ChangePointCommandContext changePointCommand() throws RecognitionException { + ChangePointCommandContext _localctx = new ChangePointCommandContext(_ctx, getState()); + enterRule(_localctx, 132, RULE_changePointCommand); + try { + enterOuterAlt(_localctx, 1); + { + setState(645); + match(DEV_CHANGE_POINT); + setState(646); + ((ChangePointCommandContext)_localctx).value = qualifiedName(); + setState(649); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,61,_ctx) ) { + case 1: + { + setState(647); + match(ON); + setState(648); + ((ChangePointCommandContext)_localctx).key = qualifiedName(); + } + break; + } + setState(656); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,62,_ctx) ) { + case 1: + { + setState(651); + match(AS); + setState(652); + ((ChangePointCommandContext)_localctx).targetType = qualifiedName(); + setState(653); + match(COMMA); + setState(654); + ((ChangePointCommandContext)_localctx).targetPvalue = qualifiedName(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: @@ -5736,10 +5841,6 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); - case 33: - return identifierPattern_sempred((IdentifierPatternContext)_localctx, predIndex); - case 36: - return identifierOrParameter_sempred((IdentifierOrParameterContext)_localctx, predIndex); } return true; } @@ -5793,23 +5894,9 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } - private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { - switch (predIndex) { - case 10: - return this.isDevVersion(); - } - return true; - } - private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { - switch (predIndex) { - case 11: - return this.isDevVersion(); - } - return true; - } public static final String _serializedATN = - "\u0004\u0001\u0082\u028a\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0086\u0293\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -5826,399 +5913,404 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ - "@\u0002A\u0007A\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u008e"+ - "\b\u0001\n\u0001\f\u0001\u0091\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0099\b\u0002\u0001\u0003"+ + "@\u0002A\u0007A\u0002B\u0007B\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + 
"\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005"+ + "\u0001\u0090\b\u0001\n\u0001\f\u0001\u0093\t\u0001\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u009b\b\u0002"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003"+ - "\u00ad\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ - "\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ - "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ - "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ - "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00ef\b"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0003\b\u00f9\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ff"+ - "\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u0107\b\t"+ - "\n\t\f\t\u010a\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0003\n\u0114\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0119\b\n"+ - "\n\n\f\n\u011c\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0005\u000b\u0124\b\u000b\n\u000b\f\u000b\u0127\t\u000b"+ - "\u0001\u000b\u0001\u000b\u0003\u000b\u012b\b\u000b\u0003\u000b\u012d\b"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r"+ - "\u0001\r\u0005\r\u0137\b\r\n\r\f\r\u013a\t\r\u0001\r\u0001\r\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011"+ - "\u014a\b\u0011\n\u0011\f\u0011\u014d\t\u0011\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0003\u0012\u0152\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u015a\b\u0013\n\u0013\f\u0013"+ - "\u015d\t\u0013\u0001\u0013\u0003\u0013\u0160\b\u0013\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0003\u0014\u0165\b\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0005\u0017\u0171\b\u0017\n\u0017\f\u0017\u0174\t\u0017"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u017a\b\u0018"+ - "\n\u0018\f\u0018\u017d\t\u0018\u0001\u0018\u0003\u0018\u0180\b\u0018\u0001"+ - "\u0018\u0001\u0018\u0003\u0018\u0184\b\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018b\b\u001a\u0001\u001a\u0001"+ - "\u001a\u0003\u001a\u018f\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ - "\u001b\u0194\b\u001b\n\u001b\f\u001b\u0197\t\u001b\u0001\u001c\u0001\u001c"+ - "\u0001\u001c\u0003\u001c\u019c\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0005\u001d\u01a1\b\u001d\n\u001d\f\u001d\u01a4\t\u001d\u0001\u001e\u0001"+ - 
"\u001e\u0001\u001e\u0005\u001e\u01a9\b\u001e\n\u001e\f\u001e\u01ac\t\u001e"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b1\b\u001f\n\u001f"+ - "\f\u001f\u01b4\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01bb"+ - "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\""+ - "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01d5\b\"\n\""+ - "\f\"\u01d8\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ - "\u01e0\b\"\n\"\f\"\u01e3\t\"\u0001\"\u0001\"\u0003\"\u01e7\b\"\u0001#"+ - "\u0001#\u0003#\u01eb\b#\u0001$\u0001$\u0001$\u0003$\u01f0\b$\u0001%\u0001"+ - "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01f9\b&\n&\f&\u01fc\t&\u0001"+ - "\'\u0001\'\u0003\'\u0200\b\'\u0001\'\u0001\'\u0003\'\u0204\b\'\u0001("+ - "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005"+ - "*\u0210\b*\n*\f*\u0213\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - ",\u0001,\u0003,\u021d\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ - ".\u0001/\u0001/\u0001/\u0005/\u0229\b/\n/\f/\u022c\t/\u00010\u00010\u0001"+ - "0\u00010\u00011\u00011\u00012\u00012\u00032\u0236\b2\u00013\u00033\u0239"+ - "\b3\u00013\u00013\u00014\u00034\u023e\b4\u00014\u00014\u00015\u00015\u0001"+ - "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u0001"+ - "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0254\b:\u0001:\u0001:\u0001"+ - ":\u0001:\u0005:\u025a\b:\n:\f:\u025d\t:\u0003:\u025f\b:\u0001;\u0001;"+ - "\u0001;\u0003;\u0264\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ - "<\u0001=\u0001=\u0001=\u0001=\u0003=\u0271\b=\u0001>\u0003>\u0274\b>\u0001"+ - ">\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u027d\b?\u0001@\u0001"+ - "@\u0001@\u0001@\u0005@\u0283\b@\n@\f@\u0286\t@\u0001A\u0001A\u0001A\u0000"+ - "\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000@A\u0001\u0000"+ + "\u0001\u0003\u0003\u0003\u00b0\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0003\u0005\u00bc\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00c3\b\u0005\n\u0005\f\u0005\u00c6"+ + "\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ + "\u0005\u00cd\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00d2"+ + "\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0005\u0005\u00da\b\u0005\n\u0005\f\u0005\u00dd\t\u0005\u0001\u0006"+ + "\u0001\u0006\u0003\u0006\u00e1\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0003\u0006\u00e8\b\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0003\u0006\u00ed\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0003\u0007\u00f2\b\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00fc\b\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0003\t\u0102\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0005\t\u010a\b\t\n\t\f\t\u010d\t\t\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0117\b\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u011c\b\n\n\n\f\n\u011f\t\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0127\b\u000b\n\u000b"+ + 
"\f\u000b\u012a\t\u000b\u0001\u000b\u0001\u000b\u0003\u000b\u012e\b\u000b"+ + "\u0003\u000b\u0130\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0005\r\u013a\b\r\n\r\f\r\u013d\t\r\u0001\r"+ + "\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001"+ + "\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0005\u0011\u014d\b\u0011\n\u0011\f\u0011\u0150\t\u0011\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0003\u0012\u0155\b\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u015d\b\u0013"+ + "\n\u0013\f\u0013\u0160\t\u0013\u0001\u0013\u0003\u0013\u0163\b\u0013\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0003\u0014\u0168\b\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0174\b\u0017\n\u0017\f\u0017"+ + "\u0177\t\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018"+ + "\u017d\b\u0018\n\u0018\f\u0018\u0180\t\u0018\u0001\u0018\u0003\u0018\u0183"+ + "\b\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0187\b\u0018\u0001\u0019"+ + "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018e\b\u001a"+ + "\u0001\u001a\u0001\u001a\u0003\u001a\u0192\b\u001a\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0005\u001b\u0197\b\u001b\n\u001b\f\u001b\u019a\t\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001c\u0003\u001c\u019f\b\u001c\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0005\u001d\u01a4\b\u001d\n\u001d\f\u001d\u01a7\t\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u01ac\b\u001e\n\u001e"+ + "\f\u001e\u01af\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f"+ + "\u01b4\b\u001f\n\u001f\f\u001f\u01b7\t\u001f\u0001 \u0001 \u0001!\u0001"+ + "!\u0003!\u01bd\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01cc\b\"\n"+ + "\"\f\"\u01cf\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ + "\"\u01d7\b\"\n\"\f\"\u01da\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ + "\u0001\"\u0005\"\u01e2\b\"\n\"\f\"\u01e5\t\"\u0001\"\u0001\"\u0003\"\u01e9"+ + "\b\"\u0001#\u0001#\u0003#\u01ed\b#\u0001$\u0001$\u0003$\u01f1\b$\u0001"+ + "%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01fa\b&\n&\f&\u01fd"+ + "\t&\u0001\'\u0001\'\u0003\'\u0201\b\'\u0001\'\u0001\'\u0003\'\u0205\b"+ + "\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001"+ + "*\u0005*\u0211\b*\n*\f*\u0214\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001"+ + ",\u0001,\u0001,\u0003,\u021e\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001"+ + ".\u0001.\u0001/\u0001/\u0001/\u0005/\u022a\b/\n/\f/\u022d\t/\u00010\u0001"+ + "0\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0237\b2\u00013\u0003"+ + "3\u023a\b3\u00013\u00013\u00014\u00034\u023f\b4\u00014\u00014\u00015\u0001"+ + "5\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u0001"+ + "9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0255\b:\u0001:\u0001"+ + ":\u0001:\u0001:\u0005:\u025b\b:\n:\f:\u025e\t:\u0003:\u0260\b:\u0001;"+ + "\u0001;\u0001;\u0003;\u0265\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001"+ + "<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u0272\b=\u0001>\u0001>\u0001"+ + ">\u0001>\u0001>\u0001?\u0001?\u0001@\u0001@\u0001@\u0001@\u0005@\u027f"+ + "\b@\n@\f@\u0282\t@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001B\u0003B\u028a"+ + "\bB\u0001B\u0001B\u0001B\u0001B\u0001B\u0003B\u0291\bB\u0001B\u0000\u0004"+ + 
"\u0002\n\u0012\u0014C\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+ + "\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\"+ + "^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0000\t\u0001\u0000@A\u0001\u0000"+ "BD\u0002\u0000\u001e\u001eSS\u0001\u0000JK\u0002\u0000##((\u0002\u0000"+ - "++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u02a5\u0000"+ - "\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004"+ - "\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000\u0000\u0000\b\u00ae"+ - "\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000\u0000\f\u00e9\u0001"+ - "\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000\u0010\u00f8\u0001"+ - "\u0000\u0000\u0000\u0012\u00fe\u0001\u0000\u0000\u0000\u0014\u0113\u0001"+ - "\u0000\u0000\u0000\u0016\u011d\u0001\u0000\u0000\u0000\u0018\u0130\u0001"+ - "\u0000\u0000\u0000\u001a\u0132\u0001\u0000\u0000\u0000\u001c\u013d\u0001"+ - "\u0000\u0000\u0000\u001e\u0141\u0001\u0000\u0000\u0000 \u0143\u0001\u0000"+ - "\u0000\u0000\"\u0146\u0001\u0000\u0000\u0000$\u0151\u0001\u0000\u0000"+ - "\u0000&\u0155\u0001\u0000\u0000\u0000(\u0164\u0001\u0000\u0000\u0000*"+ - "\u0168\u0001\u0000\u0000\u0000,\u016a\u0001\u0000\u0000\u0000.\u016c\u0001"+ - "\u0000\u0000\u00000\u0175\u0001\u0000\u0000\u00002\u0185\u0001\u0000\u0000"+ - "\u00004\u0188\u0001\u0000\u0000\u00006\u0190\u0001\u0000\u0000\u00008"+ - "\u0198\u0001\u0000\u0000\u0000:\u019d\u0001\u0000\u0000\u0000<\u01a5\u0001"+ - "\u0000\u0000\u0000>\u01ad\u0001\u0000\u0000\u0000@\u01b5\u0001\u0000\u0000"+ - "\u0000B\u01ba\u0001\u0000\u0000\u0000D\u01e6\u0001\u0000\u0000\u0000F"+ - "\u01ea\u0001\u0000\u0000\u0000H\u01ef\u0001\u0000\u0000\u0000J\u01f1\u0001"+ - "\u0000\u0000\u0000L\u01f4\u0001\u0000\u0000\u0000N\u01fd\u0001\u0000\u0000"+ - "\u0000P\u0205\u0001\u0000\u0000\u0000R\u0208\u0001\u0000\u0000\u0000T"+ - "\u020b\u0001\u0000\u0000\u0000V\u0214\u0001\u0000\u0000\u0000X\u0218\u0001"+ - "\u0000\u0000\u0000Z\u021e\u0001\u0000\u0000\u0000\\\u0222\u0001\u0000"+ - "\u0000\u0000^\u0225\u0001\u0000\u0000\u0000`\u022d\u0001\u0000\u0000\u0000"+ - "b\u0231\u0001\u0000\u0000\u0000d\u0235\u0001\u0000\u0000\u0000f\u0238"+ - "\u0001\u0000\u0000\u0000h\u023d\u0001\u0000\u0000\u0000j\u0241\u0001\u0000"+ - "\u0000\u0000l\u0243\u0001\u0000\u0000\u0000n\u0245\u0001\u0000\u0000\u0000"+ - "p\u0248\u0001\u0000\u0000\u0000r\u024c\u0001\u0000\u0000\u0000t\u024f"+ - "\u0001\u0000\u0000\u0000v\u0263\u0001\u0000\u0000\u0000x\u0267\u0001\u0000"+ - "\u0000\u0000z\u026c\u0001\u0000\u0000\u0000|\u0273\u0001\u0000\u0000\u0000"+ - "~\u0279\u0001\u0000\u0000\u0000\u0080\u027e\u0001\u0000\u0000\u0000\u0082"+ - "\u0287\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085"+ - "\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087"+ - "\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000"+ - "\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b"+ - "\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d"+ - "\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f"+ - "\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090"+ - "\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092"+ - "\u0099\u0003n7\u0000\u0093\u0099\u0003&\u0013\u0000\u0094\u0099\u0003"+ - " \u0010\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004\u0002\u0001"+ - "\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000\u0000\u0000"+ - 
"\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000\u0000\u0000"+ - "\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ - "\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019\u0000\u009b"+ - "\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d\u00ad\u0003"+ - "J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003L&\u0000\u00a0"+ - "\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad\u0003X,\u0000"+ - "\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5\u00ad\u0003"+ - "\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad\u0003z=\u0000"+ - "\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x<\u0000\u00aa\u00ab"+ - "\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000\u00ac\u009a\u0001\u0000"+ - "\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000\u00ac\u009c\u0001\u0000"+ - "\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000\u00ac\u009e\u0001\u0000"+ - "\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000\u00ac\u00a0\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000\u00ac\u00a2\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000\u00ac\u00a4\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000\u00ac\u00a6\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000"+ - "\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000\u00ae\u00af\u0005\u0010"+ - "\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0\t\u0001\u0000\u0000"+ - "\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2\u00b3\u00051\u0000"+ - "\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003\u0010\b\u0000\u00b5"+ - "\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010\b\u0000\u00b7\u00b9"+ - "\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001"+ - "\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u0005"+ - ",\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc\u00c1\u0003\u0010\b"+ - "\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0\u0003\u0010\b\u0000"+ - "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001\u0000\u0000\u0000"+ - "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001\u0000\u0000\u0000"+ - "\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ - "\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000\u0000\u0000\u00c6"+ - "\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000\u0000\u00c8\u00ca"+ - "\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ - "\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"+ - "2\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e"+ - "\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce\u00b4\u0001\u0000"+ - "\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce\u00b6\u0001\u0000"+ - "\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce\u00cd\u0001\u0000"+ - "\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0005\u0000"+ - "\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7\u0003\n\u0005\u0006"+ - "\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u00054\u0000\u0000\u00d5"+ - "\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3"+ - "\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6"+ - "\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u000b"+ - "\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00dd"+ - "\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000\u0000\u00dd\u00dc\u0001"+ - 
"\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0001"+ - "\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000\u00e0\u00e1\u0003j5"+ - "\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4\u0003\u0010\b\u0000"+ - "\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001\u0000\u0000\u0000\u00e4"+ - "\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000\u0000\u0000\u00e6"+ - "\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5\u0000\u00e8\u00ea\u0001"+ - "\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000\u0000\u00e9\u00e2\u0001"+ - "\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000\u00eb\u00ee\u0003:"+ - "\u001d\u0000\u00ec\u00ed\u0005%\u0000\u0000\u00ed\u00ef\u0003\u001e\u000f"+ - "\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001\u0000\u0000"+ - "\u0000\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005&\u0000\u0000"+ - "\u00f1\u00f2\u0003D\"\u0000\u00f2\u000f\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f9\u0003\u0012\t\u0000\u00f4\u00f5\u0003\u0012\t\u0000\u00f5\u00f6"+ - "\u0003l6\u0000\u00f6\u00f7\u0003\u0012\t\u0000\u00f7\u00f9\u0001\u0000"+ - "\u0000\u0000\u00f8\u00f3\u0001\u0000\u0000\u0000\u00f8\u00f4\u0001\u0000"+ - "\u0000\u0000\u00f9\u0011\u0001\u0000\u0000\u0000\u00fa\u00fb\u0006\t\uffff"+ - "\uffff\u0000\u00fb\u00ff\u0003\u0014\n\u0000\u00fc\u00fd\u0007\u0000\u0000"+ - "\u0000\u00fd\u00ff\u0003\u0012\t\u0003\u00fe\u00fa\u0001\u0000\u0000\u0000"+ - "\u00fe\u00fc\u0001\u0000\u0000\u0000\u00ff\u0108\u0001\u0000\u0000\u0000"+ - "\u0100\u0101\n\u0002\u0000\u0000\u0101\u0102\u0007\u0001\u0000\u0000\u0102"+ - "\u0107\u0003\u0012\t\u0003\u0103\u0104\n\u0001\u0000\u0000\u0104\u0105"+ - "\u0007\u0000\u0000\u0000\u0105\u0107\u0003\u0012\t\u0002\u0106\u0100\u0001"+ - "\u0000\u0000\u0000\u0106\u0103\u0001\u0000\u0000\u0000\u0107\u010a\u0001"+ - "\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000\u0000\u0108\u0109\u0001"+ - "\u0000\u0000\u0000\u0109\u0013\u0001\u0000\u0000\u0000\u010a\u0108\u0001"+ - "\u0000\u0000\u0000\u010b\u010c\u0006\n\uffff\uffff\u0000\u010c\u0114\u0003"+ - "D\"\u0000\u010d\u0114\u0003:\u001d\u0000\u010e\u0114\u0003\u0016\u000b"+ - "\u0000\u010f\u0110\u00050\u0000\u0000\u0110\u0111\u0003\n\u0005\u0000"+ - "\u0111\u0112\u00057\u0000\u0000\u0112\u0114\u0001\u0000\u0000\u0000\u0113"+ - "\u010b\u0001\u0000\u0000\u0000\u0113\u010d\u0001\u0000\u0000\u0000\u0113"+ - "\u010e\u0001\u0000\u0000\u0000\u0113\u010f\u0001\u0000\u0000\u0000\u0114"+ - "\u011a\u0001\u0000\u0000\u0000\u0115\u0116\n\u0001\u0000\u0000\u0116\u0117"+ - "\u0005%\u0000\u0000\u0117\u0119\u0003\u001e\u000f\u0000\u0118\u0115\u0001"+ - "\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001"+ - "\u0000\u0000\u0000\u011a\u011b\u0001\u0000\u0000\u0000\u011b\u0015\u0001"+ - "\u0000\u0000\u0000\u011c\u011a\u0001\u0000\u0000\u0000\u011d\u011e\u0003"+ - "\u0018\f\u0000\u011e\u012c\u00050\u0000\u0000\u011f\u012d\u0005B\u0000"+ - "\u0000\u0120\u0125\u0003\n\u0005\u0000\u0121\u0122\u0005\'\u0000\u0000"+ - "\u0122\u0124\u0003\n\u0005\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0124"+ - "\u0127\u0001\u0000\u0000\u0000\u0125\u0123\u0001\u0000\u0000\u0000\u0125"+ - "\u0126\u0001\u0000\u0000\u0000\u0126\u012a\u0001\u0000\u0000\u0000\u0127"+ - "\u0125\u0001\u0000\u0000\u0000\u0128\u0129\u0005\'\u0000\u0000\u0129\u012b"+ - "\u0003\u001a\r\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001"+ - "\u0000\u0000\u0000\u012b\u012d\u0001\u0000\u0000\u0000\u012c\u011f\u0001"+ - "\u0000\u0000\u0000\u012c\u0120\u0001\u0000\u0000\u0000\u012c\u012d\u0001"+ - 
"\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u012f\u0005"+ - "7\u0000\u0000\u012f\u0017\u0001\u0000\u0000\u0000\u0130\u0131\u0003H$"+ - "\u0000\u0131\u0019\u0001\u0000\u0000\u0000\u0132\u0133\u0005E\u0000\u0000"+ - "\u0133\u0138\u0003\u001c\u000e\u0000\u0134\u0135\u0005\'\u0000\u0000\u0135"+ - "\u0137\u0003\u001c\u000e\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137"+ - "\u013a\u0001\u0000\u0000\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0138"+ - "\u0139\u0001\u0000\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a"+ - "\u0138\u0001\u0000\u0000\u0000\u013b\u013c\u0005F\u0000\u0000\u013c\u001b"+ - "\u0001\u0000\u0000\u0000\u013d\u013e\u0003j5\u0000\u013e\u013f\u0005&"+ - "\u0000\u0000\u013f\u0140\u0003D\"\u0000\u0140\u001d\u0001\u0000\u0000"+ - "\u0000\u0141\u0142\u0003@ \u0000\u0142\u001f\u0001\u0000\u0000\u0000\u0143"+ - "\u0144\u0005\f\u0000\u0000\u0144\u0145\u0003\"\u0011\u0000\u0145!\u0001"+ - "\u0000\u0000\u0000\u0146\u014b\u0003$\u0012\u0000\u0147\u0148\u0005\'"+ - "\u0000\u0000\u0148\u014a\u0003$\u0012\u0000\u0149\u0147\u0001\u0000\u0000"+ - "\u0000\u014a\u014d\u0001\u0000\u0000\u0000\u014b\u0149\u0001\u0000\u0000"+ - "\u0000\u014b\u014c\u0001\u0000\u0000\u0000\u014c#\u0001\u0000\u0000\u0000"+ - "\u014d\u014b\u0001\u0000\u0000\u0000\u014e\u014f\u0003:\u001d\u0000\u014f"+ - "\u0150\u0005$\u0000\u0000\u0150\u0152\u0001\u0000\u0000\u0000\u0151\u014e"+ - "\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0153"+ - "\u0001\u0000\u0000\u0000\u0153\u0154\u0003\n\u0005\u0000\u0154%\u0001"+ - "\u0000\u0000\u0000\u0155\u0156\u0005\u0006\u0000\u0000\u0156\u015b\u0003"+ - "(\u0014\u0000\u0157\u0158\u0005\'\u0000\u0000\u0158\u015a\u0003(\u0014"+ - "\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u015a\u015d\u0001\u0000\u0000"+ - "\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000"+ - "\u0000\u015c\u015f\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000"+ - "\u0000\u015e\u0160\u0003.\u0017\u0000\u015f\u015e\u0001\u0000\u0000\u0000"+ - "\u015f\u0160\u0001\u0000\u0000\u0000\u0160\'\u0001\u0000\u0000\u0000\u0161"+ - "\u0162\u0003*\u0015\u0000\u0162\u0163\u0005&\u0000\u0000\u0163\u0165\u0001"+ - "\u0000\u0000\u0000\u0164\u0161\u0001\u0000\u0000\u0000\u0164\u0165\u0001"+ - "\u0000\u0000\u0000\u0165\u0166\u0001\u0000\u0000\u0000\u0166\u0167\u0003"+ - ",\u0016\u0000\u0167)\u0001\u0000\u0000\u0000\u0168\u0169\u0005S\u0000"+ - "\u0000\u0169+\u0001\u0000\u0000\u0000\u016a\u016b\u0007\u0002\u0000\u0000"+ - "\u016b-\u0001\u0000\u0000\u0000\u016c\u016d\u0005R\u0000\u0000\u016d\u0172"+ - "\u0005S\u0000\u0000\u016e\u016f\u0005\'\u0000\u0000\u016f\u0171\u0005"+ - "S\u0000\u0000\u0170\u016e\u0001\u0000\u0000\u0000\u0171\u0174\u0001\u0000"+ - "\u0000\u0000\u0172\u0170\u0001\u0000\u0000\u0000\u0172\u0173\u0001\u0000"+ - "\u0000\u0000\u0173/\u0001\u0000\u0000\u0000\u0174\u0172\u0001\u0000\u0000"+ - "\u0000\u0175\u0176\u0005\u0013\u0000\u0000\u0176\u017b\u0003(\u0014\u0000"+ - "\u0177\u0178\u0005\'\u0000\u0000\u0178\u017a\u0003(\u0014\u0000\u0179"+ - "\u0177\u0001\u0000\u0000\u0000\u017a\u017d\u0001\u0000\u0000\u0000\u017b"+ - "\u0179\u0001\u0000\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c"+ - "\u017f\u0001\u0000\u0000\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e"+ - "\u0180\u00036\u001b\u0000\u017f\u017e\u0001\u0000\u0000\u0000\u017f\u0180"+ - "\u0001\u0000\u0000\u0000\u0180\u0183\u0001\u0000\u0000\u0000\u0181\u0182"+ - "\u0005!\u0000\u0000\u0182\u0184\u0003\"\u0011\u0000\u0183\u0181\u0001"+ - 
"\u0000\u0000\u0000\u0183\u0184\u0001\u0000\u0000\u0000\u01841\u0001\u0000"+ - "\u0000\u0000\u0185\u0186\u0005\u0004\u0000\u0000\u0186\u0187\u0003\"\u0011"+ - "\u0000\u01873\u0001\u0000\u0000\u0000\u0188\u018a\u0005\u000f\u0000\u0000"+ - "\u0189\u018b\u00036\u001b\u0000\u018a\u0189\u0001\u0000\u0000\u0000\u018a"+ - "\u018b\u0001\u0000\u0000\u0000\u018b\u018e\u0001\u0000\u0000\u0000\u018c"+ - "\u018d\u0005!\u0000\u0000\u018d\u018f\u0003\"\u0011\u0000\u018e\u018c"+ - "\u0001\u0000\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000\u018f5\u0001"+ - "\u0000\u0000\u0000\u0190\u0195\u00038\u001c\u0000\u0191\u0192\u0005\'"+ - "\u0000\u0000\u0192\u0194\u00038\u001c\u0000\u0193\u0191\u0001\u0000\u0000"+ - "\u0000\u0194\u0197\u0001\u0000\u0000\u0000\u0195\u0193\u0001\u0000\u0000"+ - "\u0000\u0195\u0196\u0001\u0000\u0000\u0000\u01967\u0001\u0000\u0000\u0000"+ - "\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019b\u0003$\u0012\u0000\u0199"+ - "\u019a\u0005\u0010\u0000\u0000\u019a\u019c\u0003\n\u0005\u0000\u019b\u0199"+ - "\u0001\u0000\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000\u019c9\u0001"+ - "\u0000\u0000\u0000\u019d\u01a2\u0003H$\u0000\u019e\u019f\u0005)\u0000"+ - "\u0000\u019f\u01a1\u0003H$\u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1"+ - "\u01a4\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2"+ - "\u01a3\u0001\u0000\u0000\u0000\u01a3;\u0001\u0000\u0000\u0000\u01a4\u01a2"+ - "\u0001\u0000\u0000\u0000\u01a5\u01aa\u0003B!\u0000\u01a6\u01a7\u0005)"+ - "\u0000\u0000\u01a7\u01a9\u0003B!\u0000\u01a8\u01a6\u0001\u0000\u0000\u0000"+ - "\u01a9\u01ac\u0001\u0000\u0000\u0000\u01aa\u01a8\u0001\u0000\u0000\u0000"+ - "\u01aa\u01ab\u0001\u0000\u0000\u0000\u01ab=\u0001\u0000\u0000\u0000\u01ac"+ - "\u01aa\u0001\u0000\u0000\u0000\u01ad\u01b2\u0003<\u001e\u0000\u01ae\u01af"+ - "\u0005\'\u0000\u0000\u01af\u01b1\u0003<\u001e\u0000\u01b0\u01ae\u0001"+ - "\u0000\u0000\u0000\u01b1\u01b4\u0001\u0000\u0000\u0000\u01b2\u01b0\u0001"+ - "\u0000\u0000\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3?\u0001\u0000"+ - "\u0000\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b5\u01b6\u0007\u0003"+ - "\u0000\u0000\u01b6A\u0001\u0000\u0000\u0000\u01b7\u01bb\u0005W\u0000\u0000"+ - "\u01b8\u01b9\u0004!\n\u0000\u01b9\u01bb\u0003F#\u0000\u01ba\u01b7\u0001"+ - "\u0000\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bbC\u0001\u0000"+ - "\u0000\u0000\u01bc\u01e7\u00052\u0000\u0000\u01bd\u01be\u0003h4\u0000"+ - "\u01be\u01bf\u0005J\u0000\u0000\u01bf\u01e7\u0001\u0000\u0000\u0000\u01c0"+ - "\u01e7\u0003f3\u0000\u01c1\u01e7\u0003h4\u0000\u01c2\u01e7\u0003b1\u0000"+ - "\u01c3\u01e7\u0003F#\u0000\u01c4\u01e7\u0003j5\u0000\u01c5\u01c6\u0005"+ - "H\u0000\u0000\u01c6\u01cb\u0003d2\u0000\u01c7\u01c8\u0005\'\u0000\u0000"+ - "\u01c8\u01ca\u0003d2\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd"+ - "\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc"+ - "\u0001\u0000\u0000\u0000\u01cc\u01ce\u0001\u0000\u0000\u0000\u01cd\u01cb"+ - "\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005I\u0000\u0000\u01cf\u01e7\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d1\u0005H\u0000\u0000\u01d1\u01d6\u0003b1"+ - "\u0000\u01d2\u01d3\u0005\'\u0000\u0000\u01d3\u01d5\u0003b1\u0000\u01d4"+ - "\u01d2\u0001\u0000\u0000\u0000\u01d5\u01d8\u0001\u0000\u0000\u0000\u01d6"+ - "\u01d4\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7"+ - "\u01d9\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d9"+ - "\u01da\u0005I\u0000\u0000\u01da\u01e7\u0001\u0000\u0000\u0000\u01db\u01dc"+ - 
"\u0005H\u0000\u0000\u01dc\u01e1\u0003j5\u0000\u01dd\u01de\u0005\'\u0000"+ - "\u0000\u01de\u01e0\u0003j5\u0000\u01df\u01dd\u0001\u0000\u0000\u0000\u01e0"+ - "\u01e3\u0001\u0000\u0000\u0000\u01e1\u01df\u0001\u0000\u0000\u0000\u01e1"+ - "\u01e2\u0001\u0000\u0000\u0000\u01e2\u01e4\u0001\u0000\u0000\u0000\u01e3"+ - "\u01e1\u0001\u0000\u0000\u0000\u01e4\u01e5\u0005I\u0000\u0000\u01e5\u01e7"+ - "\u0001\u0000\u0000\u0000\u01e6\u01bc\u0001\u0000\u0000\u0000\u01e6\u01bd"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c0\u0001\u0000\u0000\u0000\u01e6\u01c1"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c2\u0001\u0000\u0000\u0000\u01e6\u01c3"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c4\u0001\u0000\u0000\u0000\u01e6\u01c5"+ - "\u0001\u0000\u0000\u0000\u01e6\u01d0\u0001\u0000\u0000\u0000\u01e6\u01db"+ - "\u0001\u0000\u0000\u0000\u01e7E\u0001\u0000\u0000\u0000\u01e8\u01eb\u0005"+ - "5\u0000\u0000\u01e9\u01eb\u0005G\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000"+ - "\u0000\u01ea\u01e9\u0001\u0000\u0000\u0000\u01ebG\u0001\u0000\u0000\u0000"+ - "\u01ec\u01f0\u0003@ \u0000\u01ed\u01ee\u0004$\u000b\u0000\u01ee\u01f0"+ - "\u0003F#\u0000\u01ef\u01ec\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001\u0000"+ - "\u0000\u0000\u01f0I\u0001\u0000\u0000\u0000\u01f1\u01f2\u0005\t\u0000"+ - "\u0000\u01f2\u01f3\u0005\u001f\u0000\u0000\u01f3K\u0001\u0000\u0000\u0000"+ - "\u01f4\u01f5\u0005\u000e\u0000\u0000\u01f5\u01fa\u0003N\'\u0000\u01f6"+ - "\u01f7\u0005\'\u0000\u0000\u01f7\u01f9\u0003N\'\u0000\u01f8\u01f6\u0001"+ - "\u0000\u0000\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001"+ - "\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fbM\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01ff\u0003\n\u0005"+ - "\u0000\u01fe\u0200\u0007\u0004\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000"+ - "\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000\u0000"+ - "\u0000\u0201\u0202\u00053\u0000\u0000\u0202\u0204\u0007\u0005\u0000\u0000"+ - "\u0203\u0201\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000"+ - "\u0204O\u0001\u0000\u0000\u0000\u0205\u0206\u0005\b\u0000\u0000\u0206"+ - "\u0207\u0003>\u001f\u0000\u0207Q\u0001\u0000\u0000\u0000\u0208\u0209\u0005"+ - "\u0002\u0000\u0000\u0209\u020a\u0003>\u001f\u0000\u020aS\u0001\u0000\u0000"+ - "\u0000\u020b\u020c\u0005\u000b\u0000\u0000\u020c\u0211\u0003V+\u0000\u020d"+ - "\u020e\u0005\'\u0000\u0000\u020e\u0210\u0003V+\u0000\u020f\u020d\u0001"+ - "\u0000\u0000\u0000\u0210\u0213\u0001\u0000\u0000\u0000\u0211\u020f\u0001"+ - "\u0000\u0000\u0000\u0211\u0212\u0001\u0000\u0000\u0000\u0212U\u0001\u0000"+ - "\u0000\u0000\u0213\u0211\u0001\u0000\u0000\u0000\u0214\u0215\u0003<\u001e"+ - "\u0000\u0215\u0216\u0005[\u0000\u0000\u0216\u0217\u0003<\u001e\u0000\u0217"+ - "W\u0001\u0000\u0000\u0000\u0218\u0219\u0005\u0001\u0000\u0000\u0219\u021a"+ - "\u0003\u0014\n\u0000\u021a\u021c\u0003j5\u0000\u021b\u021d\u0003^/\u0000"+ - "\u021c\u021b\u0001\u0000\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000"+ - "\u021dY\u0001\u0000\u0000\u0000\u021e\u021f\u0005\u0007\u0000\u0000\u021f"+ - "\u0220\u0003\u0014\n\u0000\u0220\u0221\u0003j5\u0000\u0221[\u0001\u0000"+ - "\u0000\u0000\u0222\u0223\u0005\n\u0000\u0000\u0223\u0224\u0003:\u001d"+ - "\u0000\u0224]\u0001\u0000\u0000\u0000\u0225\u022a\u0003`0\u0000\u0226"+ - "\u0227\u0005\'\u0000\u0000\u0227\u0229\u0003`0\u0000\u0228\u0226\u0001"+ - "\u0000\u0000\u0000\u0229\u022c\u0001\u0000\u0000\u0000\u022a\u0228\u0001"+ - "\u0000\u0000\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b_\u0001\u0000"+ - 
"\u0000\u0000\u022c\u022a\u0001\u0000\u0000\u0000\u022d\u022e\u0003@ \u0000"+ - "\u022e\u022f\u0005$\u0000\u0000\u022f\u0230\u0003D\"\u0000\u0230a\u0001"+ - "\u0000\u0000\u0000\u0231\u0232\u0007\u0006\u0000\u0000\u0232c\u0001\u0000"+ - "\u0000\u0000\u0233\u0236\u0003f3\u0000\u0234\u0236\u0003h4\u0000\u0235"+ - "\u0233\u0001\u0000\u0000\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0236"+ - "e\u0001\u0000\u0000\u0000\u0237\u0239\u0007\u0000\u0000\u0000\u0238\u0237"+ - "\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239\u023a"+ - "\u0001\u0000\u0000\u0000\u023a\u023b\u0005 \u0000\u0000\u023bg\u0001\u0000"+ - "\u0000\u0000\u023c\u023e\u0007\u0000\u0000\u0000\u023d\u023c\u0001\u0000"+ - "\u0000\u0000\u023d\u023e\u0001\u0000\u0000\u0000\u023e\u023f\u0001\u0000"+ - "\u0000\u0000\u023f\u0240\u0005\u001f\u0000\u0000\u0240i\u0001\u0000\u0000"+ - "\u0000\u0241\u0242\u0005\u001e\u0000\u0000\u0242k\u0001\u0000\u0000\u0000"+ - "\u0243\u0244\u0007\u0007\u0000\u0000\u0244m\u0001\u0000\u0000\u0000\u0245"+ - "\u0246\u0005\u0005\u0000\u0000\u0246\u0247\u0003p8\u0000\u0247o\u0001"+ - "\u0000\u0000\u0000\u0248\u0249\u0005H\u0000\u0000\u0249\u024a\u0003\u0002"+ - "\u0001\u0000\u024a\u024b\u0005I\u0000\u0000\u024bq\u0001\u0000\u0000\u0000"+ - "\u024c\u024d\u0005\r\u0000\u0000\u024d\u024e\u0005k\u0000\u0000\u024e"+ - "s\u0001\u0000\u0000\u0000\u024f\u0250\u0005\u0003\u0000\u0000\u0250\u0253"+ - "\u0005a\u0000\u0000\u0251\u0252\u0005_\u0000\u0000\u0252\u0254\u0003<"+ - "\u001e\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000"+ - "\u0000\u0000\u0254\u025e\u0001\u0000\u0000\u0000\u0255\u0256\u0005`\u0000"+ - "\u0000\u0256\u025b\u0003v;\u0000\u0257\u0258\u0005\'\u0000\u0000\u0258"+ - "\u025a\u0003v;\u0000\u0259\u0257\u0001\u0000\u0000\u0000\u025a\u025d\u0001"+ - "\u0000\u0000\u0000\u025b\u0259\u0001\u0000\u0000\u0000\u025b\u025c\u0001"+ - "\u0000\u0000\u0000\u025c\u025f\u0001\u0000\u0000\u0000\u025d\u025b\u0001"+ - "\u0000\u0000\u0000\u025e\u0255\u0001\u0000\u0000\u0000\u025e\u025f\u0001"+ - "\u0000\u0000\u0000\u025fu\u0001\u0000\u0000\u0000\u0260\u0261\u0003<\u001e"+ - "\u0000\u0261\u0262\u0005$\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000"+ - "\u0263\u0260\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000"+ - "\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0003<\u001e\u0000\u0266"+ - "w\u0001\u0000\u0000\u0000\u0267\u0268\u0005\u0012\u0000\u0000\u0268\u0269"+ - "\u0003(\u0014\u0000\u0269\u026a\u0005_\u0000\u0000\u026a\u026b\u0003>"+ - "\u001f\u0000\u026by\u0001\u0000\u0000\u0000\u026c\u026d\u0005\u0011\u0000"+ - "\u0000\u026d\u0270\u00036\u001b\u0000\u026e\u026f\u0005!\u0000\u0000\u026f"+ - "\u0271\u0003\"\u0011\u0000\u0270\u026e\u0001\u0000\u0000\u0000\u0270\u0271"+ - "\u0001\u0000\u0000\u0000\u0271{\u0001\u0000\u0000\u0000\u0272\u0274\u0007"+ - "\b\u0000\u0000\u0273\u0272\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000"+ - "\u0000\u0000\u0274\u0275\u0001\u0000\u0000\u0000\u0275\u0276\u0005\u0014"+ - "\u0000\u0000\u0276\u0277\u0003~?\u0000\u0277\u0278\u0003\u0080@\u0000"+ - "\u0278}\u0001\u0000\u0000\u0000\u0279\u027c\u0003(\u0014\u0000\u027a\u027b"+ - "\u0005[\u0000\u0000\u027b\u027d\u0003@ \u0000\u027c\u027a\u0001\u0000"+ - "\u0000\u0000\u027c\u027d\u0001\u0000\u0000\u0000\u027d\u007f\u0001\u0000"+ - "\u0000\u0000\u027e\u027f\u0005_\u0000\u0000\u027f\u0284\u0003\u0082A\u0000"+ - "\u0280\u0281\u0005\'\u0000\u0000\u0281\u0283\u0003\u0082A\u0000\u0282"+ - "\u0280\u0001\u0000\u0000\u0000\u0283\u0286\u0001\u0000\u0000\u0000\u0284"+ - 
"\u0282\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285"+ - "\u0081\u0001\u0000\u0000\u0000\u0286\u0284\u0001\u0000\u0000\u0000\u0287"+ - "\u0288\u0003\u0010\b\u0000\u0288\u0083\u0001\u0000\u0000\u0000?\u008f"+ - "\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9\u00ee"+ - "\u00f8\u00fe\u0106\u0108\u0113\u011a\u0125\u012a\u012c\u0138\u014b\u0151"+ - "\u015b\u015f\u0164\u0172\u017b\u017f\u0183\u018a\u018e\u0195\u019b\u01a2"+ - "\u01aa\u01b2\u01ba\u01cb\u01d6\u01e1\u01e6\u01ea\u01ef\u01fa\u01ff\u0203"+ - "\u0211\u021c\u022a\u0235\u0238\u023d\u0253\u025b\u025e\u0263\u0270\u0273"+ - "\u027c\u0284"; + "++..\u0002\u0000**88\u0002\u000099;?\u0002\u0000\u0011\u0011\u0017\u0018"+ + "\u02ae\u0000\u0086\u0001\u0000\u0000\u0000\u0002\u0089\u0001\u0000\u0000"+ + "\u0000\u0004\u009a\u0001\u0000\u0000\u0000\u0006\u00af\u0001\u0000\u0000"+ + "\u0000\b\u00b1\u0001\u0000\u0000\u0000\n\u00d1\u0001\u0000\u0000\u0000"+ + "\f\u00ec\u0001\u0000\u0000\u0000\u000e\u00ee\u0001\u0000\u0000\u0000\u0010"+ + "\u00fb\u0001\u0000\u0000\u0000\u0012\u0101\u0001\u0000\u0000\u0000\u0014"+ + "\u0116\u0001\u0000\u0000\u0000\u0016\u0120\u0001\u0000\u0000\u0000\u0018"+ + "\u0133\u0001\u0000\u0000\u0000\u001a\u0135\u0001\u0000\u0000\u0000\u001c"+ + "\u0140\u0001\u0000\u0000\u0000\u001e\u0144\u0001\u0000\u0000\u0000 \u0146"+ + "\u0001\u0000\u0000\u0000\"\u0149\u0001\u0000\u0000\u0000$\u0154\u0001"+ + "\u0000\u0000\u0000&\u0158\u0001\u0000\u0000\u0000(\u0167\u0001\u0000\u0000"+ + "\u0000*\u016b\u0001\u0000\u0000\u0000,\u016d\u0001\u0000\u0000\u0000."+ + "\u016f\u0001\u0000\u0000\u00000\u0178\u0001\u0000\u0000\u00002\u0188\u0001"+ + "\u0000\u0000\u00004\u018b\u0001\u0000\u0000\u00006\u0193\u0001\u0000\u0000"+ + "\u00008\u019b\u0001\u0000\u0000\u0000:\u01a0\u0001\u0000\u0000\u0000<"+ + "\u01a8\u0001\u0000\u0000\u0000>\u01b0\u0001\u0000\u0000\u0000@\u01b8\u0001"+ + "\u0000\u0000\u0000B\u01bc\u0001\u0000\u0000\u0000D\u01e8\u0001\u0000\u0000"+ + "\u0000F\u01ec\u0001\u0000\u0000\u0000H\u01f0\u0001\u0000\u0000\u0000J"+ + "\u01f2\u0001\u0000\u0000\u0000L\u01f5\u0001\u0000\u0000\u0000N\u01fe\u0001"+ + "\u0000\u0000\u0000P\u0206\u0001\u0000\u0000\u0000R\u0209\u0001\u0000\u0000"+ + "\u0000T\u020c\u0001\u0000\u0000\u0000V\u0215\u0001\u0000\u0000\u0000X"+ + "\u0219\u0001\u0000\u0000\u0000Z\u021f\u0001\u0000\u0000\u0000\\\u0223"+ + "\u0001\u0000\u0000\u0000^\u0226\u0001\u0000\u0000\u0000`\u022e\u0001\u0000"+ + "\u0000\u0000b\u0232\u0001\u0000\u0000\u0000d\u0236\u0001\u0000\u0000\u0000"+ + "f\u0239\u0001\u0000\u0000\u0000h\u023e\u0001\u0000\u0000\u0000j\u0242"+ + "\u0001\u0000\u0000\u0000l\u0244\u0001\u0000\u0000\u0000n\u0246\u0001\u0000"+ + "\u0000\u0000p\u0249\u0001\u0000\u0000\u0000r\u024d\u0001\u0000\u0000\u0000"+ + "t\u0250\u0001\u0000\u0000\u0000v\u0264\u0001\u0000\u0000\u0000x\u0268"+ + "\u0001\u0000\u0000\u0000z\u026d\u0001\u0000\u0000\u0000|\u0273\u0001\u0000"+ + "\u0000\u0000~\u0278\u0001\u0000\u0000\u0000\u0080\u027a\u0001\u0000\u0000"+ + "\u0000\u0082\u0283\u0001\u0000\u0000\u0000\u0084\u0285\u0001\u0000\u0000"+ + "\u0000\u0086\u0087\u0003\u0002\u0001\u0000\u0087\u0088\u0005\u0000\u0000"+ + "\u0001\u0088\u0001\u0001\u0000\u0000\u0000\u0089\u008a\u0006\u0001\uffff"+ + "\uffff\u0000\u008a\u008b\u0003\u0004\u0002\u0000\u008b\u0091\u0001\u0000"+ + "\u0000\u0000\u008c\u008d\n\u0001\u0000\u0000\u008d\u008e\u0005\u001d\u0000"+ + "\u0000\u008e\u0090\u0003\u0006\u0003\u0000\u008f\u008c\u0001\u0000\u0000"+ + "\u0000\u0090\u0093\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000"+ + 
"\u0000\u0091\u0092\u0001\u0000\u0000\u0000\u0092\u0003\u0001\u0000\u0000"+ + "\u0000\u0093\u0091\u0001\u0000\u0000\u0000\u0094\u009b\u0003n7\u0000\u0095"+ + "\u009b\u0003&\u0013\u0000\u0096\u009b\u0003 \u0010\u0000\u0097\u009b\u0003"+ + "r9\u0000\u0098\u0099\u0004\u0002\u0001\u0000\u0099\u009b\u00030\u0018"+ + "\u0000\u009a\u0094\u0001\u0000\u0000\u0000\u009a\u0095\u0001\u0000\u0000"+ + "\u0000\u009a\u0096\u0001\u0000\u0000\u0000\u009a\u0097\u0001\u0000\u0000"+ + "\u0000\u009a\u0098\u0001\u0000\u0000\u0000\u009b\u0005\u0001\u0000\u0000"+ + "\u0000\u009c\u00b0\u00032\u0019\u0000\u009d\u00b0\u0003\b\u0004\u0000"+ + "\u009e\u00b0\u0003P(\u0000\u009f\u00b0\u0003J%\u0000\u00a0\u00b0\u0003"+ + "4\u001a\u0000\u00a1\u00b0\u0003L&\u0000\u00a2\u00b0\u0003R)\u0000\u00a3"+ + "\u00b0\u0003T*\u0000\u00a4\u00b0\u0003X,\u0000\u00a5\u00b0\u0003Z-\u0000"+ + "\u00a6\u00b0\u0003t:\u0000\u00a7\u00b0\u0003\\.\u0000\u00a8\u00b0\u0003"+ + "|>\u0000\u00a9\u00aa\u0004\u0003\u0002\u0000\u00aa\u00b0\u0003z=\u0000"+ + "\u00ab\u00ac\u0004\u0003\u0003\u0000\u00ac\u00b0\u0003x<\u0000\u00ad\u00ae"+ + "\u0004\u0003\u0004\u0000\u00ae\u00b0\u0003\u0084B\u0000\u00af\u009c\u0001"+ + "\u0000\u0000\u0000\u00af\u009d\u0001\u0000\u0000\u0000\u00af\u009e\u0001"+ + "\u0000\u0000\u0000\u00af\u009f\u0001\u0000\u0000\u0000\u00af\u00a0\u0001"+ + "\u0000\u0000\u0000\u00af\u00a1\u0001\u0000\u0000\u0000\u00af\u00a2\u0001"+ + "\u0000\u0000\u0000\u00af\u00a3\u0001\u0000\u0000\u0000\u00af\u00a4\u0001"+ + "\u0000\u0000\u0000\u00af\u00a5\u0001\u0000\u0000\u0000\u00af\u00a6\u0001"+ + "\u0000\u0000\u0000\u00af\u00a7\u0001\u0000\u0000\u0000\u00af\u00a8\u0001"+ + "\u0000\u0000\u0000\u00af\u00a9\u0001\u0000\u0000\u0000\u00af\u00ab\u0001"+ + "\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000\u0000\u00b0\u0007\u0001"+ + "\u0000\u0000\u0000\u00b1\u00b2\u0005\u0010\u0000\u0000\u00b2\u00b3\u0003"+ + "\n\u0005\u0000\u00b3\t\u0001\u0000\u0000\u0000\u00b4\u00b5\u0006\u0005"+ + "\uffff\uffff\u0000\u00b5\u00b6\u00051\u0000\u0000\u00b6\u00d2\u0003\n"+ + "\u0005\b\u00b7\u00d2\u0003\u0010\b\u0000\u00b8\u00d2\u0003\f\u0006\u0000"+ + "\u00b9\u00bb\u0003\u0010\b\u0000\u00ba\u00bc\u00051\u0000\u0000\u00bb"+ + "\u00ba\u0001\u0000\u0000\u0000\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc"+ + "\u00bd\u0001\u0000\u0000\u0000\u00bd\u00be\u0005,\u0000\u0000\u00be\u00bf"+ + "\u00050\u0000\u0000\u00bf\u00c4\u0003\u0010\b\u0000\u00c0\u00c1\u0005"+ + "\'\u0000\u0000\u00c1\u00c3\u0003\u0010\b\u0000\u00c2\u00c0\u0001\u0000"+ + "\u0000\u0000\u00c3\u00c6\u0001\u0000\u0000\u0000\u00c4\u00c2\u0001\u0000"+ + "\u0000\u0000\u00c4\u00c5\u0001\u0000\u0000\u0000\u00c5\u00c7\u0001\u0000"+ + "\u0000\u0000\u00c6\u00c4\u0001\u0000\u0000\u0000\u00c7\u00c8\u00057\u0000"+ + "\u0000\u00c8\u00d2\u0001\u0000\u0000\u0000\u00c9\u00ca\u0003\u0010\b\u0000"+ + "\u00ca\u00cc\u0005-\u0000\u0000\u00cb\u00cd\u00051\u0000\u0000\u00cc\u00cb"+ + "\u0001\u0000\u0000\u0000\u00cc\u00cd\u0001\u0000\u0000\u0000\u00cd\u00ce"+ + "\u0001\u0000\u0000\u0000\u00ce\u00cf\u00052\u0000\u0000\u00cf\u00d2\u0001"+ + "\u0000\u0000\u0000\u00d0\u00d2\u0003\u000e\u0007\u0000\u00d1\u00b4\u0001"+ + "\u0000\u0000\u0000\u00d1\u00b7\u0001\u0000\u0000\u0000\u00d1\u00b8\u0001"+ + "\u0000\u0000\u0000\u00d1\u00b9\u0001\u0000\u0000\u0000\u00d1\u00c9\u0001"+ + "\u0000\u0000\u0000\u00d1\u00d0\u0001\u0000\u0000\u0000\u00d2\u00db\u0001"+ + "\u0000\u0000\u0000\u00d3\u00d4\n\u0005\u0000\u0000\u00d4\u00d5\u0005\""+ + "\u0000\u0000\u00d5\u00da\u0003\n\u0005\u0006\u00d6\u00d7\n\u0004\u0000"+ + 
"\u0000\u00d7\u00d8\u00054\u0000\u0000\u00d8\u00da\u0003\n\u0005\u0005"+ + "\u00d9\u00d3\u0001\u0000\u0000\u0000\u00d9\u00d6\u0001\u0000\u0000\u0000"+ + "\u00da\u00dd\u0001\u0000\u0000\u0000\u00db\u00d9\u0001\u0000\u0000\u0000"+ + "\u00db\u00dc\u0001\u0000\u0000\u0000\u00dc\u000b\u0001\u0000\u0000\u0000"+ + "\u00dd\u00db\u0001\u0000\u0000\u0000\u00de\u00e0\u0003\u0010\b\u0000\u00df"+ + "\u00e1\u00051\u0000\u0000\u00e0\u00df\u0001\u0000\u0000\u0000\u00e0\u00e1"+ + "\u0001\u0000\u0000\u0000\u00e1\u00e2\u0001\u0000\u0000\u0000\u00e2\u00e3"+ + "\u0005/\u0000\u0000\u00e3\u00e4\u0003j5\u0000\u00e4\u00ed\u0001\u0000"+ + "\u0000\u0000\u00e5\u00e7\u0003\u0010\b\u0000\u00e6\u00e8\u00051\u0000"+ + "\u0000\u00e7\u00e6\u0001\u0000\u0000\u0000\u00e7\u00e8\u0001\u0000\u0000"+ + "\u0000\u00e8\u00e9\u0001\u0000\u0000\u0000\u00e9\u00ea\u00056\u0000\u0000"+ + "\u00ea\u00eb\u0003j5\u0000\u00eb\u00ed\u0001\u0000\u0000\u0000\u00ec\u00de"+ + "\u0001\u0000\u0000\u0000\u00ec\u00e5\u0001\u0000\u0000\u0000\u00ed\r\u0001"+ + "\u0000\u0000\u0000\u00ee\u00f1\u0003:\u001d\u0000\u00ef\u00f0\u0005%\u0000"+ + "\u0000\u00f0\u00f2\u0003\u001e\u000f\u0000\u00f1\u00ef\u0001\u0000\u0000"+ + "\u0000\u00f1\u00f2\u0001\u0000\u0000\u0000\u00f2\u00f3\u0001\u0000\u0000"+ + "\u0000\u00f3\u00f4\u0005&\u0000\u0000\u00f4\u00f5\u0003D\"\u0000\u00f5"+ + "\u000f\u0001\u0000\u0000\u0000\u00f6\u00fc\u0003\u0012\t\u0000\u00f7\u00f8"+ + "\u0003\u0012\t\u0000\u00f8\u00f9\u0003l6\u0000\u00f9\u00fa\u0003\u0012"+ + "\t\u0000\u00fa\u00fc\u0001\u0000\u0000\u0000\u00fb\u00f6\u0001\u0000\u0000"+ + "\u0000\u00fb\u00f7\u0001\u0000\u0000\u0000\u00fc\u0011\u0001\u0000\u0000"+ + "\u0000\u00fd\u00fe\u0006\t\uffff\uffff\u0000\u00fe\u0102\u0003\u0014\n"+ + "\u0000\u00ff\u0100\u0007\u0000\u0000\u0000\u0100\u0102\u0003\u0012\t\u0003"+ + "\u0101\u00fd\u0001\u0000\u0000\u0000\u0101\u00ff\u0001\u0000\u0000\u0000"+ + "\u0102\u010b\u0001\u0000\u0000\u0000\u0103\u0104\n\u0002\u0000\u0000\u0104"+ + "\u0105\u0007\u0001\u0000\u0000\u0105\u010a\u0003\u0012\t\u0003\u0106\u0107"+ + "\n\u0001\u0000\u0000\u0107\u0108\u0007\u0000\u0000\u0000\u0108\u010a\u0003"+ + "\u0012\t\u0002\u0109\u0103\u0001\u0000\u0000\u0000\u0109\u0106\u0001\u0000"+ + "\u0000\u0000\u010a\u010d\u0001\u0000\u0000\u0000\u010b\u0109\u0001\u0000"+ + "\u0000\u0000\u010b\u010c\u0001\u0000\u0000\u0000\u010c\u0013\u0001\u0000"+ + "\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000\u010e\u010f\u0006\n\uffff"+ + "\uffff\u0000\u010f\u0117\u0003D\"\u0000\u0110\u0117\u0003:\u001d\u0000"+ + "\u0111\u0117\u0003\u0016\u000b\u0000\u0112\u0113\u00050\u0000\u0000\u0113"+ + "\u0114\u0003\n\u0005\u0000\u0114\u0115\u00057\u0000\u0000\u0115\u0117"+ + "\u0001\u0000\u0000\u0000\u0116\u010e\u0001\u0000\u0000\u0000\u0116\u0110"+ + "\u0001\u0000\u0000\u0000\u0116\u0111\u0001\u0000\u0000\u0000\u0116\u0112"+ + "\u0001\u0000\u0000\u0000\u0117\u011d\u0001\u0000\u0000\u0000\u0118\u0119"+ + "\n\u0001\u0000\u0000\u0119\u011a\u0005%\u0000\u0000\u011a\u011c\u0003"+ + "\u001e\u000f\u0000\u011b\u0118\u0001\u0000\u0000\u0000\u011c\u011f\u0001"+ + "\u0000\u0000\u0000\u011d\u011b\u0001\u0000\u0000\u0000\u011d\u011e\u0001"+ + "\u0000\u0000\u0000\u011e\u0015\u0001\u0000\u0000\u0000\u011f\u011d\u0001"+ + "\u0000\u0000\u0000\u0120\u0121\u0003\u0018\f\u0000\u0121\u012f\u00050"+ + "\u0000\u0000\u0122\u0130\u0005B\u0000\u0000\u0123\u0128\u0003\n\u0005"+ + "\u0000\u0124\u0125\u0005\'\u0000\u0000\u0125\u0127\u0003\n\u0005\u0000"+ + "\u0126\u0124\u0001\u0000\u0000\u0000\u0127\u012a\u0001\u0000\u0000\u0000"+ + 
"\u0128\u0126\u0001\u0000\u0000\u0000\u0128\u0129\u0001\u0000\u0000\u0000"+ + "\u0129\u012d\u0001\u0000\u0000\u0000\u012a\u0128\u0001\u0000\u0000\u0000"+ + "\u012b\u012c\u0005\'\u0000\u0000\u012c\u012e\u0003\u001a\r\u0000\u012d"+ + "\u012b\u0001\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e"+ + "\u0130\u0001\u0000\u0000\u0000\u012f\u0122\u0001\u0000\u0000\u0000\u012f"+ + "\u0123\u0001\u0000\u0000\u0000\u012f\u0130\u0001\u0000\u0000\u0000\u0130"+ + "\u0131\u0001\u0000\u0000\u0000\u0131\u0132\u00057\u0000\u0000\u0132\u0017"+ + "\u0001\u0000\u0000\u0000\u0133\u0134\u0003H$\u0000\u0134\u0019\u0001\u0000"+ + "\u0000\u0000\u0135\u0136\u0005E\u0000\u0000\u0136\u013b\u0003\u001c\u000e"+ + "\u0000\u0137\u0138\u0005\'\u0000\u0000\u0138\u013a\u0003\u001c\u000e\u0000"+ + "\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000\u0000"+ + "\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000\u0000"+ + "\u013c\u013e\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ + "\u013e\u013f\u0005F\u0000\u0000\u013f\u001b\u0001\u0000\u0000\u0000\u0140"+ + "\u0141\u0003j5\u0000\u0141\u0142\u0005&\u0000\u0000\u0142\u0143\u0003"+ + "D\"\u0000\u0143\u001d\u0001\u0000\u0000\u0000\u0144\u0145\u0003@ \u0000"+ + "\u0145\u001f\u0001\u0000\u0000\u0000\u0146\u0147\u0005\f\u0000\u0000\u0147"+ + "\u0148\u0003\"\u0011\u0000\u0148!\u0001\u0000\u0000\u0000\u0149\u014e"+ + "\u0003$\u0012\u0000\u014a\u014b\u0005\'\u0000\u0000\u014b\u014d\u0003"+ + "$\u0012\u0000\u014c\u014a\u0001\u0000\u0000\u0000\u014d\u0150\u0001\u0000"+ + "\u0000\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014e\u014f\u0001\u0000"+ + "\u0000\u0000\u014f#\u0001\u0000\u0000\u0000\u0150\u014e\u0001\u0000\u0000"+ + "\u0000\u0151\u0152\u0003:\u001d\u0000\u0152\u0153\u0005$\u0000\u0000\u0153"+ + "\u0155\u0001\u0000\u0000\u0000\u0154\u0151\u0001\u0000\u0000\u0000\u0154"+ + "\u0155\u0001\u0000\u0000\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156"+ + "\u0157\u0003\n\u0005\u0000\u0157%\u0001\u0000\u0000\u0000\u0158\u0159"+ + "\u0005\u0006\u0000\u0000\u0159\u015e\u0003(\u0014\u0000\u015a\u015b\u0005"+ + "\'\u0000\u0000\u015b\u015d\u0003(\u0014\u0000\u015c\u015a\u0001\u0000"+ + "\u0000\u0000\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000"+ + "\u0000\u0000\u015e\u015f\u0001\u0000\u0000\u0000\u015f\u0162\u0001\u0000"+ + "\u0000\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0161\u0163\u0003.\u0017"+ + "\u0000\u0162\u0161\u0001\u0000\u0000\u0000\u0162\u0163\u0001\u0000\u0000"+ + "\u0000\u0163\'\u0001\u0000\u0000\u0000\u0164\u0165\u0003*\u0015\u0000"+ + "\u0165\u0166\u0005&\u0000\u0000\u0166\u0168\u0001\u0000\u0000\u0000\u0167"+ + "\u0164\u0001\u0000\u0000\u0000\u0167\u0168\u0001\u0000\u0000\u0000\u0168"+ + "\u0169\u0001\u0000\u0000\u0000\u0169\u016a\u0003,\u0016\u0000\u016a)\u0001"+ + "\u0000\u0000\u0000\u016b\u016c\u0007\u0002\u0000\u0000\u016c+\u0001\u0000"+ + "\u0000\u0000\u016d\u016e\u0007\u0002\u0000\u0000\u016e-\u0001\u0000\u0000"+ + "\u0000\u016f\u0170\u0005R\u0000\u0000\u0170\u0175\u0005S\u0000\u0000\u0171"+ + "\u0172\u0005\'\u0000\u0000\u0172\u0174\u0005S\u0000\u0000\u0173\u0171"+ + "\u0001\u0000\u0000\u0000\u0174\u0177\u0001\u0000\u0000\u0000\u0175\u0173"+ + "\u0001\u0000\u0000\u0000\u0175\u0176\u0001\u0000\u0000\u0000\u0176/\u0001"+ + "\u0000\u0000\u0000\u0177\u0175\u0001\u0000\u0000\u0000\u0178\u0179\u0005"+ + "\u0015\u0000\u0000\u0179\u017e\u0003(\u0014\u0000\u017a\u017b\u0005\'"+ + "\u0000\u0000\u017b\u017d\u0003(\u0014\u0000\u017c\u017a\u0001\u0000\u0000"+ + 
"\u0000\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000"+ + "\u0000\u017e\u017f\u0001\u0000\u0000\u0000\u017f\u0182\u0001\u0000\u0000"+ + "\u0000\u0180\u017e\u0001\u0000\u0000\u0000\u0181\u0183\u00036\u001b\u0000"+ + "\u0182\u0181\u0001\u0000\u0000\u0000\u0182\u0183\u0001\u0000\u0000\u0000"+ + "\u0183\u0186\u0001\u0000\u0000\u0000\u0184\u0185\u0005!\u0000\u0000\u0185"+ + "\u0187\u0003\"\u0011\u0000\u0186\u0184\u0001\u0000\u0000\u0000\u0186\u0187"+ + "\u0001\u0000\u0000\u0000\u01871\u0001\u0000\u0000\u0000\u0188\u0189\u0005"+ + "\u0004\u0000\u0000\u0189\u018a\u0003\"\u0011\u0000\u018a3\u0001\u0000"+ + "\u0000\u0000\u018b\u018d\u0005\u000f\u0000\u0000\u018c\u018e\u00036\u001b"+ + "\u0000\u018d\u018c\u0001\u0000\u0000\u0000\u018d\u018e\u0001\u0000\u0000"+ + "\u0000\u018e\u0191\u0001\u0000\u0000\u0000\u018f\u0190\u0005!\u0000\u0000"+ + "\u0190\u0192\u0003\"\u0011\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0191"+ + "\u0192\u0001\u0000\u0000\u0000\u01925\u0001\u0000\u0000\u0000\u0193\u0198"+ + "\u00038\u001c\u0000\u0194\u0195\u0005\'\u0000\u0000\u0195\u0197\u0003"+ + "8\u001c\u0000\u0196\u0194\u0001\u0000\u0000\u0000\u0197\u019a\u0001\u0000"+ + "\u0000\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0198\u0199\u0001\u0000"+ + "\u0000\u0000\u01997\u0001\u0000\u0000\u0000\u019a\u0198\u0001\u0000\u0000"+ + "\u0000\u019b\u019e\u0003$\u0012\u0000\u019c\u019d\u0005\u0010\u0000\u0000"+ + "\u019d\u019f\u0003\n\u0005\u0000\u019e\u019c\u0001\u0000\u0000\u0000\u019e"+ + "\u019f\u0001\u0000\u0000\u0000\u019f9\u0001\u0000\u0000\u0000\u01a0\u01a5"+ + "\u0003H$\u0000\u01a1\u01a2\u0005)\u0000\u0000\u01a2\u01a4\u0003H$\u0000"+ + "\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a4\u01a7\u0001\u0000\u0000\u0000"+ + "\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000\u0000"+ + "\u01a6;\u0001\u0000\u0000\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a8"+ + "\u01ad\u0003B!\u0000\u01a9\u01aa\u0005)\u0000\u0000\u01aa\u01ac\u0003"+ + "B!\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ac\u01af\u0001\u0000\u0000"+ + "\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ad\u01ae\u0001\u0000\u0000"+ + "\u0000\u01ae=\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000"+ + "\u01b0\u01b5\u0003<\u001e\u0000\u01b1\u01b2\u0005\'\u0000\u0000\u01b2"+ + "\u01b4\u0003<\u001e\u0000\u01b3\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7"+ + "\u0001\u0000\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6"+ + "\u0001\u0000\u0000\u0000\u01b6?\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001"+ + "\u0000\u0000\u0000\u01b8\u01b9\u0007\u0003\u0000\u0000\u01b9A\u0001\u0000"+ + "\u0000\u0000\u01ba\u01bd\u0005W\u0000\u0000\u01bb\u01bd\u0003F#\u0000"+ + "\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bb\u0001\u0000\u0000\u0000"+ + "\u01bdC\u0001\u0000\u0000\u0000\u01be\u01e9\u00052\u0000\u0000\u01bf\u01c0"+ + "\u0003h4\u0000\u01c0\u01c1\u0005J\u0000\u0000\u01c1\u01e9\u0001\u0000"+ + "\u0000\u0000\u01c2\u01e9\u0003f3\u0000\u01c3\u01e9\u0003h4\u0000\u01c4"+ + "\u01e9\u0003b1\u0000\u01c5\u01e9\u0003F#\u0000\u01c6\u01e9\u0003j5\u0000"+ + "\u01c7\u01c8\u0005H\u0000\u0000\u01c8\u01cd\u0003d2\u0000\u01c9\u01ca"+ + "\u0005\'\u0000\u0000\u01ca\u01cc\u0003d2\u0000\u01cb\u01c9\u0001\u0000"+ + "\u0000\u0000\u01cc\u01cf\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000"+ + "\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000\u0000\u01ce\u01d0\u0001\u0000"+ + "\u0000\u0000\u01cf\u01cd\u0001\u0000\u0000\u0000\u01d0\u01d1\u0005I\u0000"+ + "\u0000\u01d1\u01e9\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005H\u0000\u0000"+ + 
"\u01d3\u01d8\u0003b1\u0000\u01d4\u01d5\u0005\'\u0000\u0000\u01d5\u01d7"+ + "\u0003b1\u0000\u01d6\u01d4\u0001\u0000\u0000\u0000\u01d7\u01da\u0001\u0000"+ + "\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d8\u01d9\u0001\u0000"+ + "\u0000\u0000\u01d9\u01db\u0001\u0000\u0000\u0000\u01da\u01d8\u0001\u0000"+ + "\u0000\u0000\u01db\u01dc\u0005I\u0000\u0000\u01dc\u01e9\u0001\u0000\u0000"+ + "\u0000\u01dd\u01de\u0005H\u0000\u0000\u01de\u01e3\u0003j5\u0000\u01df"+ + "\u01e0\u0005\'\u0000\u0000\u01e0\u01e2\u0003j5\u0000\u01e1\u01df\u0001"+ + "\u0000\u0000\u0000\u01e2\u01e5\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001"+ + "\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e6\u0001"+ + "\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005"+ + "I\u0000\u0000\u01e7\u01e9\u0001\u0000\u0000\u0000\u01e8\u01be\u0001\u0000"+ + "\u0000\u0000\u01e8\u01bf\u0001\u0000\u0000\u0000\u01e8\u01c2\u0001\u0000"+ + "\u0000\u0000\u01e8\u01c3\u0001\u0000\u0000\u0000\u01e8\u01c4\u0001\u0000"+ + "\u0000\u0000\u01e8\u01c5\u0001\u0000\u0000\u0000\u01e8\u01c6\u0001\u0000"+ + "\u0000\u0000\u01e8\u01c7\u0001\u0000\u0000\u0000\u01e8\u01d2\u0001\u0000"+ + "\u0000\u0000\u01e8\u01dd\u0001\u0000\u0000\u0000\u01e9E\u0001\u0000\u0000"+ + "\u0000\u01ea\u01ed\u00055\u0000\u0000\u01eb\u01ed\u0005G\u0000\u0000\u01ec"+ + "\u01ea\u0001\u0000\u0000\u0000\u01ec\u01eb\u0001\u0000\u0000\u0000\u01ed"+ + "G\u0001\u0000\u0000\u0000\u01ee\u01f1\u0003@ \u0000\u01ef\u01f1\u0003"+ + "F#\u0000\u01f0\u01ee\u0001\u0000\u0000\u0000\u01f0\u01ef\u0001\u0000\u0000"+ + "\u0000\u01f1I\u0001\u0000\u0000\u0000\u01f2\u01f3\u0005\t\u0000\u0000"+ + "\u01f3\u01f4\u0005\u001f\u0000\u0000\u01f4K\u0001\u0000\u0000\u0000\u01f5"+ + "\u01f6\u0005\u000e\u0000\u0000\u01f6\u01fb\u0003N\'\u0000\u01f7\u01f8"+ + "\u0005\'\u0000\u0000\u01f8\u01fa\u0003N\'\u0000\u01f9\u01f7\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fd\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000"+ + "\u0000\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fcM\u0001\u0000\u0000"+ + "\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000\u01fe\u0200\u0003\n\u0005\u0000"+ + "\u01ff\u0201\u0007\u0004\u0000\u0000\u0200\u01ff\u0001\u0000\u0000\u0000"+ + "\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u00053\u0000\u0000\u0203\u0205\u0007\u0005\u0000\u0000\u0204"+ + "\u0202\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205"+ + "O\u0001\u0000\u0000\u0000\u0206\u0207\u0005\b\u0000\u0000\u0207\u0208"+ + "\u0003>\u001f\u0000\u0208Q\u0001\u0000\u0000\u0000\u0209\u020a\u0005\u0002"+ + "\u0000\u0000\u020a\u020b\u0003>\u001f\u0000\u020bS\u0001\u0000\u0000\u0000"+ + "\u020c\u020d\u0005\u000b\u0000\u0000\u020d\u0212\u0003V+\u0000\u020e\u020f"+ + "\u0005\'\u0000\u0000\u020f\u0211\u0003V+\u0000\u0210\u020e\u0001\u0000"+ + "\u0000\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000"+ + "\u0000\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213U\u0001\u0000\u0000"+ + "\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u0216\u0003<\u001e\u0000"+ + "\u0216\u0217\u0005[\u0000\u0000\u0217\u0218\u0003<\u001e\u0000\u0218W"+ + "\u0001\u0000\u0000\u0000\u0219\u021a\u0005\u0001\u0000\u0000\u021a\u021b"+ + "\u0003\u0014\n\u0000\u021b\u021d\u0003j5\u0000\u021c\u021e\u0003^/\u0000"+ + "\u021d\u021c\u0001\u0000\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000"+ + "\u021eY\u0001\u0000\u0000\u0000\u021f\u0220\u0005\u0007\u0000\u0000\u0220"+ + "\u0221\u0003\u0014\n\u0000\u0221\u0222\u0003j5\u0000\u0222[\u0001\u0000"+ + 
"\u0000\u0000\u0223\u0224\u0005\n\u0000\u0000\u0224\u0225\u0003:\u001d"+ + "\u0000\u0225]\u0001\u0000\u0000\u0000\u0226\u022b\u0003`0\u0000\u0227"+ + "\u0228\u0005\'\u0000\u0000\u0228\u022a\u0003`0\u0000\u0229\u0227\u0001"+ + "\u0000\u0000\u0000\u022a\u022d\u0001\u0000\u0000\u0000\u022b\u0229\u0001"+ + "\u0000\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c_\u0001\u0000"+ + "\u0000\u0000\u022d\u022b\u0001\u0000\u0000\u0000\u022e\u022f\u0003@ \u0000"+ + "\u022f\u0230\u0005$\u0000\u0000\u0230\u0231\u0003D\"\u0000\u0231a\u0001"+ + "\u0000\u0000\u0000\u0232\u0233\u0007\u0006\u0000\u0000\u0233c\u0001\u0000"+ + "\u0000\u0000\u0234\u0237\u0003f3\u0000\u0235\u0237\u0003h4\u0000\u0236"+ + "\u0234\u0001\u0000\u0000\u0000\u0236\u0235\u0001\u0000\u0000\u0000\u0237"+ + "e\u0001\u0000\u0000\u0000\u0238\u023a\u0007\u0000\u0000\u0000\u0239\u0238"+ + "\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000\u023a\u023b"+ + "\u0001\u0000\u0000\u0000\u023b\u023c\u0005 \u0000\u0000\u023cg\u0001\u0000"+ + "\u0000\u0000\u023d\u023f\u0007\u0000\u0000\u0000\u023e\u023d\u0001\u0000"+ + "\u0000\u0000\u023e\u023f\u0001\u0000\u0000\u0000\u023f\u0240\u0001\u0000"+ + "\u0000\u0000\u0240\u0241\u0005\u001f\u0000\u0000\u0241i\u0001\u0000\u0000"+ + "\u0000\u0242\u0243\u0005\u001e\u0000\u0000\u0243k\u0001\u0000\u0000\u0000"+ + "\u0244\u0245\u0007\u0007\u0000\u0000\u0245m\u0001\u0000\u0000\u0000\u0246"+ + "\u0247\u0005\u0005\u0000\u0000\u0247\u0248\u0003p8\u0000\u0248o\u0001"+ + "\u0000\u0000\u0000\u0249\u024a\u0005H\u0000\u0000\u024a\u024b\u0003\u0002"+ + "\u0001\u0000\u024b\u024c\u0005I\u0000\u0000\u024cq\u0001\u0000\u0000\u0000"+ + "\u024d\u024e\u0005\r\u0000\u0000\u024e\u024f\u0005k\u0000\u0000\u024f"+ + "s\u0001\u0000\u0000\u0000\u0250\u0251\u0005\u0003\u0000\u0000\u0251\u0254"+ + "\u0005a\u0000\u0000\u0252\u0253\u0005_\u0000\u0000\u0253\u0255\u0003<"+ + "\u001e\u0000\u0254\u0252\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000"+ + "\u0000\u0000\u0255\u025f\u0001\u0000\u0000\u0000\u0256\u0257\u0005`\u0000"+ + "\u0000\u0257\u025c\u0003v;\u0000\u0258\u0259\u0005\'\u0000\u0000\u0259"+ + "\u025b\u0003v;\u0000\u025a\u0258\u0001\u0000\u0000\u0000\u025b\u025e\u0001"+ + "\u0000\u0000\u0000\u025c\u025a\u0001\u0000\u0000\u0000\u025c\u025d\u0001"+ + "\u0000\u0000\u0000\u025d\u0260\u0001\u0000\u0000\u0000\u025e\u025c\u0001"+ + "\u0000\u0000\u0000\u025f\u0256\u0001\u0000\u0000\u0000\u025f\u0260\u0001"+ + "\u0000\u0000\u0000\u0260u\u0001\u0000\u0000\u0000\u0261\u0262\u0003<\u001e"+ + "\u0000\u0262\u0263\u0005$\u0000\u0000\u0263\u0265\u0001\u0000\u0000\u0000"+ + "\u0264\u0261\u0001\u0000\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000"+ + "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0003<\u001e\u0000\u0267"+ + "w\u0001\u0000\u0000\u0000\u0268\u0269\u0005\u0014\u0000\u0000\u0269\u026a"+ + "\u0003(\u0014\u0000\u026a\u026b\u0005_\u0000\u0000\u026b\u026c\u0003>"+ + "\u001f\u0000\u026cy\u0001\u0000\u0000\u0000\u026d\u026e\u0005\u0013\u0000"+ + "\u0000\u026e\u0271\u00036\u001b\u0000\u026f\u0270\u0005!\u0000\u0000\u0270"+ + "\u0272\u0003\"\u0011\u0000\u0271\u026f\u0001\u0000\u0000\u0000\u0271\u0272"+ + "\u0001\u0000\u0000\u0000\u0272{\u0001\u0000\u0000\u0000\u0273\u0274\u0007"+ + "\b\u0000\u0000\u0274\u0275\u0005y\u0000\u0000\u0275\u0276\u0003~?\u0000"+ + "\u0276\u0277\u0003\u0080@\u0000\u0277}\u0001\u0000\u0000\u0000\u0278\u0279"+ + "\u0003(\u0014\u0000\u0279\u007f\u0001\u0000\u0000\u0000\u027a\u027b\u0005"+ + "_\u0000\u0000\u027b\u0280\u0003\u0082A\u0000\u027c\u027d\u0005\'\u0000"+ + 
"\u0000\u027d\u027f\u0003\u0082A\u0000\u027e\u027c\u0001\u0000\u0000\u0000"+ + "\u027f\u0282\u0001\u0000\u0000\u0000\u0280\u027e\u0001\u0000\u0000\u0000"+ + "\u0280\u0281\u0001\u0000\u0000\u0000\u0281\u0081\u0001\u0000\u0000\u0000"+ + "\u0282\u0280\u0001\u0000\u0000\u0000\u0283\u0284\u0003\u0010\b\u0000\u0284"+ + "\u0083\u0001\u0000\u0000\u0000\u0285\u0286\u0005\u0012\u0000\u0000\u0286"+ + "\u0289\u0003:\u001d\u0000\u0287\u0288\u0005_\u0000\u0000\u0288\u028a\u0003"+ + ":\u001d\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u0289\u028a\u0001\u0000"+ + "\u0000\u0000\u028a\u0290\u0001\u0000\u0000\u0000\u028b\u028c\u0005[\u0000"+ + "\u0000\u028c\u028d\u0003:\u001d\u0000\u028d\u028e\u0005\'\u0000\u0000"+ + "\u028e\u028f\u0003:\u001d\u0000\u028f\u0291\u0001\u0000\u0000\u0000\u0290"+ + "\u028b\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000\u0000\u0000\u0291"+ + "\u0085\u0001\u0000\u0000\u0000?\u0091\u009a\u00af\u00bb\u00c4\u00cc\u00d1"+ + "\u00d9\u00db\u00e0\u00e7\u00ec\u00f1\u00fb\u0101\u0109\u010b\u0116\u011d"+ + "\u0128\u012d\u012f\u013b\u014e\u0154\u015e\u0162\u0167\u0175\u017e\u0182"+ + "\u0186\u018d\u0191\u0198\u019e\u01a5\u01ad\u01b5\u01bc\u01cd\u01d8\u01e3"+ + "\u01e8\u01ec\u01f0\u01fb\u0200\u0204\u0212\u021d\u022b\u0236\u0239\u023e"+ + "\u0254\u025c\u025f\u0264\u0271\u0280\u0289\u0290"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index df6d8f1e35013..ee2c97e1e3817 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -1100,6 +1100,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

<p>The default implementation does nothing.</p>
    */ @Override public void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx) { } /** * {@inheritDoc} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 885ba91e20dcd..c1e3a4b32795a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -650,4 +650,11 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
</p>
    */ @Override public T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx) { return visitChildren(ctx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 2c1faa374695e..c4f72ae1444dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -981,4 +981,14 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#changePointCommand}. + * @param ctx the parse tree + */ + void enterChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#changePointCommand}. + * @param ctx the parse tree + */ + void exitChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 73afd23393cdb..4dc2670662f57 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -591,4 +591,10 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#changePointCommand}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java index 9538e3ba495db..5912f1fe58bcd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java @@ -18,7 +18,9 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.esql.core.util.StringUtils; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.BitSet; import java.util.function.BiFunction; @@ -52,20 +54,27 @@ public void setEsqlConfig(EsqlConfig config) { this.config = config; } + // testing utility public LogicalPlan createStatement(String query) { return createStatement(query, new QueryParams()); } + // testing utility public LogicalPlan createStatement(String query, QueryParams params) { + return createStatement(query, params, new PlanTelemetry(new EsqlFunctionRegistry())); + } + + public LogicalPlan createStatement(String query, QueryParams params, PlanTelemetry metrics) { if (log.isDebugEnabled()) { log.debug("Parsing as statement: {}", query); } - return invokeParser(query, params, EsqlBaseParser::singleStatement, AstBuilder::plan); + return invokeParser(query, params, metrics, EsqlBaseParser::singleStatement, AstBuilder::plan); } private T invokeParser( String query, QueryParams params, + PlanTelemetry metrics, Function parseFunction, BiFunction result ) { @@ -99,7 +108,7 @@ private T invokeParser( log.trace("Parse tree: {}", tree.toStringTree()); } - return result.apply(new AstBuilder(params), tree); + return result.apply(new AstBuilder(new ExpressionBuilder.ParsingContext(params, metrics)), tree); } catch (StackOverflowError e) { throw new ParsingException("ESQL statement is too large, causing stack overflow when generating the parsing tree: [{}]", query); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 114fcda1e634a..9d7c3dd908477 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -62,6 +62,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.math.BigInteger; @@ -115,10 +116,12 @@ public abstract class ExpressionBuilder extends IdentifierBuilder { */ public static final int MAX_EXPRESSION_DEPTH = 400; - protected final QueryParams params; + protected final ParsingContext context; - ExpressionBuilder(QueryParams params) { - this.params = params; + public record ParsingContext(QueryParams params, PlanTelemetry telemetry) {} + + 
ExpressionBuilder(ParsingContext context) { + this.context = context; } protected Expression expression(ParseTree ctx) { @@ -621,7 +624,9 @@ public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionConte @Override public String visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { - return visitIdentifierOrParameter(ctx.identifierOrParameter()); + var name = visitIdentifierOrParameter(ctx.identifierOrParameter()); + context.telemetry().function(name); + return name; } @Override @@ -683,7 +688,9 @@ private Expression castToType(Source source, ParseTree parseTree, EsqlBaseParser throw new ParsingException(source, "Unsupported conversion to type [{}]", dataType); } Expression expr = expression(parseTree); - return converterToFactory.apply(source, expr); + var convertFunction = converterToFactory.apply(source, expr); + context.telemetry().function(convertFunction.getClass()); + return convertFunction; } @Override @@ -788,9 +795,9 @@ public NamedExpression visitEnrichWithClause(EsqlBaseParser.EnrichWithClauseCont private NamedExpression enrichFieldName(EsqlBaseParser.QualifiedNamePatternContext ctx) { return visitQualifiedNamePattern(ctx, ne -> { - if (ne instanceof UnresolvedNamePattern up) { + if (ne instanceof UnresolvedNamePattern || ne instanceof UnresolvedStar) { var src = ne.source(); - throw new ParsingException(src, "Using wildcards [*] in ENRICH WITH projections is not allowed [{}]", up.pattern()); + throw new ParsingException(src, "Using wildcards [*] in ENRICH WITH projections is not allowed, found [{}]", src.text()); } }); } @@ -915,10 +922,10 @@ QueryParam paramByToken(TerminalNode node) { return null; } Token token = node.getSymbol(); - if (params.contains(token) == false) { + if (context.params().contains(token) == false) { throw new ParsingException(source(node), "Unexpected parameter"); } - return params.get(token); + return context.params().get(token); } QueryParam paramByNameOrPosition(TerminalNode node) { @@ -929,26 +936,28 @@ QueryParam paramByNameOrPosition(TerminalNode node) { String nameOrPosition = token.getText().substring(1); if (isInteger(nameOrPosition)) { int index = Integer.parseInt(nameOrPosition); - if (params.get(index) == null) { + if (context.params().get(index) == null) { String message = ""; - int np = params.size(); + int np = context.params().size(); if (np > 0) { message = ", did you mean " + (np == 1 ? "position 1?" : "any position between 1 and " + np + "?"); } - params.addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message)); + context.params() + .addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message)); } - return params.get(index); + return context.params().get(index); } else { - if (params.contains(nameOrPosition) == false) { + if (context.params().contains(nameOrPosition) == false) { String message = ""; - List potentialMatches = StringUtils.findSimilar(nameOrPosition, params.namedParams().keySet()); + List potentialMatches = StringUtils.findSimilar(nameOrPosition, context.params().namedParams().keySet()); if (potentialMatches.size() > 0) { message = ", did you mean " + (potentialMatches.size() == 1 ? "[" + potentialMatches.get(0) + "]?" 
: "any of " + potentialMatches + "?"); } - params.addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message)); + context.params() + .addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message)); } - return params.get(nameOrPosition); + return context.params().get(nameOrPosition); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index ae2379318474b..fdcd7fc961b9c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.common.Strings; import org.elasticsearch.indices.InvalidIndexNameException; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IdentifierContext; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IndexStringContext; @@ -51,10 +52,24 @@ protected static String quoteIdString(String unquotedString) { return "`" + unquotedString.replace("`", "``") + "`"; } + @Override + public String visitClusterString(EsqlBaseParser.ClusterStringContext ctx) { + if (ctx == null) { + return null; + } else if (ctx.UNQUOTED_SOURCE() != null) { + return ctx.UNQUOTED_SOURCE().getText(); + } else { + return unquote(ctx.QUOTED_STRING().getText()); + } + } + @Override public String visitIndexString(IndexStringContext ctx) { - TerminalNode n = ctx.UNQUOTED_SOURCE(); - return n != null ? n.getText() : unquote(ctx.QUOTED_STRING().getText()); + if (ctx.UNQUOTED_SOURCE() != null) { + return ctx.UNQUOTED_SOURCE().getText(); + } else { + return unquote(ctx.QUOTED_STRING().getText()); + } } public String visitIndexPattern(List ctx) { @@ -62,18 +77,26 @@ public String visitIndexPattern(List ctx) { Holder hasSeenStar = new Holder<>(false); ctx.forEach(c -> { String indexPattern = visitIndexString(c.indexString()); - String clusterString = c.clusterString() != null ? c.clusterString().getText() : null; + String clusterString = visitClusterString(c.clusterString()); // skip validating index on remote cluster, because the behavior of remote cluster is not consistent with local cluster // For example, invalid#index is an invalid index name, however FROM *:invalid#index does not return an error if (clusterString == null) { hasSeenStar.set(indexPattern.contains(WILDCARD) || hasSeenStar.get()); validateIndexPattern(indexPattern, c, hasSeenStar.get()); + } else { + validateClusterString(clusterString, c); } patterns.add(clusterString != null ? clusterString + REMOTE_CLUSTER_INDEX_SEPARATOR + indexPattern : indexPattern); }); return Strings.collectionToDelimitedString(patterns, ","); } + protected static void validateClusterString(String clusterString, EsqlBaseParser.IndexPatternContext ctx) { + if (clusterString.indexOf(RemoteClusterService.REMOTE_CLUSTER_INDEX_SEPARATOR) != -1) { + throw new ParsingException(source(ctx), "cluster string [{}] must not contain ':'", clusterString); + } + } + private static void validateIndexPattern(String indexPattern, EsqlBaseParser.IndexPatternContext ctx, boolean hasSeenStar) { // multiple index names can be in the same double quote, e.g. 
indexPattern = "idx1, *, -idx2" String[] indices = indexPattern.split(","); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 7ddd3dafd2784..4645a15bc196b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -18,6 +18,8 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -28,6 +30,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -38,6 +41,7 @@ import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.ChangePoint; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -94,29 +98,19 @@ interface PlanFactory extends Function {} */ public static final int MAX_QUERY_DEPTH = 500; - public LogicalPlanBuilder(QueryParams params) { - super(params); + public LogicalPlanBuilder(ParsingContext context) { + super(context); } private int queryDepth = 0; protected LogicalPlan plan(ParseTree ctx) { LogicalPlan p = ParserUtils.typedParsing(this, ctx, LogicalPlan.class); - var errors = this.params.parsingErrors(); + var errors = this.context.params().parsingErrors(); if (errors.hasNext() == false) { return p; } else { - StringBuilder message = new StringBuilder(); - int i = 0; - - while (errors.hasNext()) { - if (i > 0) { - message.append("; "); - } - message.append(errors.next().getMessage()); - i++; - } - throw new ParsingException(message.toString()); + throw ParsingException.combineParsingExceptions(errors); } } @@ -126,7 +120,9 @@ protected List plans(List ctxs) { @Override public LogicalPlan visitSingleStatement(EsqlBaseParser.SingleStatementContext ctx) { - return plan(ctx.query()); + var plan = plan(ctx.query()); + telemetryAccounting(plan); + return plan; } @Override @@ -141,6 +137,7 @@ public LogicalPlan visitCompositeQuery(EsqlBaseParser.CompositeQueryContext ctx) } try { LogicalPlan input = plan(ctx.query()); + telemetryAccounting(input); PlanFactory makePlan = typedParsing(this, ctx.processingCommand(), PlanFactory.class); return makePlan.apply(input); } finally { @@ -148,6 +145,13 @@ public LogicalPlan visitCompositeQuery(EsqlBaseParser.CompositeQueryContext ctx) } } + private LogicalPlan telemetryAccounting(LogicalPlan node) { + if (node instanceof TelemetryAware ma) 
{ + this.context.telemetry().command(ma); + } + return node; + } + @Override public PlanFactory visitEvalCommand(EsqlBaseParser.EvalCommandContext ctx) { return p -> new Eval(source(ctx), p, visitFields(ctx.fields())); @@ -420,7 +424,11 @@ public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { : matchField instanceof UnresolvedStar ? WILDCARD : null; if (patternString != null) { - throw new ParsingException(source, "Using wildcards [*] in ENRICH WITH projections is not allowed [{}]", patternString); + throw new ParsingException( + source, + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [{}]", + patternString + ); } List keepClauses = visitList(this, ctx.enrichWithClause(), NamedExpression.class); @@ -437,6 +445,24 @@ public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { }; } + @Override + public PlanFactory visitChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx) { + Source src = source(ctx); + Attribute value = visitQualifiedName(ctx.value); + Attribute key = ctx.key == null ? new UnresolvedAttribute(src, "@timestamp") : visitQualifiedName(ctx.key); + Attribute targetType = new ReferenceAttribute( + src, + ctx.targetType == null ? "type" : visitQualifiedName(ctx.targetType).name(), + DataType.KEYWORD + ); + Attribute targetPvalue = new ReferenceAttribute( + src, + ctx.targetPvalue == null ? "pvalue" : visitQualifiedName(ctx.targetPvalue).name(), + DataType.DOUBLE + ); + return child -> new ChangePoint(src, child, value, key, targetType, targetPvalue); + } + private static Tuple parsePolicyName(Token policyToken) { String stringValue = policyToken.getText(); int index = stringValue.indexOf(":"); @@ -482,8 +508,7 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) false, List.of(new MetadataAttribute(source, MetadataAttribute.TSID_FIELD, DataType.KEYWORD, false)), IndexMode.TIME_SERIES, - null, - "FROM TS" + null ); return new Aggregate(source, relation, Aggregate.AggregateType.METRICS, stats.groupings, stats.aggregates); } @@ -515,11 +540,11 @@ public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { var source = source(ctx); - if (false == Build.current().isSnapshot()) { + if (false == EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()) { throw new ParsingException(source, "JOIN is in preview and only available in SNAPSHOT build"); } - if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.DEV_JOIN_LOOKUP) { + if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.JOIN_LOOKUP) { String joinType = ctx.type == null ? "(INNER)" : ctx.type.getText(); throw new ParsingException(source, "only LOOKUP JOIN available, {} JOIN unsupported at the moment", joinType); } @@ -543,8 +568,7 @@ public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { false, emptyList(), IndexMode.LOOKUP, - null, - "???" 
+ null ); var condition = ctx.joinCondition(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java index c25ab92437bfc..119e96bbd865c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java @@ -9,6 +9,8 @@ import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.util.Iterator; + import static org.elasticsearch.common.logging.LoggerMessageFormat.format; public class ParsingException extends EsqlClientException { @@ -21,6 +23,10 @@ public ParsingException(String message, Exception cause, int line, int charPosit this.charPositionInLine = charPositionInLine + 1; } + /** + * To be used only if the exception cannot be associated with a specific position in the query. + * Error message will start with {@code line -1:-1:} instead of using specific location. + */ public ParsingException(String message, Object... args) { this(Source.EMPTY, message, args); } @@ -37,6 +43,38 @@ public ParsingException(Exception cause, Source source, String message, Object.. this.charPositionInLine = source.source().getColumnNumber(); } + private ParsingException(int line, int charPositionInLine, String message, Object... args) { + super(message, args); + this.line = line; + this.charPositionInLine = charPositionInLine; + } + + /** + * Combine multiple {@code ParsingException} into one, this is used by {@code LogicalPlanBuilder} to + * consolidate multiple named parameters related {@code ParsingException}. + */ + public static ParsingException combineParsingExceptions(Iterator parsingExceptions) { + StringBuilder message = new StringBuilder(); + int i = 0; + int line = -1; + int charPositionInLine = -1; + + while (parsingExceptions.hasNext()) { + ParsingException e = parsingExceptions.next(); + if (i > 0) { + message.append("; "); + message.append(e.getMessage()); + } else { + // line and column numbers are the associated with the first error + line = e.getLineNumber(); + charPositionInLine = e.getColumnNumber(); + message.append(e.getErrorMessage()); + } + i++; + } + return new ParsingException(line, charPositionInLine, message.toString()); + } + public int getLineNumber() { return line; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java index b3c273cbfa1bb..ab2b6b6e05858 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java @@ -43,7 +43,6 @@ import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; -import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.SubqueryExec; @@ -57,7 +56,7 @@ public class PlanWritables { public static List getNamedWriteables() { List entries = new ArrayList<>(); entries.addAll(logical()); - entries.addAll(phsyical()); + entries.addAll(physical()); return entries; } 
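As a side reference for the consolidated error handling introduced in ParsingException above, a minimal usage sketch; the class name and messages here are illustrative only, and just the joining semantics come from combineParsingExceptions itself:

    import java.util.Iterator;
    import java.util.List;
    import org.elasticsearch.xpack.esql.parser.ParsingException;

    class CombineParsingExceptionsSketch {
        static ParsingException combineTwo() {
            // Errors as QueryParams.addParsingError() would have collected them; the
            // position-less constructor reports line -1:-1 per its new javadoc.
            Iterator<ParsingException> errors = List.of(
                new ParsingException("Unknown query parameter [n1]"),
                new ParsingException("No parameter is defined for position 2")
            ).iterator();
            // Line/column come from the first error; the messages are joined with "; ".
            return ParsingException.combineParsingExceptions(errors);
        }
    }

This is the consolidation that LogicalPlanBuilder#plan now relies on in place of its old inline StringBuilder loop.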
@@ -84,7 +83,7 @@ public static List logical() { ); } - public static List phsyical() { + public static List physical() { return List.of( AggregateExec.ENTRY, DissectExec.ENTRY, @@ -103,7 +102,6 @@ public static List phsyical() { LimitExec.ENTRY, LocalSourceExec.ENTRY, MvExpandExec.ENTRY, - OrderExec.ENTRY, ProjectExec.ENTRY, ShowExec.ENTRY, SubqueryExec.ENTRY, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index 0111d23fac281..8cff1d4c88e90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -39,7 +40,7 @@ import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; import static org.elasticsearch.xpack.esql.plan.logical.Filter.checkFilterConditionDataType; -public class Aggregate extends UnaryPlan implements PostAnalysisVerificationAware { +public class Aggregate extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Aggregate", @@ -142,7 +143,7 @@ public List aggregates() { } @Override - public String commandName() { + public String telemetryLabel() { return switch (aggregateType) { case STANDARD -> "STATS"; case METRICS -> "METRICS"; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/ChangePoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/ChangePoint.java new file mode 100644 index 0000000000000..5814256838b1e --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/ChangePoint.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.esql.plan.logical; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.operator.ChangePointOperator; +import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.common.Failures; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.NamedExpressions; +import org.elasticsearch.xpack.esql.expression.Order; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.esql.common.Failure.fail; + +/** + * Plan that detects change points in a list of values. See also: + *
<ul> + *     <li>{@link org.elasticsearch.compute.operator.ChangePointOperator}</li> + *     <li>{@link org.elasticsearch.xpack.ml.aggs.changepoint.ChangePointDetector}</li> + * </ul>
    + * + * ChangePoint should always run on the coordinating node after the data is collected + * in sorted order by key. This is enforced by adding OrderBy in the surrogate plan. + * Furthermore, ChangePoint should be called with at most 1000 data points. That's + * enforced by the Limit in the surrogate plan. + */ +public class ChangePoint extends UnaryPlan implements SurrogateLogicalPlan, PostAnalysisVerificationAware { + + private final Attribute value; + private final Attribute key; + private final Attribute targetType; + private final Attribute targetPvalue; + + private List output; + + public ChangePoint(Source source, LogicalPlan child, Attribute value, Attribute key, Attribute targetType, Attribute targetPvalue) { + super(source, child); + this.value = value; + this.key = key; + this.targetType = targetType; + this.targetPvalue = targetPvalue; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("not serialized"); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException("not serialized"); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ChangePoint::new, child(), value, key, targetType, targetPvalue); + } + + @Override + public UnaryPlan replaceChild(LogicalPlan newChild) { + return new ChangePoint(source(), newChild, value, key, targetType, targetPvalue); + } + + @Override + public List output() { + if (output == null) { + output = NamedExpressions.mergeOutputAttributes(List.of(targetType, targetPvalue), child().output()); + } + return output; + } + + public Attribute value() { + return value; + } + + public Attribute key() { + return key; + } + + public Attribute targetType() { + return targetType; + } + + public Attribute targetPvalue() { + return targetPvalue; + } + + @Override + protected AttributeSet computeReferences() { + return Expressions.references(List.of(key, value)); + } + + @Override + public boolean expressionsResolved() { + return value.resolved() && key.resolved(); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), value, key, targetType, targetPvalue); + } + + @Override + public boolean equals(Object other) { + return super.equals(other) + && Objects.equals(value, ((ChangePoint) other).value) + && Objects.equals(key, ((ChangePoint) other).key) + && Objects.equals(targetType, ((ChangePoint) other).targetType) + && Objects.equals(targetPvalue, ((ChangePoint) other).targetPvalue); + } + + private Order order() { + return new Order(source(), key, Order.OrderDirection.ASC, Order.NullsPosition.ANY); + } + + @Override + public LogicalPlan surrogate() { + OrderBy orderBy = new OrderBy(source(), child(), List.of(order())); + // The first Limit of N+1 data points is necessary to generate a possible warning, + Limit limit = new Limit( + source(), + new Literal(Source.EMPTY, ChangePointOperator.INPUT_VALUE_COUNT_LIMIT + 1, DataType.INTEGER), + orderBy + ); + ChangePoint changePoint = new ChangePoint(source(), limit, value, key, targetType, targetPvalue); + // The second Limit of N data points is to truncate the output. 
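+ // Shape of the resulting surrogate plan, top to bottom
+ // (N = ChangePointOperator.INPUT_VALUE_COUNT_LIMIT):
+ //   Limit(N) -> ChangePoint -> Limit(N + 1) -> OrderBy(key ASC) -> child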
+ return new Limit(source(), new Literal(Source.EMPTY, ChangePointOperator.INPUT_VALUE_COUNT_LIMIT, DataType.INTEGER), changePoint); + } + + @Override + public void postAnalysisVerification(Failures failures) { + // Key must be sortable + Order order = order(); + if (DataType.isSortable(order.dataType()) == false) { + failures.add(fail(this, "change point key [" + key.name() + "] must be sortable")); + } + // Value must be a number + if (value.dataType().isNumeric() == false) { + failures.add(fail(this, "change point value [" + value.name() + "] must be numeric")); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java index a83e102e51005..9200850b2f9db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.dissect.DissectParser; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -25,7 +26,7 @@ import java.util.List; import java.util.Objects; -public class Dissect extends RegexExtract { +public class Dissect extends RegexExtract implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Dissect", Dissect::new); private final Parser parser; @@ -123,11 +124,6 @@ public boolean equals(Object o) { return Objects.equals(parser, dissect.parser); } - @Override - public String commandName() { - return "DISSECT"; - } - @Override public int hashCode() { return Objects.hash(super.hashCode(), parser); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java index add5a2d576c00..c8668f58ab5c0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -16,7 +17,7 @@ import java.util.List; import java.util.Objects; -public class Drop extends UnaryPlan { +public class Drop extends UnaryPlan implements TelemetryAware, SortAgnostic { private final List removals; public Drop(Source source, LogicalPlan child, List removals) { @@ -38,10 +39,6 @@ public List removals() { return removals; } - public String commandName() { - return "DROP"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(removals); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 9b81060349815..11e9a57064e5b 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -18,6 +18,7 @@ import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -48,7 +49,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware { +public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Enrich", @@ -202,10 +203,6 @@ protected AttributeSet computeReferences() { return matchField.references(); } - public String commandName() { - return "ENRICH"; - } - @Override public boolean expressionsResolved() { return policyName.resolved() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 90b3aa8625087..448085df1e831 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -172,11 +172,6 @@ public Set concreteIndices() { return indexNameWithModes.keySet(); } - @Override - public String commandName() { - return "FROM"; - } - @Override public boolean expressionsResolved() { // For unresolved expressions to exist in EsRelation is fine, as long as they are not used in later operations diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java index cbd79011032df..af81e26d57c60 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -37,7 +38,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware { +public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new 
NamedWriteableRegistry.Entry(LogicalPlan.class, "Eval", Eval::new); private final List fields; @@ -131,11 +132,6 @@ private List renameAliases(List originalAttributes, List n return newFieldsWithUpdatedRefs; } - @Override - public String commandName() { - return "EVAL"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(fields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java index 38e7c19522df6..bd49ed04881cc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -17,7 +18,7 @@ import java.util.List; import java.util.Objects; -public class Explain extends LeafPlan { +public class Explain extends LeafPlan implements TelemetryAware { public enum Type { PARSED, @@ -69,11 +70,6 @@ public List output() { ); } - @Override - public String commandName() { - return "EXPLAIN"; - } - @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java index 0fae5e5831fc7..7a1726ea59e97 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -28,7 +29,7 @@ * {@code SELECT x FROM y WHERE z ..} the "WHERE" clause is a Filter. A * {@code Filter} has a "condition" Expression that does the filtering. 
*/ -public class Filter extends UnaryPlan implements PostAnalysisVerificationAware { +public class Filter extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Filter", Filter::new); private final Expression condition; @@ -69,7 +70,7 @@ public Expression condition() { } @Override - public String commandName() { + public String telemetryLabel() { return "WHERE"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java index fcfd1ac0f04da..1fab2cbecd034 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java @@ -15,6 +15,7 @@ import org.elasticsearch.grok.GrokCaptureType; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -31,7 +32,7 @@ import java.util.Objects; import java.util.stream.Collectors; -public class Grok extends RegexExtract { +public class Grok extends RegexExtract implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Grok", Grok::readFrom); public record Parser(String pattern, org.elasticsearch.grok.Grok grok) { @@ -148,11 +149,6 @@ public boolean equals(Object o) { return Objects.equals(parser, grok.parser); } - @Override - public String commandName() { - return "GROK"; - } - @Override public int hashCode() { return Objects.hash(super.hashCode(), parser); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 4211f8a0d45b6..724aa2da25983 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -36,7 +37,7 @@ * underlying aggregate. *

    */ -public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan { +public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "InlineStats", @@ -80,11 +81,6 @@ public Aggregate aggregate() { return aggregate; } - @Override - public String commandName() { - return "INLINESTATS"; - } - @Override public boolean expressionsResolved() { return aggregate.expressionsResolved(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java index 4c03d68e6e6f7..268c6bbe17242 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -14,7 +15,7 @@ import java.util.List; import java.util.Objects; -public class Keep extends Project { +public class Keep extends Project implements TelemetryAware, SortAgnostic { public Keep(Source source, LogicalPlan child, List projections) { super(source, child, projections); @@ -44,9 +45,4 @@ public int hashCode() { public boolean equals(Object obj) { return super.equals(obj); } - - @Override - public String commandName() { - return "KEEP"; - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java index ea64b7687f4c0..a59433e94f965 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -17,25 +18,56 @@ import java.io.IOException; import java.util.Objects; -public class Limit extends UnaryPlan { +public class Limit extends UnaryPlan implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Limit", Limit::new); private final Expression limit; - + /** + * Important for optimizations. This should be {@code false} in most cases, which allows this instance to be duplicated past a child + * plan node that increases the number of rows, like for LOOKUP JOIN and MV_EXPAND. + * Needs to be set to {@code true} in {@link org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownAndCombineLimits} to avoid + * infinite loops from adding a duplicate of the limit past the child over and over again. + */ + private final transient boolean duplicated; + + /** + * Default way to create a new instance. 
Do not use this to copy an existing instance, as this sets {@link Limit#duplicated} to + * {@code false}. + */ public Limit(Source source, Expression limit, LogicalPlan child) { + this(source, limit, child, false); + } + + public Limit(Source source, Expression limit, LogicalPlan child, boolean duplicated) { super(source, child); this.limit = limit; + this.duplicated = duplicated; } + /** + * Omits reading {@link Limit#duplicated}, c.f. {@link Limit#writeTo}. + */ private Limit(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(LogicalPlan.class)); + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(LogicalPlan.class), + false + ); } + /** + * Omits serializing {@link Limit#duplicated} because when sent to a data node, this should always be {@code false}. + * That's because if it's true, this means a copy of this limit was pushed down below an MvExpand or Join, and thus there's + * another pipeline breaker further upstream - we're already on the coordinator node. + */ @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); out.writeNamedWriteable(limit()); out.writeNamedWriteable(child()); + // Let's make sure we notice during tests if we ever serialize a duplicated Limit. + assert duplicated == false; } @Override @@ -45,21 +77,28 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Limit::new, limit, child()); + return NodeInfo.create(this, Limit::new, limit, child(), duplicated); } @Override public Limit replaceChild(LogicalPlan newChild) { - return new Limit(source(), limit, newChild); + return new Limit(source(), limit, newChild, duplicated); } public Expression limit() { return limit; } - @Override - public String commandName() { - return "LIMIT"; + public Limit withLimit(Expression limit) { + return new Limit(source(), limit, child(), duplicated); + } + + public boolean duplicated() { + return duplicated; + } + + public Limit withDuplicated(boolean duplicated) { + return new Limit(source(), limit, child(), duplicated); } @Override @@ -69,7 +108,7 @@ public boolean expressionsResolved() { @Override public int hashCode() { - return Objects.hash(limit, child()); + return Objects.hash(limit, child(), duplicated); } @Override @@ -83,6 +122,6 @@ public boolean equals(Object obj) { Limit other = (Limit) obj; - return Objects.equals(limit, other.limit) && Objects.equals(child(), other.child()); + return Objects.equals(limit, other.limit) && Objects.equals(child(), other.child()) && (duplicated == other.duplicated); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java index e845c25bd3b32..ac4baea8bc853 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java @@ -75,8 +75,6 @@ public boolean resolved() { return lazyResolved; } - public abstract String commandName(); - public abstract boolean expressionsResolved(); @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 6e7f421003292..56dae7b1f16c0 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -31,7 +32,7 @@ * Looks up values from the associated {@code tables}. * The class is supposed to be substituted by a {@link Join}. */ -public class Lookup extends UnaryPlan implements SurrogateLogicalPlan { +public class Lookup extends UnaryPlan implements SurrogateLogicalPlan, TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Lookup", Lookup::new); private final Expression tableName; @@ -117,11 +118,6 @@ public JoinConfig joinConfig() { return new JoinConfig(JoinTypes.LEFT, matchFields, leftFields, rightFields); } - @Override - public String commandName() { - return "LOOKUP"; - } - @Override public boolean expressionsResolved() { return tableName.resolved() && Resolvables.resolved(matchFields) && localRelation != null; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java index 949e4906e5033..f65811fc26526 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -22,24 +23,18 @@ import java.util.List; import java.util.Objects; -public class MvExpand extends UnaryPlan { +public class MvExpand extends UnaryPlan implements TelemetryAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "MvExpand", MvExpand::new); private final NamedExpression target; private final Attribute expanded; - private final Integer limit; private List output; public MvExpand(Source source, LogicalPlan child, NamedExpression target, Attribute expanded) { - this(source, child, target, expanded, null); - } - - public MvExpand(Source source, LogicalPlan child, NamedExpression target, Attribute expanded, Integer limit) { super(source, child); this.target = target; this.expanded = expanded; - this.limit = limit; } private MvExpand(StreamInput in) throws IOException { @@ -47,8 +42,7 @@ private MvExpand(StreamInput in) throws IOException { Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(LogicalPlan.class), in.readNamedWriteable(NamedExpression.class), - in.readNamedWriteable(Attribute.class), - null // we only need this on the coordinator + in.readNamedWriteable(Attribute.class) ); } @@ -58,7 +52,6 @@ public void 
writeTo(StreamOutput out) throws IOException { out.writeNamedWriteable(child()); out.writeNamedWriteable(target()); out.writeNamedWriteable(expanded()); - assert limit == null; } @Override @@ -86,16 +79,12 @@ public Attribute expanded() { return expanded; } - public Integer limit() { - return limit; - } - @Override protected AttributeSet computeReferences() { return target.references(); } - public String commandName() { + public String telemetryLabel() { return "MV_EXPAND"; } @@ -105,8 +94,8 @@ public boolean expressionsResolved() { } @Override - public UnaryPlan replaceChild(LogicalPlan newChild) { - return new MvExpand(source(), newChild, target, expanded, limit); + public MvExpand replaceChild(LogicalPlan newChild) { + return new MvExpand(source(), newChild, target, expanded); } @Override @@ -119,12 +108,12 @@ public List output() { @Override protected NodeInfo info() { - return NodeInfo.create(this, MvExpand::new, child(), target, expanded, limit); + return NodeInfo.create(this, MvExpand::new, child(), target, expanded); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), target, expanded, limit); + return Objects.hash(super.hashCode(), target, expanded); } @Override @@ -133,6 +122,6 @@ public boolean equals(Object obj) { return false; } MvExpand other = ((MvExpand) obj); - return Objects.equals(target, other.target) && Objects.equals(expanded, other.expanded) && Objects.equals(limit, other.limit); + return Objects.equals(target, other.target) && Objects.equals(expanded, other.expanded); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java index d927d78701c65..ddb07e0490db3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java @@ -10,6 +10,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -24,7 +26,12 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class OrderBy extends UnaryPlan implements PostAnalysisVerificationAware { +public class OrderBy extends UnaryPlan + implements + PostAnalysisVerificationAware, + PostOptimizationVerificationAware, + TelemetryAware, + SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "OrderBy", OrderBy::new); private final List order; @@ -69,7 +76,7 @@ public List order() { } @Override - public String commandName() { + public String telemetryLabel() { return "SORT"; } @@ -108,4 +115,9 @@ public void postAnalysisVerification(Failures failures) { } }); } + + @Override + public void postOptimizationVerification(Failures failures) { + failures.add(fail(this, "Unbounded sort not supported yet [{}] please add a limit", this.sourceText())); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java 
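The postOptimizationVerification override added to OrderBy above turns any SORT that survives logical optimization into a hard failure: a bounded SORT should have been rewritten into a TopN by that point, so whatever is left is unbounded. A sketch of how a verifier could drive this hook; the traversal below is illustrative, not the PR's actual verifier wiring:

    // Illustrative only: walk the optimized plan and let each node verify itself.
    static Failures verifyOptimized(LogicalPlan optimizedPlan) {
        Failures failures = new Failures();
        optimizedPlan.forEachUp(node -> {
            if (node instanceof PostOptimizationVerificationAware aware) {
                // OrderBy's implementation unconditionally fails, so any SORT still
                // present here (i.e. not folded into a TopN) aborts the query.
                aware.postOptimizationVerification(failures);
            }
        });
        return failures;
    }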
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java index 841e7fbe81896..a36341f60525a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java @@ -25,7 +25,7 @@ /** * A {@code Project} is a {@code Plan} with one child. In {@code SELECT x FROM y}, the "SELECT" statement is a Project. */ -public class Project extends UnaryPlan { +public class Project extends UnaryPlan implements SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Project", Project::new); private final List projections; @@ -78,14 +78,6 @@ public boolean resolved() { return super.resolved() && Expressions.anyMatch(projections, Functions::isAggregate) == false; } - @Override - public String commandName() { - // this could represent multiple commands (KEEP, DROP, RENAME) - // and should not be present in a pre-analyzed plan. - // maybe it should throw exception? - return ""; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(projections); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java index d691507b62cb3..f111b5d03edb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java @@ -24,7 +24,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public abstract class RegexExtract extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware { +public abstract class RegexExtract extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware, SortAgnostic { protected final Expression input; protected final List extractedFields; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java index 773d3fd015e5f..c609bfdae87e7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.analysis.Analyzer.ResolveRefs; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -20,7 +21,7 @@ import java.util.List; import java.util.Objects; -public class Rename extends UnaryPlan { +public class Rename extends UnaryPlan implements TelemetryAware, SortAgnostic { private final List renamings; @@ -51,11 +52,6 @@ public List output() { return Expressions.asAttributes(projectionsAfterResolution); } - @Override - public String commandName() { - return "RENAME"; - } - @Override public boolean expressionsResolved() { for (var alias : renamings) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java index 65d1adf5e2799..005ca45d19131 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -23,7 +24,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class Row extends LeafPlan implements PostAnalysisVerificationAware { +public class Row extends LeafPlan implements PostAnalysisVerificationAware, TelemetryAware { private final List fields; @@ -51,11 +52,6 @@ public List output() { return Expressions.asAttributes(fields); } - @Override - public String commandName() { - return "ROW"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(fields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/SortAgnostic.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/SortAgnostic.java new file mode 100644 index 0000000000000..3955b542ca496 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/SortAgnostic.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical; + +/** + * This interface is intended to check redundancy of a previous SORT. + *

+ * Typical examples are commands that compute values record by record, regardless of the input order,
+ * and that don't rely on the context (i.e. on the previous/next records).
+ *
+ * Example 1: if a MY_COMMAND that implements this interface is used between two sorts,
+ * then we can assume that
+ *
+ *     | SORT x, y, z | MY_COMMAND | SORT a, b, c
+ *
+ * is equivalent to
+ *
+ *     | MY_COMMAND | SORT a, b, c
+ *
+ * Example 2: commands that make the previous order irrelevant, e.g. because they collapse the results;
+ * STATS is one of them:
+ *
+ *     | SORT x, y, z | STATS count(*)
+ *
+ * is equivalent to
+ *
+ *     | STATS count(*)
+ *
+ * and if MY_COMMAND implements this interface, then
+ *
+ *     | SORT x, y, z | MY_COMMAND | STATS count(*)
+ *
+ * is equivalent to
+ *
+ *     | MY_COMMAND | STATS count(*)
+ *
+ * In all other cases, e.g. if the command does not implement this interface,
+ * we assume that the previous SORT is still relevant and cannot be pruned.
+ *
+ * LIMIT, for example, does not implement this interface, because
+ *
+ *     | SORT x, y, z | LIMIT 10 | SORT a, b, c
+ *
+ * is NOT equivalent to
+ *
+ *     | LIMIT 10 | SORT a, b, c
    + *

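Read together, the examples above amount to a pruning rule: an outer SORT, or an order-destroying command like STATS, makes an inner SORT redundant when every node in between is SortAgnostic. A minimal sketch of such a rule for a simple unary chain; the method name and shape are hypothetical, not part of this change, and the closing javadoc paragraph below covers the n-ary case:

    // Hypothetical pruning step built on SortAgnostic; not the rule shipped here.
    static LogicalPlan pruneRedundantInnerSort(OrderBy upperSort) {
        // Walk down through nodes that neither consume nor disturb the incoming order.
        LogicalPlan node = upperSort.child();
        while (node instanceof SortAgnostic && node instanceof UnaryPlan unary) {
            node = unary.child();
        }
        if (node instanceof OrderBy innerSort) {
            // The inner SORT is redundant: splice in its child, keep the outer SORT.
            return upperSort.transformDown(OrderBy.class, o -> o == innerSort ? innerSort.child() : o);
        }
        return upperSort;
    }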
    + * + * For n-ary plans that implement this interface, + * we assume that the above applies to all the children + * + */ +public interface SortAgnostic {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java index d6e0e4334bd47..a9a5dbddc544f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java @@ -55,13 +55,6 @@ public String getWriteableName() { return ENTRY.name; } - @Override - public String commandName() { - // this is the result of optimizations, it will never appear in a pre-analyzed plan - // maybe we should throw exception? - return ""; - } - @Override public boolean expressionsResolved() { return limit.resolved() && Resolvables.resolved(order); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index 0a20e1dd9080d..5d22a86b2cdf7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -8,11 +8,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.IndexPattern; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.Collections; import java.util.List; @@ -20,7 +22,7 @@ import static java.util.Collections.singletonList; -public class UnresolvedRelation extends LeafPlan implements Unresolvable { +public class UnresolvedRelation extends LeafPlan implements Unresolvable, TelemetryAware { private final IndexPattern indexPattern; private final boolean frozen; @@ -56,6 +58,17 @@ public UnresolvedRelation( this.commandName = commandName; } + public UnresolvedRelation( + Source source, + IndexPattern table, + boolean frozen, + List metadataFields, + IndexMode indexMode, + String unresolvedMessage + ) { + this(source, table, frozen, metadataFields, indexMode, unresolvedMessage, null); + } + @Override public void writeTo(StreamOutput out) { throw new UnsupportedOperationException("not serialized"); @@ -86,7 +99,7 @@ public boolean resolved() { /** * - * This is used by {@link org.elasticsearch.xpack.esql.stats.PlanningMetrics} to collect query statistics + * This is used by {@link PlanTelemetry} to collect query statistics * It can return *

 * <ul>
 *     <li>"FROM" if this is a |FROM idx command</li>
@@ -95,7 +108,7 @@ public boolean resolved() {
 *
    */ @Override - public String commandName() { + public String telemetryLabel() { return commandName; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/InlineJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/InlineJoin.java index 87c9db1db4807..e3daa4fcbfb99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/InlineJoin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/InlineJoin.java @@ -16,10 +16,9 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; @@ -71,10 +70,9 @@ public static LogicalPlan inlineData(InlineJoin target, LocalRelation data) { List aliases = new ArrayList<>(schema.size()); for (int i = 0; i < schema.size(); i++) { Attribute attr = schema.get(i); - aliases.add(new Alias(attr.source(), attr.name(), Literal.of(attr, BlockUtils.toJavaObject(blocks[i], 0)))); + aliases.add(new Alias(attr.source(), attr.name(), Literal.of(attr, BlockUtils.toJavaObject(blocks[i], 0)), attr.id())); } - LogicalPlan left = target.left(); - return new Project(target.source(), left, CollectionUtils.combine(left.output(), aliases)); + return new Eval(target.source(), target.left(), aliases); } else { return target.replaceRight(data); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index a541142f952e0..f8aeb671290ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.plan.logical.BinaryPlan; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.SortAgnostic; import java.io.IOException; import java.util.ArrayList; @@ -32,7 +33,7 @@ import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT; -public class Join extends BinaryPlan implements PostAnalysisVerificationAware { +public class Join extends BinaryPlan implements PostAnalysisVerificationAware, SortAgnostic { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Join", Join::new); private final JoinConfig config; @@ -63,7 +64,7 @@ public Join(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); + source().writeTo(out); out.writeNamedWriteable(left()); out.writeNamedWriteable(right()); config.writeTo(out); @@ -189,11 +190,6 @@ public Join replaceChildren(LogicalPlan left, LogicalPlan right) { return new Join(source(), left, 
right, config); } - @Override - public String commandName() { - return "JOIN"; - } - @Override public int hashCode() { return Objects.hash(config, left(), right()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java index c29cf0ec7f414..5f1f569e3671b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java @@ -9,6 +9,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -27,7 +28,7 @@ /** * Lookup join - specialized LEFT (OUTER) JOIN between the main left side and a lookup index (index_mode = lookup) on the right. */ -public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware { +public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware, TelemetryAware { public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, List joinFields) { this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList()); @@ -77,6 +78,11 @@ protected NodeInfo info() { ); } + @Override + public String telemetryLabel() { + return "LOOKUP JOIN"; + } + @Override public void postAnalysisVerification(Failures failures) { super.postAnalysisVerification(failures); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java index 4f04024d61d46..33e1f385f9eec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java @@ -67,11 +67,6 @@ protected NodeInfo info() { return NodeInfo.create(this, StubRelation::new, output); } - @Override - public String commandName() { - return ""; - } - @Override public int hashCode() { return Objects.hash(StubRelation.class, output); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java index 07432481d2341..d6106bae6b6b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java @@ -63,14 +63,6 @@ public LocalSupplier supplier() { return supplier; } - @Override - public String commandName() { - // this colud be an empty source, a lookup table or something else - // but it should not be present in a pre-analyzed plan - // maybe we sholud throw exception? 
- return ""; - } - @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java index fa432537d27e3..99c917ba803a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Build; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -22,7 +23,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; -public class ShowInfo extends LeafPlan { +public class ShowInfo extends LeafPlan implements TelemetryAware { private final List attributes; @@ -59,7 +60,7 @@ public List> values() { } @Override - public String commandName() { + public String telemetryLabel() { return "SHOW"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/ChangePointExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/ChangePointExec.java new file mode 100644 index 0000000000000..b0ce1be869909 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/ChangePointExec.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plan.physical; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.NamedExpressions; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class ChangePointExec extends UnaryExec { + + private final Attribute value; + private final Attribute key; + private final Attribute targetType; + private final Attribute targetPvalue; + + private List output; + + public ChangePointExec( + Source source, + PhysicalPlan child, + Attribute value, + Attribute key, + Attribute targetType, + Attribute targetPvalue + ) { + super(source, child); + this.value = value; + this.key = key; + this.targetType = targetType; + this.targetPvalue = targetPvalue; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("not serialized"); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException("not serialized"); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ChangePointExec::new, child(), value, key, targetType, targetPvalue); + } + + @Override + public ChangePointExec replaceChild(PhysicalPlan newChild) { + return new ChangePointExec(source(), newChild, value, key, targetType, targetPvalue); + } + + @Override + protected AttributeSet computeReferences() { + return key.references().combine(value.references()); + } + + @Override + public List output() { + if (output == null) { + output = NamedExpressions.mergeOutputAttributes(List.of(targetType, targetPvalue), child().output()); + } + return output; + } + + public Attribute value() { + return value; + } + + public Attribute key() { + return key; + } + + public Attribute targetType() { + return targetType; + } + + public Attribute targetPvalue() { + return targetPvalue; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), value, key, targetType, targetPvalue); + } + + @Override + public boolean equals(Object other) { + return super.equals(other) + && Objects.equals(value, ((ChangePointExec) other).value) + && Objects.equals(key, ((ChangePointExec) other).key) + && Objects.equals(targetType, ((ChangePointExec) other).targetType) + && Objects.equals(targetPvalue, ((ChangePointExec) other).targetPvalue); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/OrderExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/OrderExec.java deleted file mode 100644 index 9d53e828f4f81..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/OrderExec.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.Order; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class OrderExec extends UnaryExec { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - PhysicalPlan.class, - "OrderExec", - OrderExec::new - ); - - private final List order; - - public OrderExec(Source source, PhysicalPlan child, List order) { - super(source, child); - this.order = order; - } - - private OrderExec(StreamInput in) throws IOException { - this( - Source.readFrom((PlanStreamInput) in), - in.readNamedWriteable(PhysicalPlan.class), - in.readCollectionAsList(org.elasticsearch.xpack.esql.expression.Order::new) - ); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); - out.writeNamedWriteable(child()); - out.writeCollection(order()); - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, OrderExec::new, child(), order); - } - - @Override - public OrderExec replaceChild(PhysicalPlan newChild) { - return new OrderExec(source(), newChild, order); - } - - public List order() { - return order; - } - - @Override - public int hashCode() { - return Objects.hash(order, child()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - OrderExec other = (OrderExec) obj; - - return Objects.equals(order, other.order) && Objects.equals(child(), other.child()); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 072bae21da2a3..8fb51457b6a8a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -95,7 +95,7 @@ public final PhysicalOperation groupingPhysicalOperation( aggregatorMode, sourceLayout, false, // non-grouping - s -> aggregatorFactories.add(s.supplier.aggregatorFactory(s.mode)) + s -> aggregatorFactories.add(s.supplier.aggregatorFactory(s.mode, s.channels)) ); if (aggregatorFactories.isEmpty() == false) { @@ -169,7 +169,7 @@ else if (aggregatorMode.isOutputPartial()) { aggregatorMode, sourceLayout, true, // grouping - s -> aggregatorFactories.add(s.supplier.groupingAggregatorFactory(s.mode)) + s -> aggregatorFactories.add(s.supplier.groupingAggregatorFactory(s.mode, s.channels)) ); if (groupSpecs.size() == 1 && groupSpecs.get(0).channel == null) { @@ -251,7 +251,7 @@ public static List intermediateAttributes(List channels, AggregatorMode mode) {} private void aggregatesToFactory( @@ -308,11 +308,12 @@ else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { } else { throw new EsqlIllegalArgumentException("illegal aggregation 
mode"); } + + AggregatorFunctionSupplier aggSupplier = supplier(aggregateFunction); + List inputChannels = sourceAttr.stream().map(attr -> layout.get(attr.id()).channel()).toList(); assert inputChannels.stream().allMatch(i -> i >= 0) : inputChannels; - AggregatorFunctionSupplier aggSupplier = supplier(aggregateFunction, inputChannels); - // apply the filter only in the initial phase - as the rest of the data is already filtered if (aggregateFunction.hasFilter() && mode.isInputPartial() == false) { EvalOperator.ExpressionEvaluator.Factory evalFactory = EvalMapper.toEvaluator( @@ -322,15 +323,15 @@ else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { ); aggSupplier = new FilteredAggregatorFunctionSupplier(aggSupplier, evalFactory); } - consumer.accept(new AggFunctionSupplierContext(aggSupplier, mode)); + consumer.accept(new AggFunctionSupplierContext(aggSupplier, inputChannels, mode)); } } } } - private static AggregatorFunctionSupplier supplier(AggregateFunction aggregateFunction, List inputChannels) { + private static AggregatorFunctionSupplier supplier(AggregateFunction aggregateFunction) { if (aggregateFunction instanceof ToAggregator delegate) { - return delegate.supplier(inputChannels); + return delegate.supplier(); } throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index e420cd501cccd..505d87d10196c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -20,95 +19,22 @@ import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; -import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; -import org.elasticsearch.xpack.esql.expression.function.aggregate.FromPartial; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; -import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; -import org.elasticsearch.xpack.esql.expression.function.aggregate.NumericAggregate; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; -import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; -import 
org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; -import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent; -import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; -import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; -import java.lang.invoke.MethodHandle; -import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; import java.util.HashMap; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; import java.util.stream.Stream; /** * Static class used to convert aggregate expressions to the named expressions that represent their intermediate state. - *

- * At class load time, the mapper is populated with all supported aggregate functions and their intermediate state.
- *
- * Reflection is used to call the {@code intermediateStateDesc()} static method of the aggregate functions,
- * but the function classes are found based on the existing information within this class.
- *
- * This class must be updated when aggregations are created or updated, by adding the new aggs or types to the corresponding methods.
- *

    */ final class AggregateMapper { - private static final List NUMERIC = List.of("Int", "Long", "Double"); - private static final List SPATIAL_EXTRA_CONFIGS = List.of("SourceValues", "DocValues"); - - /** List of all mappable ESQL agg functions (excludes surrogates like AVG = SUM/COUNT). */ - private static final List> AGG_FUNCTIONS = List.of( - Count.class, - CountDistinct.class, - Max.class, - MedianAbsoluteDeviation.class, - Min.class, - Percentile.class, - SpatialCentroid.class, - SpatialExtent.class, - StdDev.class, - Sum.class, - Values.class, - Top.class, - Rate.class, - - // internal function - FromPartial.class, - ToPartial.class - ); - - /** Record of agg Class, type, and grouping (or non-grouping). */ - private record AggDef(Class aggClazz, String type, String extra, boolean grouping) { - public AggDef withoutExtra() { - return new AggDef(aggClazz, type, "", grouping); - } - } - - /** Map of AggDef types to intermediate named expressions. */ - private static final Map> MAPPER = AGG_FUNCTIONS.stream() - .flatMap(AggregateMapper::typeAndNames) - .flatMap(AggregateMapper::groupingAndNonGrouping) - .collect(Collectors.toUnmodifiableMap(aggDef -> aggDef, AggregateMapper::lookupIntermediateState)); - + // TODO: Do we need this cache? /** Cache of aggregates to intermediate expressions. */ - private final HashMap> cache; - - AggregateMapper() { - cache = new HashMap<>(); - } + private final HashMap> cache = new HashMap<>(); public List mapNonGrouping(List aggregates) { return doMapping(aggregates, false); @@ -148,140 +74,21 @@ private static List computeEntryForAgg(String aggAlias, Express } private static List entryForAgg(String aggAlias, AggregateFunction aggregateFunction, boolean grouping) { - var aggDef = new AggDef( - aggregateFunction.getClass(), - dataTypeToString(aggregateFunction.field().dataType(), aggregateFunction.getClass()), - aggregateFunction instanceof SpatialAggregateFunction ? "SourceValues" : "", - grouping - ); - var is = getNonNull(aggDef); - return isToNE(is, aggAlias).toList(); - } - - /** Gets the agg from the mapper - wrapper around map::get for more informative failure.*/ - private static List getNonNull(AggDef aggDef) { - var l = MAPPER.getOrDefault(aggDef, MAPPER.get(aggDef.withoutExtra())); - if (l == null) { - throw new EsqlIllegalArgumentException("Cannot find intermediate state for: " + aggDef); - } - return l; - } - - private static Stream, Tuple>> typeAndNames(Class clazz) { - List types; - List extraConfigs = List.of(""); - if (NumericAggregate.class.isAssignableFrom(clazz)) { - types = NUMERIC; - } else if (Max.class.isAssignableFrom(clazz) || Min.class.isAssignableFrom(clazz)) { - types = List.of("Boolean", "Int", "Long", "Double", "Ip", "BytesRef"); - } else if (clazz == Count.class) { - types = List.of(""); // no extra type distinction - } else if (clazz == SpatialCentroid.class) { - types = List.of("GeoPoint", "CartesianPoint"); - extraConfigs = SPATIAL_EXTRA_CONFIGS; - } else if (clazz == SpatialExtent.class) { - types = List.of("GeoPoint", "CartesianPoint", "GeoShape", "CartesianShape"); - extraConfigs = SPATIAL_EXTRA_CONFIGS; - } else if (Values.class.isAssignableFrom(clazz)) { - // TODO can't we figure this out from the function itself? 
- types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); - } else if (Top.class.isAssignableFrom(clazz)) { - types = List.of("Boolean", "Int", "Long", "Double", "Ip", "BytesRef"); - } else if (Rate.class.isAssignableFrom(clazz) || StdDev.class.isAssignableFrom(clazz)) { - types = List.of("Int", "Long", "Double"); - } else if (FromPartial.class.isAssignableFrom(clazz) || ToPartial.class.isAssignableFrom(clazz)) { - types = List.of(""); // no type - } else if (CountDistinct.class.isAssignableFrom(clazz)) { - types = Stream.concat(NUMERIC.stream(), Stream.of("Boolean", "BytesRef")).toList(); - } else { - assert false : "unknown aggregate type " + clazz; - throw new IllegalArgumentException("unknown aggregate type " + clazz); - } - return combine(clazz, types, extraConfigs); - } - - private static Stream, Tuple>> combine(Class clazz, List types, List extraConfigs) { - return combinations(types, extraConfigs).map(combo -> new Tuple<>(clazz, combo)); - } - - private static Stream> combinations(List types, List extraConfigs) { - return types.stream().flatMap(type -> extraConfigs.stream().map(config -> new Tuple<>(type, config))); - } - - private static Stream groupingAndNonGrouping(Tuple, Tuple> tuple) { - if (tuple.v1().isAssignableFrom(Rate.class)) { - // rate doesn't support non-grouping aggregations - return Stream.of(new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), true)); + List intermediateState; + if (aggregateFunction instanceof ToAggregator toAggregator) { + var supplier = toAggregator.supplier(); + intermediateState = grouping ? supplier.groupingIntermediateStateDesc() : supplier.nonGroupingIntermediateStateDesc(); } else { - return Stream.of( - new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), true), - new AggDef(tuple.v1(), tuple.v2().v1(), tuple.v2().v2(), false) - ); + throw new EsqlIllegalArgumentException("Aggregate has no defined intermediate state: " + aggregateFunction); } - } - - /** Retrieves the intermediate state description for a given class, type, and grouping. */ - private static List lookupIntermediateState(AggDef aggDef) { - try { - return (List) lookup(aggDef.aggClazz(), aggDef.type(), aggDef.extra(), aggDef.grouping()).invokeExact(); - } catch (Throwable t) { - // invokeExact forces us to handle any Throwable thrown by lookup. - throw new EsqlIllegalArgumentException(t); - } - } - - /** Looks up the intermediate state method for a given class, type, and grouping. */ - private static MethodHandle lookup(Class clazz, String type, String extra, boolean grouping) { - try { - return lookupRetry(clazz, type, extra, grouping); - } catch (IllegalAccessException | NoSuchMethodException | ClassNotFoundException e) { - throw new EsqlIllegalArgumentException(e); - } - } - - private static MethodHandle lookupRetry(Class clazz, String type, String extra, boolean grouping) throws IllegalAccessException, - NoSuchMethodException, ClassNotFoundException { - try { - return MethodHandles.lookup() - .findStatic( - Class.forName(determineAggName(clazz, type, extra, grouping)), - "intermediateStateDesc", - MethodType.methodType(List.class) - ); - } catch (NoSuchMethodException ignore) { - // Retry without the extra information. - return MethodHandles.lookup() - .findStatic( - Class.forName(determineAggName(clazz, type, "", grouping)), - "intermediateStateDesc", - MethodType.methodType(List.class) - ); - } - } - - /** Determines the engines agg class name, for the given class, type, and grouping. 
*/ - private static String determineAggName(Class clazz, String type, String extra, boolean grouping) { - StringBuilder sb = new StringBuilder(); - sb.append(determinePackageName(clazz)).append("."); - sb.append(clazz.getSimpleName()); - sb.append(type); - sb.append(extra); - sb.append(grouping ? "Grouping" : ""); - sb.append("AggregatorFunction"); - return sb.toString(); - } - - /** Determines the engine agg package name, for the given class. */ - private static String determinePackageName(Class clazz) { - if (clazz.getSimpleName().startsWith("Spatial")) { - // All spatial aggs are in the spatial sub-package - return "org.elasticsearch.compute.aggregation.spatial"; - } - return "org.elasticsearch.compute.aggregation"; + return intermediateStateToNamedExpressions(intermediateState, aggAlias).toList(); } /** Maps intermediate state description to named expressions. */ - private static Stream isToNE(List intermediateStateDescs, String aggAlias) { + private static Stream intermediateStateToNamedExpressions( + List intermediateStateDescs, + String aggAlias + ) { return intermediateStateDescs.stream().map(is -> { final DataType dataType; if (Strings.isEmpty(is.dataType())) { @@ -305,36 +112,4 @@ private static DataType toDataType(ElementType elementType) { case FLOAT, NULL, DOC, COMPOSITE, UNKNOWN -> throw new EsqlIllegalArgumentException("unsupported agg type: " + elementType); }; } - - /** Returns the string representation for the data type. This reflects the engine's aggs naming structure. */ - private static String dataTypeToString(DataType type, Class aggClass) { - if (aggClass == Count.class) { - return ""; // no type distinction - } - if (aggClass == ToPartial.class || aggClass == FromPartial.class) { - return ""; - } - if ((aggClass == Max.class || aggClass == Min.class) && type.equals(DataType.IP)) { - return "Ip"; - } - if (aggClass == Top.class && type.equals(DataType.IP)) { - return "Ip"; - } - - return switch (type) { - case DataType.BOOLEAN -> "Boolean"; - case DataType.INTEGER, DataType.COUNTER_INTEGER -> "Int"; - case DataType.LONG, DataType.DATETIME, DataType.COUNTER_LONG, DataType.DATE_NANOS -> "Long"; - case DataType.DOUBLE, DataType.COUNTER_DOUBLE -> "Double"; - case DataType.KEYWORD, DataType.IP, DataType.VERSION, DataType.TEXT, DataType.SEMANTIC_TEXT -> "BytesRef"; - case GEO_POINT -> "GeoPoint"; - case CARTESIAN_POINT -> "CartesianPoint"; - case GEO_SHAPE -> "GeoShape"; - case CARTESIAN_SHAPE -> "CartesianShape"; - case UNSUPPORTED, NULL, UNSIGNED_LONG, SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, SOURCE, DATE_PERIOD, TIME_DURATION, - DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException( - "illegal agg type: " + type.typeName() - ); - }; - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 2e0f97c29ab13..2d9246cb98e07 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -17,12 +17,14 @@ import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.LuceneOperator; +import org.elasticsearch.compute.operator.ChangePointOperator; import org.elasticsearch.compute.operator.ColumnExtractOperator; import 
org.elasticsearch.compute.operator.ColumnLoadOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.EvalOperatorFactory; import org.elasticsearch.compute.operator.FilterOperator.FilterOperatorFactory; +import org.elasticsearch.compute.operator.LimitOperator; import org.elasticsearch.compute.operator.LocalSourceOperator; import org.elasticsearch.compute.operator.LocalSourceOperator.LocalSourceFactory; import org.elasticsearch.compute.operator.MvExpandOperator; @@ -71,6 +73,7 @@ import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.ChangePointExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -110,7 +113,6 @@ import static java.util.Arrays.asList; import static java.util.stream.Collectors.joining; -import static org.elasticsearch.compute.operator.LimitOperator.Factory; import static org.elasticsearch.compute.operator.ProjectOperator.ProjectOperatorFactory; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; @@ -169,7 +171,7 @@ public LocalExecutionPlanner( /** * turn the given plan into a list of drivers to execute */ - public LocalExecutionPlan plan(FoldContext foldCtx, PhysicalPlan localPhysicalPlan) { + public LocalExecutionPlan plan(String taskDescription, FoldContext foldCtx, PhysicalPlan localPhysicalPlan) { var context = new LocalExecutionPlannerContext( new ArrayList<>(), new Holder<>(DriverParallelism.SINGLE), @@ -190,7 +192,7 @@ public LocalExecutionPlan plan(FoldContext foldCtx, PhysicalPlan localPhysicalPl final TimeValue statusInterval = configuration.pragmas().statusInterval(); context.addDriverFactory( new DriverFactory( - new DriverSupplier(context.bigArrays, context.blockFactory, physicalOperation, statusInterval, settings), + new DriverSupplier(taskDescription, context.bigArrays, context.blockFactory, physicalOperation, statusInterval, settings), context.driverParallelism().get() ) ); @@ -221,6 +223,8 @@ private PhysicalOperation plan(PhysicalPlan node, LocalExecutionPlannerContext c return planLimit(limit, context); } else if (node instanceof MvExpandExec mvExpand) { return planMvExpand(mvExpand, context); + } else if (node instanceof ChangePointExec changePoint) { + return planChangePoint(changePoint, context); } // source nodes else if (node instanceof EsQueryExec esQuery) { @@ -372,7 +376,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE, SOURCE -> TopNEncoder.DEFAULT_UNSORTABLE; // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point - case PARTIAL_AGG, UNSUPPORTED -> TopNEncoder.UNSUPPORTED; + case PARTIAL_AGG, UNSUPPORTED, AGGREGATE_METRIC_DOUBLE -> TopNEncoder.UNSUPPORTED; }; } List orders = topNExec.order().stream().map(order -> { @@ -684,7 +688,7 @@ private PhysicalOperation planFilter(FilterExec filter, LocalExecutionPlannerCon private PhysicalOperation planLimit(LimitExec limit, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(limit.child(), context); - return 
source.with(new Factory((Integer) limit.limit().fold(context.foldCtx)), source.layout); + return source.with(new LimitOperator.Factory((Integer) limit.limit().fold(context.foldCtx)), source.layout); } private PhysicalOperation planMvExpand(MvExpandExec mvExpandExec, LocalExecutionPlannerContext context) { @@ -698,6 +702,20 @@ private PhysicalOperation planMvExpand(MvExpandExec mvExpandExec, LocalExecution ); } + private PhysicalOperation planChangePoint(ChangePointExec changePoint, LocalExecutionPlannerContext context) { + PhysicalOperation source = plan(changePoint.child(), context); + Layout layout = source.layout.builder().append(changePoint.targetType()).append(changePoint.targetPvalue()).build(); + return source.with( + new ChangePointOperator.Factory( + layout.get(changePoint.value().id()).channel(), + changePoint.sourceText(), + changePoint.sourceLocation().getLineNumber(), + changePoint.sourceLocation().getColumnNumber() + ), + layout + ); + } + /** * Immutable physical operation. */ @@ -831,6 +849,7 @@ int pageSize(Integer estimatedRowSize) { } record DriverSupplier( + String taskDescription, BigArrays bigArrays, BlockFactory blockFactory, PhysicalOperation physicalOperation, @@ -857,6 +876,7 @@ public Driver apply(String sessionId) { success = true; return new Driver( sessionId, + taskDescription, System.currentTimeMillis(), System.nanoTime(), driverContext, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 15b30f4dd6e30..c5139d45f4b37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -292,7 +292,7 @@ public static ElementType toElementType(DataType dataType, MappedFieldType.Field case TSID_DATA_TYPE -> ElementType.BYTES_REF; case GEO_POINT, CARTESIAN_POINT -> fieldExtractPreference == DOC_VALUES ? ElementType.LONG : ElementType.BYTES_REF; case GEO_SHAPE, CARTESIAN_SHAPE -> fieldExtractPreference == EXTRACT_SPATIAL_BOUNDS ? ElementType.INT : ElementType.BYTES_REF; - case PARTIAL_AGG -> ElementType.COMPOSITE; + case PARTIAL_AGG, AGGREGATE_METRIC_DOUBLE -> ElementType.COMPOSITE; case SHORT, BYTE, DATE_PERIOD, TIME_DURATION, OBJECT, FLOAT, HALF_FLOAT, SCALED_FLOAT -> throw EsqlIllegalArgumentException .illegalDataType(dataType); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ToAggregator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ToAggregator.java index 62bc0a96ab873..053d4bc839f11 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ToAggregator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ToAggregator.java @@ -9,11 +9,9 @@ import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; -import java.util.List; - /** * Expressions that have a mapping to an {@link AggregatorFunctionSupplier}. 
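The ToAggregator change just below is the pivot of this refactor: supplier() no longer receives the input channels. Channels are bound later, when the planner builds a concrete factory, and the same channel-free supplier now also reports the intermediate state that AggregateMapper previously resolved via reflection. A sketch of the resulting call sequence, with illustrative names:

    // Illustrative: one channel-free supplier per aggregate expression.
    static List<IntermediateStateDesc> intermediateState(ToAggregator toAggregator, boolean grouping) {
        AggregatorFunctionSupplier supplier = toAggregator.supplier();
        // Intermediate state comes straight from the supplier; no reflective lookup of
        // generated "...AggregatorFunction" classes as in the old AggregateMapper.
        return grouping
            ? supplier.groupingIntermediateStateDesc()
            : supplier.nonGroupingIntermediateStateDesc();
    }

    // Channels are only bound at factory-construction time, e.g. in the planner:
    //   supplier.aggregatorFactory(mode, inputChannels)          (non-grouping)
    //   supplier.groupingAggregatorFactory(mode, inputChannels)  (grouping)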
*/ public interface ToAggregator { - AggregatorFunctionSupplier supplier(List inputChannels); + AggregatorFunctionSupplier supplier(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java index f95ae0e0783e5..217737de5309b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.plan.logical.LeafPlan; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; @@ -28,7 +27,6 @@ import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; -import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -81,10 +79,6 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { return new LimitExec(limit.source(), mappedChild, limit.limit()); } - if (unary instanceof OrderBy o) { - return new OrderExec(o.source(), mappedChild, o.order()); - } - if (unary instanceof TopN topN) { return new TopNExec(topN.source(), mappedChild, topN.order(), topN.limit(), null); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java index 8a4325ed84b2a..b4560b2e33555 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; @@ -32,7 +33,6 @@ import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; -import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; @@ -105,7 +105,7 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { return enrichExec.child(); } if (f instanceof UnaryExec unaryExec) { - if (f instanceof LimitExec || f instanceof ExchangeExec || f instanceof OrderExec || f instanceof TopNExec) { + if (f instanceof LimitExec || f instanceof ExchangeExec || f instanceof TopNExec) { return f; } else { return unaryExec.child(); @@ -161,11 +161,6 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { 
return new LimitExec(limit.source(), mappedChild, limit.limit()); } - if (unary instanceof OrderBy o) { - mappedChild = addExchangeForFragment(o, mappedChild); - return new OrderExec(o.source(), mappedChild, o.order()); - } - if (unary instanceof TopN topN) { mappedChild = addExchangeForFragment(topN, mappedChild); return new TopNExec(topN.source(), mappedChild, topN.order(), topN.limit(), null); @@ -184,6 +179,10 @@ private PhysicalPlan mapBinary(BinaryPlan bp) { throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]"); } + if (join instanceof InlineJoin) { + return new FragmentExec(bp); + } + PhysicalPlan left = map(bp.left()); // only broadcast joins supported for now - hence push down as a streaming operator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java index b8f539ea307c9..a71c491a03472 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java @@ -12,10 +12,8 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.ChangePoint; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -29,12 +27,12 @@ import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.ChangePointExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.GrokExec; -import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -99,13 +97,18 @@ static PhysicalPlan mapUnary(UnaryPlan p, PhysicalPlan child) { } if (p instanceof MvExpand mvExpand) { - MvExpandExec result = new MvExpandExec(mvExpand.source(), child, mvExpand.target(), mvExpand.expanded()); - if (mvExpand.limit() != null) { - // MvExpand could have an inner limit - // see PushDownAndCombineLimits rule - return new LimitExec(result.source(), result, new Literal(Source.EMPTY, mvExpand.limit(), DataType.INTEGER)); - } - return result; + return new MvExpandExec(mvExpand.source(), child, mvExpand.target(), mvExpand.expanded()); + } + + if (p instanceof ChangePoint changePoint) { + return new ChangePointExec( + changePoint.source(), + child, + changePoint.value(), + changePoint.key(), + changePoint.targetType(), + changePoint.targetPvalue() + ); } return unsupported(p); diff 
--git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/premapper/PreMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/premapper/PreMapper.java new file mode 100644 index 0000000000000..f69754dc3ce89 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/premapper/PreMapper.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.planner.premapper; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryBuilderResolver; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; + +/** + * This class is responsible for invoking any premapping steps that need to be applied to the logical plan + * before it is mapped to a physical one. + */ +public class PreMapper { + + private final TransportActionServices services; + + public PreMapper(TransportActionServices services) { + this.services = services; + } + + /** + * Invokes any premapping steps that need to be applied to the logical plan before it is mapped to a physical one. + */ + public void preMapper(LogicalPlan plan, ActionListener<LogicalPlan> listener) { + queryRewrite(plan, listener.delegateFailureAndWrap((l, p) -> { + p.setOptimized(); + l.onResponse(p); + })); + } + + private void queryRewrite(LogicalPlan plan, ActionListener<LogicalPlan> listener) { + QueryBuilderResolver.resolveQueryBuilders(plan, services, listener); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java index 19ed77405daa2..e41dd42c7579d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java @@ -25,9 +25,11 @@ import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.session.Configuration; +import org.elasticsearch.xpack.esql.session.EsqlCCSUtils; import java.util.ArrayList; import java.util.List; @@ -71,34 +73,56 @@ void startComputeOnRemoteCluster( ExchangeSourceHandler exchangeSource, RemoteCluster cluster, Runnable cancelQueryOnFailure, + EsqlExecutionInfo executionInfo, ActionListener listener ) { var queryPragmas = configuration.pragmas(); listener = ActionListener.runBefore(listener, exchangeSource.addEmptySink()::close); final var childSessionId = computeService.newChildSession(sessionId); final AtomicReference finalResponse = new AtomicReference<>(); + final String clusterAlias = cluster.clusterAlias(); try (var computeListener = new ComputeListener(transportService.getThreadPool(), cancelQueryOnFailure, listener.map(profiles -> { var resp = finalResponse.get(); return Objects.requireNonNullElseGet(resp, () -> new
ComputeResponse(profiles)); }))) { + var openExchangeListener = computeListener.acquireAvoid(); ExchangeService.openExchange( transportService, cluster.connection, childSessionId, queryPragmas.exchangeBufferSize(), esqlExecutor, - computeListener.acquireCompute().delegateFailureAndWrap((l, unused) -> { - var remoteSink = exchangeService.newRemoteSink(rootTask, childSessionId, transportService, cluster.connection); + EsqlCCSUtils.skipUnavailableListener( + openExchangeListener, + executionInfo, + clusterAlias, + EsqlExecutionInfo.Cluster.Status.SKIPPED + ).delegateFailureAndWrap((l, unused) -> { + var listenerGroup = new RemoteListenerGroup( + transportService, + rootTask, + computeListener, + clusterAlias, + executionInfo, + openExchangeListener + ); + + var remoteSink = exchangeService.newRemoteSink( + listenerGroup.getGroupTask(), + childSessionId, + transportService, + cluster.connection + ); exchangeSource.addRemoteSink( remoteSink, - true, + executionInfo.isSkipUnavailable(clusterAlias) == false, () -> {}, queryPragmas.concurrentExchangeClients(), - computeListener.acquireAvoid() + listenerGroup.getExchangeRequestListener() ); var remotePlan = new RemoteClusterPlan(plan, cluster.concreteIndices, cluster.originalIndices); - var clusterRequest = new ClusterComputeRequest(cluster.clusterAlias, childSessionId, configuration, remotePlan); - final ActionListener clusterListener = l.map(r -> { + var clusterRequest = new ClusterComputeRequest(clusterAlias, childSessionId, configuration, remotePlan); + final ActionListener clusterListener = listenerGroup.getClusterRequestListener().map(r -> { finalResponse.set(r); return r.getProfiles(); }); @@ -106,13 +130,14 @@ void startComputeOnRemoteCluster( cluster.connection, ComputeService.CLUSTER_ACTION_NAME, clusterRequest, - rootTask, + listenerGroup.getGroupTask(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(clusterListener, ComputeResponse::new, esqlExecutor) ); }) ); } + } List getRemoteClusters( @@ -197,8 +222,7 @@ void runComputeOnRemoteCluster( }))) { var exchangeSource = new ExchangeSourceHandler( configuration.pragmas().exchangeBufferSize(), - transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), - computeListener.acquireAvoid() + transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) ); try (Releasable ignored = exchangeSource.addEmptySink()) { exchangeSink.addCompletionListener(computeListener.acquireAvoid()); @@ -206,6 +230,7 @@ void runComputeOnRemoteCluster( parentTask, new ComputeContext( localSessionId, + "remote_reduce", clusterAlias, List.of(), configuration, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java index 82943d23581fd..86af106ea7e42 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java @@ -19,6 +19,7 @@ record ComputeContext( String sessionId, + String taskDescription, String clusterAlias, List searchContexts, Configuration configuration, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java index 3d358b8c7a8a2..c8b8e84fd2478 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java @@ -47,7 +47,8 @@ final class ComputeListener implements Releasable { * Acquires a new listener that doesn't collect result */ ActionListener acquireAvoid() { - return refs.acquire().delegateResponse((l, e) -> { + var listener = ActionListener.assertAtLeastOnce(refs.acquire()); + return listener.delegateResponse((l, e) -> { try { runOnFailure.run(); } finally { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 71c2a65037e9a..4279d0114130d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -155,6 +155,7 @@ public void execute( } var computeContext = new ComputeContext( newChildSession(sessionId), + "single", LOCAL_CLUSTER, List.of(), configuration, @@ -190,16 +191,16 @@ public void execute( * entire plan. */ List outputAttributes = physicalPlan.output(); + var exchangeSource = new ExchangeSourceHandler( + queryPragmas.exchangeBufferSize(), + transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) + ); + listener = ActionListener.runBefore(listener, () -> exchangeService.removeExchangeSourceHandler(sessionId)); + exchangeService.addExchangeSourceHandler(sessionId, exchangeSource); try (var computeListener = new ComputeListener(transportService.getThreadPool(), cancelQueryOnFailure, listener.map(profiles -> { execInfo.markEndQuery(); // TODO: revisit this time recording model as part of INLINESTATS improvements return new Result(outputAttributes, collectedPages, profiles, execInfo); }))) { - var exchangeSource = new ExchangeSourceHandler( - queryPragmas.exchangeBufferSize(), - transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), - ActionListener.runBefore(computeListener.acquireAvoid(), () -> exchangeService.removeExchangeSourceHandler(sessionId)) - ); - exchangeService.addExchangeSourceHandler(sessionId, exchangeSource); try (Releasable ignored = exchangeSource.addEmptySink()) { // run compute on the coordinator final AtomicBoolean localClusterWasInterrupted = new AtomicBoolean(); @@ -226,6 +227,7 @@ public void execute( rootTask, new ComputeContext( sessionId, + "final", LOCAL_CLUSTER, List.of(), configuration, @@ -276,6 +278,7 @@ public void execute( exchangeSource, cluster, cancelQueryOnFailure, + execInfo, computeListener.acquireCompute().map(r -> { updateExecutionInfo(execInfo, cluster.clusterAlias(), r); return r.getProfiles(); @@ -309,11 +312,10 @@ private void updateExecutionInfo(EsqlExecutionInfo executionInfo, String cluster } else { // if the cluster is an older version and does not send back took time, then calculate it here on the coordinator // and leave shard info unset, so it is not shown in the CCS metadata section of the JSON response - var tookTime = TimeValue.timeValueNanos(System.nanoTime() - executionInfo.getRelativeStartNanos()); executionInfo.swapCluster( clusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(runningToSuccess.apply(v.getStatus())) - .setTook(tookTime) + .setTook(executionInfo.tookSoFar()) .build() ); } @@ -394,7 +396,7 @@ public SourceProvider createSourceProvider() { // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) // it's doing this in the planning of 
EsQueryExec (the source of the data) // see also EsPhysicalOperationProviders.sourcePhysicalOperation - LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(context.foldCtx(), plan); + LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(context.taskDescription(), context.foldCtx(), plan); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Local execution plan:\n{}", localExecutionPlan.describe()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java index 7020932819421..ee5b192bf3285 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.esql.plugin; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.search.SearchShardsGroup; -import org.elasticsearch.action.search.SearchShardsRequest; -import org.elasticsearch.action.search.SearchShardsResponse; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -24,12 +22,9 @@ import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; -import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; @@ -43,7 +38,6 @@ import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -57,6 +51,9 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; @@ -70,6 +67,7 @@ final class DataNodeComputeHandler implements TransportRequestHandler outListener ) { - QueryBuilder requestFilter = PlannerUtils.requestTimestampFilter(dataNodePlan); - var listener = ActionListener.runAfter(outListener, exchangeSource.addEmptySink()::close); - final long startTimeInNanos = System.nanoTime(); - lookupDataNodes(parentTask, clusterAlias, requestFilter, concreteIndices, originalIndices, ActionListener.wrap(dataNodeResult -> { - try (var computeListener = new 
ComputeListener(transportService.getThreadPool(), runOnTaskFailure, listener.map(profiles -> { - TimeValue took = TimeValue.timeValueNanos(System.nanoTime() - startTimeInNanos); - return new ComputeResponse( - profiles, - took, - dataNodeResult.totalShards(), - dataNodeResult.totalShards(), - dataNodeResult.skippedShards(), - 0 - ); - }))) { + DataNodeRequestSender sender = new DataNodeRequestSender(transportService, esqlExecutor, parentTask) { + @Override + protected void sendRequest( + DiscoveryNode node, + List shardIds, + Map aliasFilters, + NodeListener nodeListener + ) { + final AtomicLong pagesFetched = new AtomicLong(); + var listener = ActionListener.wrap(nodeListener::onResponse, e -> nodeListener.onFailure(e, pagesFetched.get() > 0)); + final Transport.Connection connection; + try { + connection = transportService.getConnection(node); + } catch (Exception e) { + listener.onFailure(e); + return; + } + var queryPragmas = configuration.pragmas(); + var childSessionId = computeService.newChildSession(sessionId); // For each target node, first open a remote exchange on the remote node, then link the exchange source to // the new remote exchange sink, and initialize the computation on the target node via data-node-request. - for (DataNode node : dataNodeResult.dataNodes()) { - var queryPragmas = configuration.pragmas(); - var childSessionId = computeService.newChildSession(sessionId); - ActionListener nodeListener = computeListener.acquireCompute().map(ComputeResponse::getProfiles); - ExchangeService.openExchange( - transportService, - node.connection, - childSessionId, - queryPragmas.exchangeBufferSize(), - esqlExecutor, - nodeListener.delegateFailureAndWrap((l, unused) -> { - var remoteSink = exchangeService.newRemoteSink(parentTask, childSessionId, transportService, node.connection); + ExchangeService.openExchange( + transportService, + connection, + childSessionId, + queryPragmas.exchangeBufferSize(), + esqlExecutor, + listener.delegateFailureAndWrap((l, unused) -> { + final AtomicReference nodeResponseRef = new AtomicReference<>(); + try ( + var computeListener = new ComputeListener(threadPool, runOnTaskFailure, l.map(ignored -> nodeResponseRef.get())) + ) { + final var remoteSink = exchangeService.newRemoteSink(parentTask, childSessionId, transportService, connection); exchangeSource.addRemoteSink( remoteSink, true, - () -> {}, + pagesFetched::incrementAndGet, queryPragmas.concurrentExchangeClients(), computeListener.acquireAvoid() ); - final boolean sameNode = transportService.getLocalNode().getId().equals(node.connection.getNode().getId()); + final boolean sameNode = transportService.getLocalNode().getId().equals(connection.getNode().getId()); var dataNodeRequest = new DataNodeRequest( childSessionId, configuration, clusterAlias, - node.shardIds, - node.aliasFilters, + shardIds, + aliasFilters, dataNodePlan, originalIndices.indices(), originalIndices.indicesOptions(), sameNode == false && queryPragmas.nodeLevelReduction() ); transportService.sendChildRequest( - node.connection, + connection, ComputeService.DATA_ACTION_NAME, dataNodeRequest, parentTask, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(nodeListener, ComputeResponse::new, esqlExecutor) + new ActionListenerResponseHandler<>(computeListener.acquireCompute().map(r -> { + nodeResponseRef.set(r); + return r.profiles(); + }), DataNodeComputeResponse::new, esqlExecutor) ); - }) - ); - } - } - }, listener::onFailure)); - } - - private void acquireSearchContexts( - String clusterAlias, - List shardIds, 
- Configuration configuration, - Map aliasFilters, - ActionListener> listener - ) { - final List targetShards = new ArrayList<>(); - try { - for (ShardId shardId : shardIds) { - var indexShard = searchService.getIndicesService().indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); - targetShards.add(indexShard); - } - } catch (Exception e) { - listener.onFailure(e); - return; - } - final var doAcquire = ActionRunnable.supply(listener, () -> { - final List searchContexts = new ArrayList<>(targetShards.size()); - boolean success = false; - try { - for (IndexShard shard : targetShards) { - var aliasFilter = aliasFilters.getOrDefault(shard.shardId().getIndex(), AliasFilter.EMPTY); - var shardRequest = new ShardSearchRequest( - shard.shardId(), - configuration.absoluteStartedTimeInMillis(), - aliasFilter, - clusterAlias - ); - // TODO: `searchService.createSearchContext` allows opening search contexts without limits, - // we need to limit the number of active search contexts here or in SearchService - SearchContext context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); - searchContexts.add(context); - } - for (SearchContext searchContext : searchContexts) { - searchContext.preProcess(); - } - success = true; - return searchContexts; - } finally { - if (success == false) { - IOUtils.close(searchContexts); - } - } - }); - final AtomicBoolean waitedForRefreshes = new AtomicBoolean(); - try (RefCountingRunnable refs = new RefCountingRunnable(() -> { - if (waitedForRefreshes.get()) { - esqlExecutor.execute(doAcquire); - } else { - doAcquire.run(); - } - })) { - for (IndexShard targetShard : targetShards) { - final Releasable ref = refs.acquire(); - targetShard.ensureShardSearchActive(await -> { - try (ref) { - if (await) { - waitedForRefreshes.set(true); } - } - }); - } - } - } - - record DataNode(Transport.Connection connection, List shardIds, Map aliasFilters) { - - } - - /** - * Result from lookupDataNodes where can_match is performed to determine what shards can be skipped - * and which target nodes are needed for running the ES|QL query - * - * @param dataNodes list of DataNode to perform the ES|QL query on - * @param totalShards Total number of shards (from can_match phase), including skipped shards - * @param skippedShards Number of skipped shards (from can_match phase) - */ - record DataNodeResult(List dataNodes, int totalShards, int skippedShards) {} - - /** - * Performs can_match and find the target nodes for the given target indices and filter. - *
<p>
    - * Ideally, the search_shards API should be called before the field-caps API; however, this can lead - * to a situation where the column structure (i.e., matched data types) differs depending on the query. - */ - private void lookupDataNodes( - Task parentTask, - String clusterAlias, - QueryBuilder filter, - Set concreteIndices, - OriginalIndices originalIndices, - ActionListener listener - ) { - ActionListener searchShardsListener = listener.map(resp -> { - Map nodes = new HashMap<>(); - for (DiscoveryNode node : resp.getNodes()) { - nodes.put(node.getId(), node); - } - Map> nodeToShards = new HashMap<>(); - Map> nodeToAliasFilters = new HashMap<>(); - int totalShards = 0; - int skippedShards = 0; - for (SearchShardsGroup group : resp.getGroups()) { - var shardId = group.shardId(); - if (group.allocatedNodes().isEmpty()) { - throw new ShardNotFoundException(group.shardId(), "no shard copies found {}", group.shardId()); - } - if (concreteIndices.contains(shardId.getIndexName()) == false) { - continue; - } - totalShards++; - if (group.skipped()) { - skippedShards++; - continue; - } - String targetNode = group.allocatedNodes().get(0); - nodeToShards.computeIfAbsent(targetNode, k -> new ArrayList<>()).add(shardId); - AliasFilter aliasFilter = resp.getAliasFilters().get(shardId.getIndex().getUUID()); - if (aliasFilter != null) { - nodeToAliasFilters.computeIfAbsent(targetNode, k -> new HashMap<>()).put(shardId.getIndex(), aliasFilter); - } - } - List dataNodes = new ArrayList<>(nodeToShards.size()); - for (Map.Entry> e : nodeToShards.entrySet()) { - DiscoveryNode node = nodes.get(e.getKey()); - Map aliasFilters = nodeToAliasFilters.getOrDefault(e.getKey(), Map.of()); - dataNodes.add(new DataNode(transportService.getConnection(node), e.getValue(), aliasFilters)); + }) + ); } - return new DataNodeResult(dataNodes, totalShards, skippedShards); - }); - SearchShardsRequest searchShardsRequest = new SearchShardsRequest( - originalIndices.indices(), - originalIndices.indicesOptions(), - filter, - null, - null, - false, - clusterAlias - ); - transportService.sendChildRequest( - transportService.getLocalNode(), - EsqlSearchShardsAction.TYPE.name(), - searchShardsRequest, - parentTask, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(searchShardsListener, SearchShardsResponse::new, esqlExecutor) + }; + sender.startComputeOnDataNodes( + clusterAlias, + concreteIndices, + originalIndices, + PlannerUtils.requestTimestampFilter(dataNodePlan), + runOnTaskFailure, + ActionListener.releaseAfter(outListener, exchangeSource.addEmptySink()) ); } @@ -318,12 +182,16 @@ private class DataNodeRequestExecutor { private final ComputeListener computeListener; private final int maxConcurrentShards; private final ExchangeSink blockingSink; // block until we have completed on all shards or the coordinator has enough data + private final boolean failFastOnShardFailure; + private final Map shardLevelFailures; DataNodeRequestExecutor( DataNodeRequest request, CancellableTask parentTask, ExchangeSinkHandler exchangeSink, int maxConcurrentShards, + boolean failFastOnShardFailure, + Map shardLevelFailures, ComputeListener computeListener ) { this.request = request; @@ -331,6 +199,8 @@ private class DataNodeRequestExecutor { this.exchangeSink = exchangeSink; this.computeListener = computeListener; this.maxConcurrentShards = maxConcurrentShards; + this.failFastOnShardFailure = failFastOnShardFailure; + this.shardLevelFailures = shardLevelFailures; this.blockingSink = exchangeSink.createExchangeSink(() -> 
{}); } @@ -346,6 +216,7 @@ private void runBatch(int startBatchIndex) { final String clusterAlias = request.clusterAlias(); final var sessionId = request.sessionId(); final int endBatchIndex = Math.min(startBatchIndex + maxConcurrentShards, request.shardIds().size()); + final AtomicInteger pagesProduced = new AtomicInteger(); List shardIds = request.shardIds().subList(startBatchIndex, endBatchIndex); ActionListener> batchListener = new ActionListener<>() { final ActionListener> ref = computeListener.acquireCompute(); @@ -361,28 +232,108 @@ public void onResponse(List result) { @Override public void onFailure(Exception e) { - try { - exchangeService.finishSinkHandler(request.sessionId(), e); - } finally { - ref.onFailure(e); + if (pagesProduced.get() == 0 && failFastOnShardFailure == false) { + for (ShardId shardId : shardIds) { + addShardLevelFailure(shardId, e); + } + onResponse(List.of()); + } else { + try { + exchangeService.finishSinkHandler(request.sessionId(), e); + } finally { + ref.onFailure(e); + } } } }; acquireSearchContexts(clusterAlias, shardIds, configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH, ESQL_WORKER_THREAD_POOL_NAME); + if (searchContexts.isEmpty()) { + batchListener.onResponse(List.of()); + return; + } var computeContext = new ComputeContext( sessionId, + "data", clusterAlias, searchContexts, configuration, configuration.newFoldContext(), null, - () -> exchangeSink.createExchangeSink(() -> {}) + () -> exchangeSink.createExchangeSink(pagesProduced::incrementAndGet) ); computeService.runCompute(parentTask, computeContext, request.plan(), batchListener); }, batchListener::onFailure)); } + private void acquireSearchContexts( + String clusterAlias, + List shardIds, + Configuration configuration, + Map aliasFilters, + ActionListener> listener + ) { + final List targetShards = new ArrayList<>(); + for (ShardId shardId : shardIds) { + try { + var indexShard = searchService.getIndicesService().indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); + targetShards.add(indexShard); + } catch (Exception e) { + if (addShardLevelFailure(shardId, e) == false) { + listener.onFailure(e); + return; + } + } + } + final var doAcquire = ActionRunnable.supply(listener, () -> { + final List searchContexts = new ArrayList<>(targetShards.size()); + SearchContext context = null; + for (IndexShard shard : targetShards) { + try { + var aliasFilter = aliasFilters.getOrDefault(shard.shardId().getIndex(), AliasFilter.EMPTY); + var shardRequest = new ShardSearchRequest( + shard.shardId(), + configuration.absoluteStartedTimeInMillis(), + aliasFilter, + clusterAlias + ); + // TODO: `searchService.createSearchContext` allows opening search contexts without limits, + // we need to limit the number of active search contexts here or in SearchService + context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); + context.preProcess(); + searchContexts.add(context); + } catch (Exception e) { + if (addShardLevelFailure(shard.shardId(), e)) { + IOUtils.close(context); + } else { + IOUtils.closeWhileHandlingException(context, () -> IOUtils.close(searchContexts)); + throw e; + } + } + } + return searchContexts; + }); + final AtomicBoolean waitedForRefreshes = new AtomicBoolean(); + try (RefCountingRunnable refs = new RefCountingRunnable(() -> { + if (waitedForRefreshes.get()) { + esqlExecutor.execute(doAcquire); + } else { + doAcquire.run(); + } + })) { + for (IndexShard targetShard : 
targetShards) { + final Releasable ref = refs.acquire(); + targetShard.ensureShardSearchActive(await -> { + try (ref) { + if (await) { + waitedForRefreshes.set(true); + } + } + }); + } + } + } + private void onBatchCompleted(int lastBatchIndex) { if (lastBatchIndex < request.shardIds().size() && exchangeSink.isFinished() == false) { runBatch(lastBatchIndex); @@ -395,6 +346,14 @@ private void onBatchCompleted(int lastBatchIndex) { blockingSink.finish(); } } + + private boolean addShardLevelFailure(ShardId shardId, Exception e) { + if (failFastOnShardFailure) { + return false; + } + shardLevelFailures.put(shardId, e); + return true; + } } private void runComputeOnDataNode( @@ -402,13 +361,15 @@ private void runComputeOnDataNode( String externalId, PhysicalPlan reducePlan, DataNodeRequest request, - ActionListener listener + boolean failFastOnShardFailure, + ActionListener listener ) { + final Map shardLevelFailures = new HashMap<>(); try ( ComputeListener computeListener = new ComputeListener( transportService.getThreadPool(), computeService.cancelQueryOnFailure(task), - listener.map(ComputeResponse::new) + listener.map(profiles -> new DataNodeComputeResponse(profiles, shardLevelFailures)) ) ) { var parentListener = computeListener.acquireAvoid(); @@ -420,6 +381,8 @@ private void runComputeOnDataNode( task, internalSink, request.configuration().pragmas().maxConcurrentShardsPerNode(), + failFastOnShardFailure, + shardLevelFailures, computeListener ); dataNodeRequestExecutor.start(); @@ -428,13 +391,14 @@ private void runComputeOnDataNode( task.addListener( () -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled())) ); - var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor, computeListener.acquireAvoid()); + var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, true, () -> {}, 1, ActionListener.noop()); var reductionListener = computeListener.acquireCompute(); computeService.runCompute( task, new ComputeContext( request.sessionId(), + "node_reduce", request.clusterAlias(), List.of(), request.configuration(), @@ -465,7 +429,7 @@ private void runComputeOnDataNode( @Override public void messageReceived(DataNodeRequest request, TransportChannel channel, Task task) { - final ActionListener listener = new ChannelActionListener<>(channel); + final ActionListener listener = new ChannelActionListener<>(channel); final PhysicalPlan reductionPlan; if (request.plan() instanceof ExchangeSinkExec plan) { reductionPlan = ComputeService.reductionPlan(plan, request.runNodeLevelReduction()); @@ -485,6 +449,8 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T request.indicesOptions(), request.runNodeLevelReduction() ); - runComputeOnDataNode((CancellableTask) task, sessionId, reductionPlan, request, listener); + // the sender doesn't support retry on shard failures, so we need to fail fast here. 
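+ // (DataNodeComputeResponse likewise refuses to serialize shard-level failures to versions before ESQL_RETRY_ON_SHARD_LEVEL_FAILURE.)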
+ final boolean failFastOnShardFailures = channel.getVersion().before(TransportVersions.ESQL_RETRY_ON_SHARD_LEVEL_FAILURE); + runComputeOnDataNode((CancellableTask) task, sessionId, reductionPlan, request, failFastOnShardFailures, listener); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeResponse.java new file mode 100644 index 0000000000000..34a92fb135277 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeResponse.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.transport.TransportResponse; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The compute result of {@link DataNodeRequest} + */ +final class DataNodeComputeResponse extends TransportResponse { + private final List profiles; + private final Map shardLevelFailures; + + DataNodeComputeResponse(List profiles, Map shardLevelFailures) { + this.profiles = profiles; + this.shardLevelFailures = shardLevelFailures; + } + + DataNodeComputeResponse(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_RETRY_ON_SHARD_LEVEL_FAILURE)) { + this.profiles = in.readCollectionAsImmutableList(DriverProfile::new); + this.shardLevelFailures = in.readMap(ShardId::new, StreamInput::readException); + } else { + this.profiles = Objects.requireNonNullElse(new ComputeResponse(in).getProfiles(), List.of()); + this.shardLevelFailures = Map.of(); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_RETRY_ON_SHARD_LEVEL_FAILURE)) { + out.writeCollection(profiles, (o, v) -> v.writeTo(o)); + out.writeMap(shardLevelFailures, (o, v) -> v.writeTo(o), StreamOutput::writeException); + } else { + if (shardLevelFailures.isEmpty() == false) { + throw new IllegalStateException("shard level failures are not supported in old versions"); + } + new ComputeResponse(profiles).writeTo(out); + } + } + + List profiles() { + return profiles; + } + + Map shardLevelFailures() { + return shardLevelFailures; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSender.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSender.java new file mode 100644 index 0000000000000..2d5b4169c0215 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSender.java @@ -0,0 +1,360 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchShardsGroup; +import org.elasticsearch.action.search.SearchShardsRequest; +import org.elasticsearch.action.search.SearchShardsResponse; +import org.elasticsearch.action.support.TransportActions; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.compute.operator.FailureCollector; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.Executor; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.ReentrantLock; + +/** + * Handles computes within a single cluster by dispatching {@link DataNodeRequest} to data nodes + * and executing these computes on the data nodes. 
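+ * Shard-level failures are retried on the remaining copies of the shard where possible; a shard with no remaining copies fails the whole request.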
+ */ +abstract class DataNodeRequestSender { + private final TransportService transportService; + private final Executor esqlExecutor; + private final CancellableTask rootTask; + private final ReentrantLock sendingLock = new ReentrantLock(); + private final Queue pendingShardIds = ConcurrentCollections.newQueue(); + private final Map nodePermits = new HashMap<>(); + private final Map shardFailures = ConcurrentCollections.newConcurrentMap(); + private final AtomicBoolean changed = new AtomicBoolean(); + private boolean reportedFailure = false; // guarded by sendingLock + + DataNodeRequestSender(TransportService transportService, Executor esqlExecutor, CancellableTask rootTask) { + this.transportService = transportService; + this.esqlExecutor = esqlExecutor; + this.rootTask = rootTask; + } + + final void startComputeOnDataNodes( + String clusterAlias, + Set concreteIndices, + OriginalIndices originalIndices, + QueryBuilder requestFilter, + Runnable runOnTaskFailure, + ActionListener listener + ) { + final long startTimeInNanos = System.nanoTime(); + searchShards(rootTask, clusterAlias, requestFilter, concreteIndices, originalIndices, ActionListener.wrap(targetShards -> { + try (var computeListener = new ComputeListener(transportService.getThreadPool(), runOnTaskFailure, listener.map(profiles -> { + TimeValue took = TimeValue.timeValueNanos(System.nanoTime() - startTimeInNanos); + return new ComputeResponse( + profiles, + took, + targetShards.totalShards(), + targetShards.totalShards(), + targetShards.skippedShards(), + 0 + ); + }))) { + for (TargetShard shard : targetShards.shards.values()) { + for (DiscoveryNode node : shard.remainingNodes) { + nodePermits.putIfAbsent(node, new Semaphore(1)); + } + } + pendingShardIds.addAll(targetShards.shards.keySet()); + trySendingRequestsForPendingShards(targetShards, computeListener); + } + }, listener::onFailure)); + } + + private void trySendingRequestsForPendingShards(TargetShards targetShards, ComputeListener computeListener) { + changed.set(true); + final ActionListener listener = computeListener.acquireAvoid(); + try { + while (sendingLock.tryLock()) { + try { + if (changed.compareAndSet(true, false) == false) { + break; + } + for (ShardId shardId : pendingShardIds) { + if (targetShards.getShard(shardId).remainingNodes.isEmpty()) { + shardFailures.compute( + shardId, + (k, v) -> new ShardFailure( + true, + v == null ? 
new NoShardAvailableActionException(shardId, "no shard copies found") : v.failure + ) + ); + } + } + if (reportedFailure || shardFailures.values().stream().anyMatch(shardFailure -> shardFailure.fatal)) { + reportedFailure = true; + reportFailures(computeListener); + } else { + var nodeRequests = selectNodeRequests(targetShards); + for (NodeRequest request : nodeRequests) { + sendOneNodeRequest(targetShards, computeListener, request); + } + } + } finally { + sendingLock.unlock(); + } + } + } finally { + listener.onResponse(null); + } + } + + private void reportFailures(ComputeListener computeListener) { + assert sendingLock.isHeldByCurrentThread(); + assert reportedFailure; + Iterator it = shardFailures.values().iterator(); + Set seen = Collections.newSetFromMap(new IdentityHashMap<>()); + while (it.hasNext()) { + ShardFailure failure = it.next(); + if (seen.add(failure.failure)) { + computeListener.acquireAvoid().onFailure(failure.failure); + } + it.remove(); + } + } + + private void sendOneNodeRequest(TargetShards targetShards, ComputeListener computeListener, NodeRequest request) { + final ActionListener> listener = computeListener.acquireCompute(); + sendRequest(request.node, request.shardIds, request.aliasFilters, new NodeListener() { + void onAfter(List profiles) { + nodePermits.get(request.node).release(); + trySendingRequestsForPendingShards(targetShards, computeListener); + listener.onResponse(profiles); + } + + @Override + public void onResponse(DataNodeComputeResponse response) { + // remove failures of successful shards + for (ShardId shardId : request.shardIds()) { + if (response.shardLevelFailures().containsKey(shardId) == false) { + shardFailures.remove(shardId); + } + } + for (Map.Entry e : response.shardLevelFailures().entrySet()) { + final ShardId shardId = e.getKey(); + trackShardLevelFailure(shardId, false, e.getValue()); + pendingShardIds.add(shardId); + } + onAfter(response.profiles()); + } + + @Override + public void onFailure(Exception e, boolean receivedData) { + for (ShardId shardId : request.shardIds) { + trackShardLevelFailure(shardId, receivedData, e); + pendingShardIds.add(shardId); + } + onAfter(List.of()); + } + }); + } + + abstract void sendRequest(DiscoveryNode node, List shardIds, Map aliasFilters, NodeListener nodeListener); + + interface NodeListener { + void onResponse(DataNodeComputeResponse response); + + void onFailure(Exception e, boolean receivedData); + } + + private static Exception unwrapFailure(Exception e) { + e = e instanceof TransportException te ? FailureCollector.unwrapTransportException(te) : e; + if (TransportActions.isShardNotAvailableException(e)) { + return NoShardAvailableActionException.forOnShardFailureWrapper(e.getMessage()); + } else { + return e; + } + } + + private void trackShardLevelFailure(ShardId shardId, boolean fatal, Exception originalEx) { + final Exception e = unwrapFailure(originalEx); + // Retain only one meaningful exception and avoid suppressing previous failures to minimize memory usage, especially when handling + // many shards. 
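+ // A cancellation always escalates the recorded failure to fatal, and neither a cancellation nor a NoShardAvailableActionException overwrites a more specific exception recorded earlier.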
+ shardFailures.compute(shardId, (k, current) -> { + boolean mergedFatal = fatal || ExceptionsHelper.unwrap(e, TaskCancelledException.class) != null; + if (current == null) { + return new ShardFailure(mergedFatal, e); + } + mergedFatal |= current.fatal; + if (e instanceof NoShardAvailableActionException || ExceptionsHelper.unwrap(e, TaskCancelledException.class) != null) { + return new ShardFailure(mergedFatal, current.failure); + } + return new ShardFailure(mergedFatal, e); + }); + } + + /** + * Result from {@link #searchShards(Task, String, QueryBuilder, Set, OriginalIndices, ActionListener)} where can_match is performed to + * determine what shards can be skipped and which target nodes are needed for running the ES|QL query + * + * @param shards List of target shards to perform the ES|QL query on + * @param totalShards Total number of shards (from can_match phase), including skipped shards + * @param skippedShards Number of skipped shards (from can_match phase) + */ + record TargetShards(Map shards, int totalShards, int skippedShards) { + TargetShard getShard(ShardId shardId) { + return shards.get(shardId); + } + } + + /** + * (Remaining) allocated nodes of a given shard id and its alias filter + */ + record TargetShard(ShardId shardId, List remainingNodes, AliasFilter aliasFilter) { + + } + + record NodeRequest(DiscoveryNode node, List shardIds, Map aliasFilters) { + + } + + private record ShardFailure(boolean fatal, Exception failure) { + + } + + /** + * Selects the next nodes to send requests to. Limits to at most one outstanding request per node. + * If there is already a request in-flight to a node, another request will not be sent to the same node + * until the first request completes. Instead, the next node in the remaining nodes will be tried. + */ + private List selectNodeRequests(TargetShards targetShards) { + assert sendingLock.isHeldByCurrentThread(); + final Map> nodeToShardIds = new HashMap<>(); + final Iterator shardsIt = pendingShardIds.iterator(); + while (shardsIt.hasNext()) { + ShardId shardId = shardsIt.next(); + ShardFailure failure = shardFailures.get(shardId); + if (failure != null && failure.fatal) { + shardsIt.remove(); + continue; + } + TargetShard shard = targetShards.getShard(shardId); + Iterator nodesIt = shard.remainingNodes.iterator(); + DiscoveryNode selectedNode = null; + while (nodesIt.hasNext()) { + DiscoveryNode node = nodesIt.next(); + if (nodeToShardIds.containsKey(node) || nodePermits.get(node).tryAcquire()) { + nodesIt.remove(); + shardsIt.remove(); + selectedNode = node; + break; + } + } + if (selectedNode != null) { + nodeToShardIds.computeIfAbsent(selectedNode, unused -> new ArrayList<>()).add(shard.shardId); + } + } + final List nodeRequests = new ArrayList<>(nodeToShardIds.size()); + for (var e : nodeToShardIds.entrySet()) { + List shardIds = e.getValue(); + Map aliasFilters = new HashMap<>(); + for (ShardId shardId : shardIds) { + var aliasFilter = targetShards.getShard(shardId).aliasFilter; + if (aliasFilter != null) { + aliasFilters.put(shardId.getIndex(), aliasFilter); + } + } + nodeRequests.add(new NodeRequest(e.getKey(), shardIds, aliasFilters)); + } + return nodeRequests; + } + + /** + * Performs can_match and find the target nodes for the given target indices and filter. + *
<p>
    + * Ideally, the search_shards API should be called before the field-caps API; however, this can lead + * to a situation where the column structure (i.e., matched data types) differs depending on the query. + */ + void searchShards( + Task parentTask, + String clusterAlias, + QueryBuilder filter, + Set concreteIndices, + OriginalIndices originalIndices, + ActionListener listener + ) { + ActionListener searchShardsListener = listener.map(resp -> { + Map nodes = new HashMap<>(); + for (DiscoveryNode node : resp.getNodes()) { + nodes.put(node.getId(), node); + } + int totalShards = 0; + int skippedShards = 0; + Map shards = new HashMap<>(); + for (SearchShardsGroup group : resp.getGroups()) { + var shardId = group.shardId(); + if (concreteIndices.contains(shardId.getIndexName()) == false) { + continue; + } + totalShards++; + if (group.skipped()) { + skippedShards++; + continue; + } + List allocatedNodes = new ArrayList<>(group.allocatedNodes().size()); + for (String n : group.allocatedNodes()) { + allocatedNodes.add(nodes.get(n)); + } + AliasFilter aliasFilter = resp.getAliasFilters().get(shardId.getIndex().getUUID()); + shards.put(shardId, new TargetShard(shardId, allocatedNodes, aliasFilter)); + } + return new TargetShards(shards, totalShards, skippedShards); + }); + SearchShardsRequest searchShardsRequest = new SearchShardsRequest( + originalIndices.indices(), + originalIndices.indicesOptions(), + filter, + null, + null, + false, + clusterAlias + ); + transportService.sendChildRequest( + transportService.getLocalNode(), + EsqlSearchShardsAction.TYPE.name(), + searchShardsRequest, + parentTask, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(searchShardsListener, SearchShardsResponse::new, esqlExecutor) + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteListenerGroup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteListenerGroup.java new file mode 100644 index 0000000000000..9ef085257b87b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteListenerGroup.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; +import org.elasticsearch.xpack.esql.session.EsqlCCSUtils; + +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +/** + * Create group task for this cluster. This group task ensures that two branches of the computation: + * the exchange sink and the cluster request, belong to the same group and each of them can cancel the other. + * runAfter listeners below ensure that the group is finalized when both branches are done. 
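+ * On failure, either branch first cancels the group task and its descendants before reporting; ignorable errors mark the cluster as partial instead of failing the query.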
+ * The group task is the child of the root task, so if the root task is cancelled, the group task is cancelled too. + */ +class RemoteListenerGroup { + private final CancellableTask groupTask; + private final ActionListener exchangeRequestListener; + private final ActionListener> clusterRequestListener; + private final TaskManager taskManager; + private final String clusterAlias; + private final EsqlExecutionInfo executionInfo; + private final TransportService transportService; + + RemoteListenerGroup( + TransportService transportService, + Task rootTask, + ComputeListener computeListener, + String clusterAlias, + EsqlExecutionInfo executionInfo, + ActionListener delegate + ) { + this.transportService = transportService; + this.taskManager = transportService.getTaskManager(); + this.clusterAlias = clusterAlias; + this.executionInfo = executionInfo; + groupTask = createGroupTask(rootTask, () -> rootTask.getDescription() + "[" + clusterAlias + "]"); + CountDown countDown = new CountDown(2); + // The group is done when both the sink and the cluster request are done + Runnable finishGroup = () -> { + if (countDown.countDown()) { + taskManager.unregister(groupTask); + delegate.onResponse(null); + } + }; + // Cancel the group on sink failure + exchangeRequestListener = createCancellingListener("exchange sink failure", computeListener.acquireAvoid(), finishGroup); + + // Cancel the group on cluster request failure + clusterRequestListener = createCancellingListener("exchange cluster action failure", computeListener.acquireCompute(), finishGroup); + } + + /** + * Create a listener that: + * 1. Cancels the group task on failure + * 2. Marks the cluster as partial if the error is ignorable, otherwise propagates the error + */ + private ActionListener createCancellingListener(String reason, ActionListener delegate, Runnable finishGroup) { + return ActionListener.runAfter(delegate.delegateResponse((inner, e) -> { + taskManager.cancelTaskAndDescendants(groupTask, reason, true, ActionListener.running(() -> { + EsqlCCSUtils.skipUnavailableListener(delegate, executionInfo, clusterAlias, EsqlExecutionInfo.Cluster.Status.PARTIAL) + .onFailure(e); + })); + }), finishGroup); + } + + public CancellableTask getGroupTask() { + return groupTask; + } + + public ActionListener getExchangeRequestListener() { + return exchangeRequestListener; + } + + public ActionListener> getClusterRequestListener() { + return clusterRequestListener; + } + + private CancellableTask createGroupTask(Task parentTask, Supplier description) { + return (CancellableTask) taskManager.register( + "transport", + "esql_compute_group", + new ComputeGroupTaskRequest(parentTask.taskInfo(transportService.getLocalNode().getId(), false).taskId(), description) + ); + } + + private static class ComputeGroupTaskRequest extends TransportRequest { + private final Supplier parentDescription; + + ComputeGroupTaskRequest(TaskId parentTask, Supplier description) { + this.parentDescription = description; + setParentTask(parentTask); + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + assert parentTaskId.isSet(); + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } + + @Override + public String getDescription() { + return "group [" + parentDescription.get() + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportActionServices.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportActionServices.java new file mode 100644 index 0000000000000..ad112542e000a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportActionServices.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.usage.UsageService; + +public record TransportActionServices( + TransportService transportService, + SearchService searchService, + ExchangeService exchangeService, + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver, + UsageService usageService +) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java index a4007a520ed30..f5f51029ae8a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java @@ -7,7 +7,8 @@ package org.elasticsearch.xpack.esql.plugin; -import org.elasticsearch.ResourceNotFoundException; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ActionFilters; @@ -16,10 +17,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -32,12 +31,11 @@ import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.esql.action.EsqlAsyncStopAction; +import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.action.EsqlQueryTask; import java.io.IOException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; @@ -55,6 +53,8 @@ public class TransportEsqlAsyncStopAction extends HandledTransportAction listener) { String asyncIdStr = asyncId.getEncoded(); - TransportEsqlQueryAction.EsqlQueryListener asyncListener = queryAction.getAsyncListener(asyncIdStr); - if (asyncListener == null) { + EsqlQueryTask asyncTask = getEsqlQueryTask(asyncId); + GetAsyncResultRequest 
getAsyncResultRequest = new GetAsyncResultRequest(asyncIdStr); + if (asyncTask == null) { // This should mean one of the two things: either bad request ID, or the query has already finished // In both cases, let regular async get deal with it. - var getAsyncResultRequest = new GetAsyncResultRequest(asyncIdStr); - // TODO: this should not be happening, but if the listener is not registered and the query is not finished, - // we give it some time to finish - getAsyncResultRequest.setWaitForCompletionTimeout(new TimeValue(1, TimeUnit.SECONDS)); + logger.debug("Async stop for task {}, no task present - passing to GetAsyncResultRequest", asyncIdStr); getResultsAction.execute(task, getAsyncResultRequest, listener); return; } - try { - EsqlQueryTask asyncTask = AsyncTaskIndexService.getTask(taskManager, asyncId, EsqlQueryTask.class); - if (false == security.currentUserHasAccessToTask(asyncTask)) { - throw new ResourceNotFoundException(asyncId + " not found"); + logger.debug("Async stop for task {} - stopping", asyncIdStr); + final EsqlExecutionInfo esqlExecutionInfo = asyncTask.executionInfo(); + if (esqlExecutionInfo != null) { + esqlExecutionInfo.markAsPartial(); + } + Runnable getResults = () -> getResultsAction.execute(task, getAsyncResultRequest, listener); + exchangeService.finishSessionEarly(sessionID(asyncId), ActionListener.running(() -> { + if (asyncTask.addCompletionListener(() -> ActionListener.running(getResults)) == false) { + getResults.run(); } + })); + } + + private EsqlQueryTask getEsqlQueryTask(AsyncExecutionId asyncId) { + try { + return AsyncTaskIndexService.getTaskAndCheckAuthentication(taskManager, security, asyncId, EsqlQueryTask.class); } catch (IOException e) { - throw new ResourceNotFoundException(asyncId + " not found", e); - } - // Here we will wait for both the response to become available and for the finish operation to complete - var responseHolder = new AtomicReference(); - try (var refs = new EsqlRefCountingListener(listener.map(unused -> responseHolder.get()))) { - asyncListener.addListener(refs.acquire().map(r -> { - responseHolder.set(r); - return null; - })); - asyncListener.markAsPartial(); - exchangeService.finishSessionEarly(sessionID(asyncId), refs.acquire()); + return null; } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index a32b4591943f4..5b0dfa14014a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -13,14 +13,12 @@ import org.elasticsearch.action.admin.cluster.stats.CCSUsageTelemetry; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; import 
org.elasticsearch.compute.operator.exchange.ExchangeService; @@ -53,7 +51,6 @@ import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.EsqlSession.PlanRunner; -import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.session.Result; import java.io.IOException; @@ -81,10 +78,8 @@ public class TransportEsqlQueryAction extends HandledTransportAction asyncTaskManagementService; private final RemoteClusterService remoteClusterService; - private final QueryBuilderResolver queryBuilderResolver; private final UsageService usageService; - // Listeners for active async queries, key being the async task execution ID - private final Map asyncListeners = ConcurrentCollections.newConcurrentMap(); + private final TransportActionServices services; @Inject @SuppressWarnings("this-escape") @@ -153,8 +148,16 @@ public TransportEsqlQueryAction( bigArrays ); this.remoteClusterService = transportService.getRemoteClusterService(); - this.queryBuilderResolver = new QueryBuilderResolver(searchService, clusterService, transportService, indexNameExpressionResolver); this.usageService = usageService; + + this.services = new TransportActionServices( + transportService, + searchService, + exchangeService, + clusterService, + indexNameExpressionResolver, + usageService + ); } @Override @@ -183,41 +186,11 @@ private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener } } - // Subscribable listener that can keep track of the EsqlExecutionInfo - // Used to mark an async query as partial if it is stopped - public static class EsqlQueryListener extends SubscribableListener { - private EsqlExecutionInfo executionInfo; - - public EsqlQueryListener(EsqlExecutionInfo executionInfo) { - this.executionInfo = executionInfo; - } - - public EsqlExecutionInfo getExecutionInfo() { - return executionInfo; - } - - public void markAsPartial() { - if (executionInfo != null) { - executionInfo.markAsPartial(); - } - } - } - @Override public void execute(EsqlQueryRequest request, EsqlQueryTask task, ActionListener listener) { // set EsqlExecutionInfo on async-search task so that it is accessible to GET _query/async while the query is still running task.setExecutionInfo(createEsqlExecutionInfo(request)); - // Since the request is async here, we need to wrap the listener in a SubscribableListener so that we can collect the results from - // other endpoints, such as _query/async/stop - EsqlQueryListener subListener = new EsqlQueryListener(task.executionInfo()); - String asyncExecutionId = task.getExecutionId().getEncoded(); - subListener.addListener(ActionListener.runAfter(listener, () -> asyncListeners.remove(asyncExecutionId))); - asyncListeners.put(asyncExecutionId, subListener); - ActionListener.run(subListener, l -> innerExecute(task, request, l)); - } - - public EsqlQueryListener getAsyncListener(String executionId) { - return asyncListeners.get(executionId); + ActionListener.run(listener, l -> innerExecute(task, request, l)); } private void innerExecute(Task task, EsqlQueryRequest request, ActionListener listener) { @@ -258,8 +231,18 @@ private void innerExecute(Task task, EsqlQueryRequest request, ActionListener { + // If we had any skipped or partial clusters, the result is partial + if (executionInfo.getClusters() + .values() + .stream() + .anyMatch( + c -> c.getStatus() == EsqlExecutionInfo.Cluster.Status.SKIPPED + || c.getStatus() == 
EsqlExecutionInfo.Cluster.Status.PARTIAL + )) { + executionInfo.markAsPartial(); + } recordCCSTelemetry(task, executionInfo, request, null); listener.onResponse(toResponse(task, request, configuration, result)); }, ex -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java index 4067fc5a4e065..4e67bb4f7de55 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -34,14 +33,12 @@ public class TransportEsqlStatsAction extends TransportNodesAction< Void> { // the plan executor holds the metrics - private final FeatureService featureService; private final PlanExecutor planExecutor; @Inject public TransportEsqlStatsAction( TransportService transportService, ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, ActionFilters actionFilters, PlanExecutor planExecutor @@ -54,7 +51,6 @@ public TransportEsqlStatsAction( EsqlStatsRequest.NodeStatsRequest::new, threadPool.executor(ThreadPool.Names.MANAGEMENT) ); - this.featureService = featureService; this.planExecutor = planExecutor; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java index a0a9d36c11000..558873fa6e526 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java @@ -180,7 +180,8 @@ protected org.apache.lucene.search.Query doToQuery(SearchExecutionContext contex source.source().getLineNumber(), source.source().getColumnNumber(), source.text() - ) + ), + "single-value function encountered multi-value" ); org.apache.lucene.search.Query rewrite = singleValueQuery.rewrite(context.searcher()); if (rewrite instanceof MatchAllDocsQuery) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtils.java similarity index 85% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtils.java index 6be243456e040..64e5c6647e9ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtils.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.IndicesExpressionGrouper; import 
org.elasticsearch.license.XPackLicenseState; @@ -25,6 +26,7 @@ import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; +import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo.Cluster; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.TableInfo; import org.elasticsearch.xpack.esql.index.IndexResolution; @@ -35,11 +37,12 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; -class EsqlSessionCCSUtils { +public class EsqlCCSUtils { - private EsqlSessionCCSUtils() {} + private EsqlCCSUtils() {} static Map<String, FieldCapabilitiesFailure> determineUnavailableRemoteClusters(List<FieldCapabilitiesFailure> failures) { Map<String, FieldCapabilitiesFailure> unavailableRemotes = new HashMap<>(); @@ -171,16 +174,7 @@ static void updateExecutionInfoWithUnavailableClusters(EsqlExecutionInfo execInf entry.getValue().getException() ); if (skipUnavailable) { - execInfo.swapCluster( - clusterAlias, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SKIPPED) - .setTotalShards(0) - .setSuccessfulShards(0) - .setSkippedShards(0) - .setFailedShards(0) - .setFailures(List.of(new ShardSearchFailure(e))) - .build() - ); + markClusterWithFinalStateAndNoShards(execInfo, clusterAlias, EsqlExecutionInfo.Cluster.Status.SKIPPED, e); } else { throw e; } @@ -338,4 +332,60 @@ public static void checkForCcsLicense( } } } + + /** + * Mark cluster with a final status (success or failure). + * Most metrics are set to 0 if not set yet, except for "took" which is set to the total time taken so far. + * The status must be the final status of the cluster, not RUNNING. + */ + public static void markClusterWithFinalStateAndNoShards( + EsqlExecutionInfo executionInfo, + String clusterAlias, + Cluster.Status status, + @Nullable Exception ex + ) { + assert status != Cluster.Status.RUNNING : "status must be a final state, not RUNNING"; + executionInfo.swapCluster(clusterAlias, (k, v) -> { + Cluster.Builder builder = new Cluster.Builder(v).setStatus(status) + .setTook(executionInfo.tookSoFar()) + .setTotalShards(Objects.requireNonNullElse(v.getTotalShards(), 0)) + .setSuccessfulShards(Objects.requireNonNullElse(v.getSuccessfulShards(), 0)) + .setSkippedShards(Objects.requireNonNullElse(v.getSkippedShards(), 0)) + .setFailedShards(Objects.requireNonNullElse(v.getFailedShards(), 0)); + if (ex != null) { + builder.setFailures(List.of(new ShardSearchFailure(ex))); + } + return builder.build(); + }); + } + + /** + * We will ignore the error if it's remote unavailable and the cluster is marked to skip unavailable.
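The check described here feeds the wrapper declared just below (shouldIgnoreRuntimeError plus skipUnavailableListener), and the combination reduces to one pattern: if the cluster may be skipped and the failure is a remote-unavailability error, record a final cluster status and complete the listener successfully; otherwise propagate the failure. A minimal, dependency-free sketch, where the Listener interface and the isRemoteUnavailable check are stand-ins rather than the Elasticsearch API:

```java
import java.util.function.Consumer;

// Stand-in for ActionListener<Void>.
interface Listener { void onResponse(Void v); void onFailure(Exception e); }

public class SkipUnavailableSketch {
    static Listener wrap(Listener delegate, boolean skipUnavailable, Consumer<Exception> markSkipped) {
        return new Listener() {
            @Override public void onResponse(Void v) { delegate.onResponse(v); }
            @Override public void onFailure(Exception e) {
                if (skipUnavailable && isRemoteUnavailable(e)) {
                    markSkipped.accept(e);     // record SKIPPED/PARTIAL with the failure attached
                    delegate.onResponse(null); // and report success to the caller
                } else {
                    delegate.onFailure(e);     // any other error still fails the query
                }
            }
        };
    }

    static boolean isRemoteUnavailable(Exception e) {
        // stand-in for ExceptionsHelper.isRemoteUnavailableException
        return e instanceof java.net.ConnectException;
    }

    public static void main(String[] args) {
        Listener root = new Listener() {
            @Override public void onResponse(Void v) { System.out.println("cluster reported as done"); }
            @Override public void onFailure(Exception e) { System.out.println("query fails: " + e); }
        };
        wrap(root, true, e -> System.out.println("marked SKIPPED: " + e))
            .onFailure(new java.net.ConnectException("remote down"));
    }
}
```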
+ */ + public static boolean shouldIgnoreRuntimeError(EsqlExecutionInfo executionInfo, String clusterAlias, Exception e) { + if (executionInfo.isSkipUnavailable(clusterAlias) == false) { + return false; + } + + return ExceptionsHelper.isRemoteUnavailableException(e); + } + + /** + * Wrap a listener so that it will skip errors that are ignorable + */ + public static ActionListener skipUnavailableListener( + ActionListener delegate, + EsqlExecutionInfo executionInfo, + String clusterAlias, + EsqlExecutionInfo.Cluster.Status status + ) { + return delegate.delegateResponse((l, e) -> { + if (shouldIgnoreRuntimeError(executionInfo, clusterAlias, e)) { + markClusterWithFinalStateAndNoShards(executionInfo, clusterAlias, status, e); + l.onResponse(null); + } else { + l.onFailure(e); + } + }); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 0505955e450d7..04ee667833757 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -73,7 +73,9 @@ import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; +import org.elasticsearch.xpack.esql.planner.premapper.PreMapper; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.ArrayList; import java.util.Arrays; @@ -109,12 +111,12 @@ public interface PlanRunner { private final Verifier verifier; private final EsqlFunctionRegistry functionRegistry; private final LogicalPlanOptimizer logicalPlanOptimizer; + private final PreMapper preMapper; private final Mapper mapper; private final PhysicalPlanOptimizer physicalPlanOptimizer; - private final PlanningMetrics planningMetrics; + private final PlanTelemetry planTelemetry; private final IndicesExpressionGrouper indicesExpressionGrouper; - private final QueryBuilderResolver queryBuilderResolver; public EsqlSession( String sessionId, @@ -126,9 +128,9 @@ public EsqlSession( LogicalPlanOptimizer logicalPlanOptimizer, Mapper mapper, Verifier verifier, - PlanningMetrics planningMetrics, + PlanTelemetry planTelemetry, IndicesExpressionGrouper indicesExpressionGrouper, - QueryBuilderResolver queryBuilderResolver + TransportActionServices services ) { this.sessionId = sessionId; this.configuration = configuration; @@ -140,9 +142,9 @@ public EsqlSession( this.mapper = mapper; this.logicalPlanOptimizer = logicalPlanOptimizer; this.physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration)); - this.planningMetrics = planningMetrics; + this.planTelemetry = planTelemetry; this.indicesExpressionGrouper = indicesExpressionGrouper; - this.queryBuilderResolver = queryBuilderResolver; + this.preMapper = new PreMapper(services); } public String sessionId() { @@ -159,19 +161,15 @@ public void execute(EsqlQueryRequest request, EsqlExecutionInfo executionInfo, P parse(request.query(), request.params()), executionInfo, request.filter(), - new EsqlSessionCCSUtils.CssPartialErrorsActionListener(executionInfo, listener) { + new EsqlCCSUtils.CssPartialErrorsActionListener(executionInfo, listener) { @Override public void onResponse(LogicalPlan 
analyzedPlan) { - try { - var optimizedPlan = optimizedPlan(analyzedPlan); - queryBuilderResolver.resolveQueryBuilders( - optimizedPlan, - listener, - (newPlan, next) -> executeOptimizedPlan(request, executionInfo, planRunner, newPlan, next) - ); - } catch (Exception e) { - listener.onFailure(e); - } + preMapper.preMapper( + analyzedPlan, + listener.delegateFailureAndWrap( + (l, p) -> executeOptimizedPlan(request, executionInfo, planRunner, optimizedPlan(p), l) + ) + ); } } ); @@ -190,7 +188,7 @@ public void executeOptimizedPlan( ) { PhysicalPlan physicalPlan = logicalPlanToPhysicalPlan(optimizedPlan, request); // TODO: this could be snuck into the underlying listener - EsqlSessionCCSUtils.updateExecutionInfoAtEndOfPlanning(executionInfo); + EsqlCCSUtils.updateExecutionInfoAtEndOfPlanning(executionInfo); // execute any potential subplans executeSubPlans(physicalPlan, planRunner, executionInfo, request, listener); } @@ -280,7 +278,7 @@ private LocalRelation resultToPlan(LogicalPlan plan, Result result) { } private LogicalPlan parse(String query, QueryParams params) { - var parsed = new EsqlParser().createStatement(query, params); + var parsed = new EsqlParser().createStatement(query, params, planTelemetry); LOGGER.debug("Parsed logical plan:\n{}", parsed); return parsed; } @@ -297,7 +295,6 @@ public void analyzedPlan( } Function analyzeAction = (l) -> { - planningMetrics.gatherPreAnalysisMetrics(parsed); Analyzer analyzer = new Analyzer( new AnalyzerContext(configuration, functionRegistry, l.indices, l.lookupIndices, l.enrichResolution), verifier @@ -318,7 +315,7 @@ public void analyzedPlan( .collect(Collectors.toSet()); final List indices = preAnalysis.indices; - EsqlSessionCCSUtils.checkForCcsLicense(executionInfo, indices, indicesExpressionGrouper, verifier.licenseState()); + EsqlCCSUtils.checkForCcsLicense(executionInfo, indices, indicesExpressionGrouper, verifier.licenseState()); final Set targetClusters = enrichPolicyResolver.groupIndicesPerCluster( indices.stream() @@ -433,7 +430,7 @@ private void preAnalyzeIndices( } // if the preceding call to the enrich policy API found unavailable clusters, recreate the index expression to search // based only on available clusters (which could now be an empty list) - String indexExpressionToResolve = EsqlSessionCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo); + String indexExpressionToResolve = EsqlCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo); if (indexExpressionToResolve.isEmpty()) { // if this was a pure remote CCS request (no local indices) and all remotes are offline, return an empty IndexResolution listener.onResponse( @@ -467,8 +464,8 @@ private boolean analyzeCCSIndices( ActionListener l ) { IndexResolution indexResolution = result.indices; - EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); - EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.unavailableClusters()); + EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); + EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.unavailableClusters()); if (executionInfo.isCrossClusterSearch() && executionInfo.getClusterStates(EsqlExecutionInfo.Cluster.Status.RUNNING).findAny().isEmpty()) { // for a CCS, if all clusters have been marked as SKIPPED, nothing to search so send a sentinel Exception @@ -624,10 +621,11 @@ static PreAnalysisResult fieldNames(LogicalPlan parsed, Set 
enrichPolicy // for example "from test | eval x = salary | stats max = max(x) by gender" // remove the UnresolvedAttribute "x", since that is an Alias defined in "eval" AttributeSet planRefs = p.references(); + Set fieldNames = planRefs.names(); p.forEachExpressionDown(Alias.class, alias -> { // do not remove the UnresolvedAttribute that has the same name as its alias, ie "rename id = id" // or the UnresolvedAttributes that are used in Functions that have aliases "STATS id = MAX(id)" - if (planRefs.names().contains(alias.name())) { + if (fieldNames.contains(alias.name())) { return; } references.removeIf(attr -> matchByName(attr, alias.name(), keepCommandReferences.contains(attr))); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java index b11a8580a1e18..3e59b5218e7f8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.esql.action.EsqlResolveFieldsAction; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DateEsField; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -50,7 +51,7 @@ public class IndexResolver { public static final Set ALL_FIELDS = Set.of("*"); - public static final Set INDEX_METADATA_FIELD = Set.of("_index"); + public static final Set INDEX_METADATA_FIELD = Set.of(MetadataAttribute.INDEX); public static final String UNMAPPED = "unmapped"; public static final IndicesOptions FIELD_CAPS_INDICES_OPTIONS = IndicesOptions.builder() @@ -139,7 +140,7 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp fields.put(name, field); } - Map unavailableRemotes = EsqlSessionCCSUtils.determineUnavailableRemoteClusters( + Map unavailableRemotes = EsqlCCSUtils.determineUnavailableRemoteClusters( fieldCapsResponse.getFailures() ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java deleted file mode 100644 index 7db81069f9d3c..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.session; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ResolvedIndices; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.Rewriteable; -import org.elasticsearch.search.SearchService; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.esql.core.util.Holder; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.plan.logical.EsRelation; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; - -import java.io.IOException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.function.BiConsumer; - -import static org.elasticsearch.xpack.esql.planner.TranslatorHandler.TRANSLATOR_HANDLER; - -/** - * Some {@link FullTextFunction} implementations such as {@link org.elasticsearch.xpack.esql.expression.function.fulltext.Match} - * will be translated to a {@link QueryBuilder} that require a rewrite phase on the coordinator. - * {@link QueryBuilderResolver#resolveQueryBuilders(LogicalPlan, ActionListener, BiConsumer)} will rewrite the plan by replacing - * {@link FullTextFunction} expression with new ones that hold rewritten {@link QueryBuilder}s. - */ -public class QueryBuilderResolver { - private final SearchService searchService; - private final ClusterService clusterService; - private final TransportService transportService; - private final IndexNameExpressionResolver indexNameExpressionResolver; - - public QueryBuilderResolver( - SearchService searchService, - ClusterService clusterService, - TransportService transportService, - IndexNameExpressionResolver indexNameExpressionResolver - ) { - this.searchService = searchService; - this.clusterService = clusterService; - this.transportService = transportService; - this.indexNameExpressionResolver = indexNameExpressionResolver; - } - - public void resolveQueryBuilders( - LogicalPlan plan, - ActionListener listener, - BiConsumer> callback - ) { - if (plan.optimized() == false) { - listener.onFailure(new IllegalStateException("Expected optimized plan before query builder rewrite.")); - return; - } - - Set unresolved = fullTextFunctions(plan); - Set indexNames = indexNames(plan); - - if (indexNames == null || indexNames.isEmpty() || unresolved.isEmpty()) { - callback.accept(plan, listener); - return; - } - QueryRewriteContext ctx = queryRewriteContext(indexNames); - FullTextFunctionsRewritable rewritable = new FullTextFunctionsRewritable(unresolved); - Rewriteable.rewriteAndFetch(rewritable, ctx, new ActionListener() { - @Override - public void onResponse(FullTextFunctionsRewritable fullTextFunctionsRewritable) { - try { - LogicalPlan newPlan = planWithResolvedQueryBuilders(plan, fullTextFunctionsRewritable.results()); - callback.accept(newPlan, listener); - } catch (Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - } - - private Set fullTextFunctions(LogicalPlan plan) { - Set functions = new HashSet<>(); - plan.forEachExpressionDown(FullTextFunction.class, func -> functions.add(func)); - return functions; - } - - public Set indexNames(LogicalPlan plan) { - Holder> indexNames = new Holder<>(); - 
plan.forEachDown(EsRelation.class, esRelation -> indexNames.set(esRelation.concreteIndices())); - return indexNames.get(); - } - - public LogicalPlan planWithResolvedQueryBuilders(LogicalPlan plan, Map newQueryBuilders) { - LogicalPlan newPlan = plan.transformExpressionsDown(FullTextFunction.class, m -> { - if (newQueryBuilders.keySet().contains(m)) { - return m.replaceQueryBuilder(newQueryBuilders.get(m)); - } - return m; - }); - // The given plan was already analyzed and optimized, so we set the resulted plan to optimized as well. - newPlan.setOptimized(); - return newPlan; - } - - private QueryRewriteContext queryRewriteContext(Set indexNames) { - ResolvedIndices resolvedIndices = ResolvedIndices.resolveWithIndexNamesAndOptions( - indexNames.toArray(String[]::new), - IndexResolver.FIELD_CAPS_INDICES_OPTIONS, - clusterService.state(), - indexNameExpressionResolver, - transportService.getRemoteClusterService(), - System.currentTimeMillis() - ); - - return searchService.getRewriteContext(() -> System.currentTimeMillis(), resolvedIndices, null); - } - - private class FullTextFunctionsRewritable implements Rewriteable { - - private final Map queryBuilderMap; - - FullTextFunctionsRewritable(Map queryBuilderMap) { - this.queryBuilderMap = queryBuilderMap; - } - - FullTextFunctionsRewritable(Set functions) { - this.queryBuilderMap = new HashMap<>(); - - for (FullTextFunction func : functions) { - queryBuilderMap.put(func, TRANSLATOR_HANDLER.asQuery(func).asBuilder()); - } - } - - @Override - public FullTextFunctionsRewritable rewrite(QueryRewriteContext ctx) throws IOException { - Map results = new HashMap<>(); - - boolean hasChanged = false; - for (var entry : queryBuilderMap.entrySet()) { - var initial = entry.getValue(); - var rewritten = initial.rewrite(ctx); - hasChanged |= rewritten != initial; - - results.put(entry.getKey(), rewritten); - } - - return hasChanged ? new FullTextFunctionsRewritable(results) : this; - } - - public Map results() { - return queryBuilderMap; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java deleted file mode 100644 index 7b452e50fd525..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.stats; - -import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; - -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; - -/** - * This class is responsible for collecting metrics related to ES|QL planning. - */ -public class PlanningMetrics { - private Map commands = new HashMap<>(); - private Map functions = new HashMap<>(); - - public void gatherPreAnalysisMetrics(LogicalPlan plan) { - plan.forEachDown(p -> add(commands, p.commandName())); - plan.forEachExpressionDown(UnresolvedFunction.class, p -> add(functions, p.name().toUpperCase(Locale.ROOT))); - } - - private void add(Map map, String key) { - Integer cmd = map.get(key); - map.put(key, cmd == null ? 
1 : cmd + 1); - } - - public Map commands() { - return commands; - } - - public Map functions() { - return functions; - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java similarity index 98% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java index 4cae2a9c247f3..3a36f5b0d7c04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java similarity index 99% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java index 092fecb3142db..b8962b47809a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.Maps; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java new file mode 100644 index 0000000000000..10b48c243d3b1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.telemetry; + +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; +import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.util.Check; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; + +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +/** + * This class is responsible for collecting metrics related to ES|QL planning. + */ +public class PlanTelemetry { + private final EsqlFunctionRegistry functionRegistry; + private final Map commands = new HashMap<>(); + private final Map functions = new HashMap<>(); + + public PlanTelemetry(EsqlFunctionRegistry functionRegistry) { + this.functionRegistry = functionRegistry; + } + + private void add(Map map, String key) { + map.compute(key.toUpperCase(Locale.ROOT), (k, count) -> count == null ? 
1 : count + 1); + } + + public void command(TelemetryAware command) { + Check.notNull(command.telemetryLabel(), "TelemetryAware [{}] has no telemetry label", command); + add(commands, command.telemetryLabel()); + } + + public void function(String name) { + var functionName = functionRegistry.resolveAlias(name); + if (functionRegistry.functionExists(functionName)) { + // The metrics have been collected initially with their uppercase spelling + add(functions, functionName); + } + } + + public void function(Class clazz) { + add(functions, functionRegistry.functionName(clazz)); + } + + public Map commands() { + return commands; + } + + public Map functions() { + return functions; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java similarity index 89% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java index a2d00a1f530e9..2cd536daf389c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -17,7 +17,7 @@ * * @see METERING */ -public class PlanningMetricsManager { +public class PlanTelemetryManager { // APM counters private final LongCounter featuresCounter; @@ -59,7 +59,7 @@ public class PlanningMetricsManager { */ public static final String SUCCESS = "success"; - public PlanningMetricsManager(MeterRegistry meterRegistry) { + public PlanTelemetryManager(MeterRegistry meterRegistry) { featuresCounter = meterRegistry.registerLongCounter( FEATURE_METRICS, "ESQL features, total number of queries that use them", @@ -77,9 +77,9 @@ public PlanningMetricsManager(MeterRegistry meterRegistry) { /** * Publishes the collected metrics to the meter registry */ - public void publish(PlanningMetrics metrics, boolean success) { - metrics.commands().entrySet().forEach(x -> incCommand(x.getKey(), x.getValue(), success)); - metrics.functions().entrySet().forEach(x -> incFunction(x.getKey(), x.getValue(), success)); + public void publish(PlanTelemetry metrics, boolean success) { + metrics.commands().forEach((key, value) -> incCommand(key, value, success)); + metrics.functions().forEach((key, value) -> incFunction(key, value, success)); } private void incCommand(String name, int count, boolean success) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java similarity index 93% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java index e862006d058ac..567b4b0a84937 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import java.util.Locale; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 350befc219f6e..1e21aa3774af7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -74,6 +74,7 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.physical.ChangePointExec; import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; @@ -90,7 +91,7 @@ import org.elasticsearch.xpack.esql.session.EsqlSession.PlanRunner; import org.elasticsearch.xpack.esql.session.Result; import org.elasticsearch.xpack.esql.stats.DisabledSearchStats; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import org.junit.After; import org.junit.Before; import org.mockito.Mockito; @@ -514,9 +515,9 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { new LogicalPlanOptimizer(new LogicalOptimizerContext(configuration, foldCtx)), mapper, TEST_VERIFIER, - new PlanningMetrics(), + new PlanTelemetry(functionRegistry), null, - EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER + EsqlTestUtils.MOCK_TRANSPORT_ACTION_SERVICES ); TestPhysicalOperationProviders physicalOperationProviders = testOperationProviders(foldCtx, testDatasets); @@ -564,12 +565,9 @@ private Throwable reworkException(Throwable th) { // Asserts that the serialization and deserialization of the plan creates an equivalent plan. 
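The helper below relies on a write/read/compare round trip. A self-contained sketch of that idea, using plain Java serialization as a stand-in for Elasticsearch's StreamOutput/StreamInput wire format and SerializationTestUtils.assertSerialization:

```java
import java.io.*;

public class RoundTripAssert {
    // Serialize the object, deserialize it, and require the copy to be equal.
    static <T extends Serializable> void assertRoundTrip(T original) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(original);
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            Object copy = in.readObject();
            if (original.equals(copy) == false) {
                throw new AssertionError("round trip changed the object: " + original + " vs " + copy);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // Plans that cannot serialize (e.g. those containing LocalSourceExec,
        // HashJoinExec, or now ChangePointExec) are simply skipped in the real test.
        assertRoundTrip("physical plan stand-in");
    }
}
```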
private void opportunisticallyAssertPlanSerialization(PhysicalPlan plan) { - - // skip plans with localSourceExec - if (plan.anyMatch(p -> p instanceof LocalSourceExec || p instanceof HashJoinExec)) { + if (plan.anyMatch(p -> p instanceof LocalSourceExec || p instanceof HashJoinExec || p instanceof ChangePointExec)) { return; } - SerializationTestUtils.assertSerialization(plan, configuration); } @@ -615,7 +613,7 @@ void executeSubPlan( bigArrays, ByteSizeValue.ofBytes(randomLongBetween(1, BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE.getBytes() * 2)) ); - ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler(between(1, 64), executor, ActionListener.noop()); + ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler(between(1, 64), executor); ExchangeSinkHandler exchangeSink = new ExchangeSinkHandler(blockFactory, between(1, 64), threadPool::relativeTimeInMillis); LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner( @@ -639,6 +637,7 @@ void executeSubPlan( // replace fragment inside the coordinator plan List drivers = new ArrayList<>(); LocalExecutionPlan coordinatorNodeExecutionPlan = executionPlanner.plan( + "final", foldCtx, new OutputExec(coordinatorPlan, collectedPages::add) ); @@ -660,7 +659,7 @@ void executeSubPlan( throw new AssertionError("expected no failure", e); }) ); - LocalExecutionPlan dataNodeExecutionPlan = executionPlanner.plan(foldCtx, csvDataNodePhysicalPlan); + LocalExecutionPlan dataNodeExecutionPlan = executionPlanner.plan("data", foldCtx, csvDataNodePhysicalPlan); drivers.addAll(dataNodeExecutionPlan.createDrivers(getTestName())); Randomness.shuffle(drivers); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java index a1ae1f43ef877..df8cacff67f44 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java @@ -7,8 +7,16 @@ package org.elasticsearch.xpack.esql; +import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomFrom; +import static org.elasticsearch.test.ESTestCase.randomInt; +import static org.elasticsearch.test.ESTestCase.randomIntBetween; +import static org.elasticsearch.test.ESTestCase.randomList; +import static org.elasticsearch.test.ESTestCase.randomValueOtherThan; + public class IdentifierGenerator { /** @@ -22,7 +30,7 @@ public static String randomIdentifier() { * Generates one or several coma separated index patterns */ public static String randomIndexPatterns(Feature... features) { - return maybeQuote(String.join(",", ESTestCase.randomList(1, 5, () -> randomIndexPattern(features)))); + return maybeQuote(String.join(",", randomList(1, 5, () -> randomIndexPattern(features)))); } /** @@ -40,45 +48,62 @@ public static String randomIndexPattern(Feature... 
features) { index.append('.'); } index.append(randomCharacterFrom(validFirstCharacters)); - for (int i = 0; i < ESTestCase.randomIntBetween(1, 100); i++) { + for (int i = 0; i < randomIntBetween(1, 100); i++) { index.append(randomCharacterFrom(validCharacters)); } if (canAdd(Features.WILDCARD_PATTERN, features)) { - if (ESTestCase.randomBoolean()) { + if (randomBoolean()) { index.append('*'); } else { - index.insert(ESTestCase.randomIntBetween(0, index.length() - 1), '*'); + for (int i = 0; i < randomIntBetween(1, 3); i++) { + index.insert(randomIntBetween(0, index.length()), '*'); + } } - } else if (canAdd(Features.DATE_MATH, features)) { + } + if (canAdd(Features.DATE_MATH, features)) { // https://www.elastic.co/guide/en/elasticsearch/reference/8.17/api-conventions.html#api-date-math-index-names index.insert(0, "<"); index.append("-{now/"); - index.append(ESTestCase.randomFrom("d", "M", "M-1M")); - if (ESTestCase.randomBoolean()) { - index.append("{").append(ESTestCase.randomFrom("yyyy.MM", "yyyy.MM.dd")).append("}"); + index.append(randomFrom("d", "M", "M-1M")); + if (randomBoolean()) { + index.append("{").append(switch (randomIntBetween(0, 2)) { + case 0 -> "yyyy.MM"; + case 1 -> "yyyy.MM.dd"; + default -> "yyyy.MM.dd|" + Strings.format("%+03d", randomValueOtherThan(0, () -> randomIntBetween(-18, 18))) + ":00"; + }).append("}"); } index.append("}>"); } + if (canAdd(Features.EXCLUDE_PATTERN, features)) { + index.insert(0, "-"); + } + + var pattern = index.toString(); + if (pattern.contains("|")) { + pattern = quote(pattern); + } + pattern = maybeQuote(pattern); - var pattern = maybeQuote(index.toString()); if (canAdd(Features.CROSS_CLUSTER, features)) { - var cluster = randomIdentifier(); + var cluster = maybeQuote(randomIdentifier()); pattern = maybeQuote(cluster + ":" + pattern); } + return pattern; } private static char randomCharacterFrom(String str) { - return str.charAt(ESTestCase.randomInt(str.length() - 1)); + return str.charAt(randomInt(str.length() - 1)); } public interface Feature {} public enum Features implements Feature { CROSS_CLUSTER, + HIDDEN_INDEX, WILDCARD_PATTERN, - DATE_MATH, - HIDDEN_INDEX + EXCLUDE_PATTERN, + DATE_MATH } private record ExcludedFeature(Feature feature) implements Feature {} @@ -96,18 +121,16 @@ private static boolean canAdd(Feature feature, Feature... features) { return false; } } - return ESTestCase.randomBoolean(); + return randomBoolean(); } public static String maybeQuote(String term) { - if (term.contains("\"")) { - return term; - } - return switch (ESTestCase.randomIntBetween(0, 5)) { - case 0 -> "\"" + term + "\""; - case 1 -> "\"\"\"" + term + "\"\"\""; - default -> term;// no quotes are more likely - }; + return randomBoolean() && term.contains("\"") == false ? 
quote(term) : term; + } + + public static String quote(String term) { + var quote = randomFrom("\"", "\"\"\""); + return quote + term + quote; } public static String unquoteIndexPattern(String term) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index dcb83dadfcf96..698291a54fa68 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -144,10 +144,6 @@ public void testNamedParams() throws IOException { } public void testNamedParamsForIdentifiersPatterns() throws IOException { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); String query = randomAlphaOfLengthBetween(1, 100); boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); @@ -260,10 +256,6 @@ public void testInvalidParams() throws IOException { } public void testInvalidParamsForIdentifiersPatterns() throws IOException { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); String query = randomAlphaOfLengthBetween(1, 100); boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java index ebfe1c8147073..cc4e70632d678 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.action; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.data.BlockWritables; @@ -49,6 +51,7 @@ private List randomDriverProfiles() { private DriverProfile randomDriverProfile() { return new DriverProfile( + RandomStrings.randomAsciiLettersOfLength(random(), 5), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 69e6d97c6daed..065495cbad937 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.transport.RemoteClusterAware; @@ -175,7 +176,8 @@ private ColumnInfoImpl randomColumnInfo() { t -> false == 
DataType.isPrimitiveAndSupported(t) || t == DataType.DATE_PERIOD || t == DataType.TIME_DURATION - || t == DataType.PARTIAL_AGG, + || t == DataType.PARTIAL_AGG + || t == DataType.AGGREGATE_METRIC_DOUBLE, () -> randomFrom(DataType.types()) ).widenSmallNumeric(); return new ColumnInfoImpl(randomAlphaOfLength(10), type.esType()); @@ -214,6 +216,13 @@ private Page randomPage(List columns) { case CARTESIAN_SHAPE -> ((BytesRefBlock.Builder) builder).appendBytesRef( CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(randomBoolean())) ); + case AGGREGATE_METRIC_DOUBLE -> { + BlockLoader.AggregateMetricDoubleBuilder aggBuilder = (BlockLoader.AggregateMetricDoubleBuilder) builder; + aggBuilder.min().appendDouble(randomDouble()); + aggBuilder.max().appendDouble(randomDouble()); + aggBuilder.sum().appendDouble(randomDouble()); + aggBuilder.count().appendInt(randomInt()); + } case NULL -> builder.appendNull(); case SOURCE -> { try { @@ -714,6 +723,7 @@ public void testProfileXContent() { new EsqlQueryResponse.Profile( List.of( new DriverProfile( + "test", 1723489812649L, 1723489819929L, 20021, @@ -748,6 +758,7 @@ public void testProfileXContent() { "profile" : { "drivers" : [ { + "task_description" : "test", "start_millis" : 1723489812649, "stop_millis" : 1723489819929, "took_nanos" : 20021, @@ -939,6 +950,13 @@ static Page valuesToPage(BlockFactory blockFactory, List columns BytesRef wkb = stringToSpatial(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } + case AGGREGATE_METRIC_DOUBLE -> { + BlockLoader.AggregateMetricDoubleBuilder aggBuilder = (BlockLoader.AggregateMetricDoubleBuilder) builder; + aggBuilder.min().appendDouble(((Number) value).doubleValue()); + aggBuilder.max().appendDouble(((Number) value).doubleValue()); + aggBuilder.sum().appendDouble(((Number) value).doubleValue()); + aggBuilder.count().appendInt(((Number) value).intValue()); + } } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 151a91b587c1b..187c83cd5f249 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1961,7 +1961,7 @@ public void testUnsupportedTypesInStats() { found value [x] type [unsigned_long] line 2:96: first argument of [percentile(x, 10)] must be [numeric except unsigned_long],\ found value [x] type [unsigned_long] - line 2:115: argument of [sum(x)] must be [numeric except unsigned_long or counter types],\ + line 2:115: argument of [sum(x)] must be [aggregate_metric_double or numeric except unsigned_long or counter types],\ found value [x] type [unsigned_long]"""); verifyUnsupported(""" @@ -1976,7 +1976,8 @@ public void testUnsupportedTypesInStats() { line 2:29: argument of [median_absolute_deviation(x)] must be [numeric except unsigned_long or counter types],\ found value [x] type [version] line 2:59: first argument of [percentile(x, 10)] must be [numeric except unsigned_long], found value [x] type [version] - line 2:78: argument of [sum(x)] must be [numeric except unsigned_long or counter types], found value [x] type [version]"""); + line 2:78: argument of [sum(x)] must be [aggregate_metric_double or numeric except unsigned_long or counter types],\ + found value [x] type [version]"""); } public void testInOnText() { @@ -2343,10 +2344,6 @@ public void testCoalesceWithMixedNumericTypes() { } 
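The AGGREGATE_METRIC_DOUBLE branches added above fan a single numeric value out into the type's four sub-metrics. A hypothetical sketch of that decomposition; the record is illustrative only, while the real code appends to the min/max/sum/count sub-builders of BlockLoader.AggregateMetricDoubleBuilder:

```java
public class AggregateMetricDoubleSketch {
    // An aggregate_metric_double value is stored as four parallel sub-values.
    record AggregateMetricDouble(double min, double max, double sum, int count) {}

    static AggregateMetricDouble fromSingleValue(Number value) {
        double d = value.doubleValue();
        // mirrors aggBuilder.min()/max()/sum().appendDouble(d) and aggBuilder.count().appendInt(...)
        return new AggregateMetricDouble(d, d, d, value.intValue());
    }

    public static void main(String[] args) {
        // prints AggregateMetricDouble[min=42.5, max=42.5, sum=42.5, count=42]
        System.out.println(fromSingleValue(42.5));
    }
}
```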
public void testNamedParamsForIdentifiers() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); assertProjectionWithMapping( """ from test @@ -2437,10 +2434,6 @@ public void testNamedParamsForIdentifiers() { } public void testInvalidNamedParamsForIdentifiers() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // missing field assertError( """ @@ -2510,10 +2503,6 @@ public void testInvalidNamedParamsForIdentifiers() { } public void testNamedParamsForIdentifierPatterns() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); assertProjectionWithMapping( """ from test @@ -2544,10 +2533,6 @@ public void testNamedParamsForIdentifierPatterns() { } public void testInvalidNamedParamsForIdentifierPatterns() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // missing pattern assertError( """ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 291a10d570093..8be008be7a81b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.analysis; import org.elasticsearch.Build; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.VerificationException; @@ -36,11 +37,24 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.paramAsConstant; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static 
org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -759,7 +773,7 @@ public void testUnsignedLongNegation() { public void testSumOnDate() { assertEquals( - "1:19: argument of [sum(hire_date)] must be [numeric except unsigned_long or counter types]," + "1:19: argument of [sum(hire_date)] must be [aggregate_metric_double or numeric except unsigned_long or counter types]," + " found value [hire_date] type [datetime]", error("from test | stats sum(hire_date)") ); @@ -1186,9 +1200,9 @@ public void testWeightedAvg() { public void testMatchInsideEval() throws Exception { assumeTrue("Match operator is available just for snapshots", Build.current().isSnapshot()); - assertEquals( - "1:36: [:] operator is only supported in WHERE commands", + "1:36: [:] operator is only supported in WHERE commands\n" + + "line 1:36: [:] operator cannot operate on [title], which is not a field from an index mapping", error("row title = \"brown fox\" | eval x = title:\"fox\" ") ); } @@ -1217,6 +1231,25 @@ public void testMatchFunctionAndOperatorHaveCorrectErrorMessages() throws Except assertEquals("1:24: [:] operator cannot be used after LIMIT", error("from test | limit 10 | where first_name : \"Anna\"")); } + // These should pass eventually once we lift some restrictions on match function + public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { + assertEquals( + "1:67: [MATCH] function cannot operate on [initial], which is not a field from an index mapping", + error("from test | eval initial = substring(first_name, 1) | where match(initial, \"A\")") + ); + assertEquals( + "1:67: [MATCH] function cannot operate on [text], which is not a field from an index mapping", + error("from test | eval text=concat(first_name, last_name) | where match(text, \"cat\")") + ); + } + + public void testMatchFunctionIsNotNullable() { + assertEquals( + "1:48: [MATCH] function cannot operate on [text::keyword], which is not a field from an index mapping", + error("row n = null | eval text = n + 5 | where match(text::keyword, \"Anna\")") + ); + } + public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { // Source commands assertEquals("1:13: [QSTR] function cannot be used after SHOW", error("show info | where qstr(\"8.16.0\")")); @@ -1984,6 +2017,57 @@ public void testCategorizeWithFilteredAggregations() { ); } + public void testChangePoint() { + assumeTrue("change_point must be enabled", EsqlCapabilities.Cap.CHANGE_POINT.isEnabled()); + var airports = AnalyzerTestUtils.analyzer(loadMapping("mapping-airports.json", "airports")); + assertEquals("1:30: Unknown column [blahblah]", error("FROM airports | CHANGE_POINT blahblah ON scalerank", airports)); + assertEquals("1:43: Unknown column [blahblah]", error("FROM airports | CHANGE_POINT scalerank ON blahblah", airports)); + // TODO: nicer error message for missing default column "@timestamp" + assertEquals("1:17: Unknown column [@timestamp]", error("FROM airports | CHANGE_POINT scalerank", airports)); + } + + public void testChangePoint_keySortable() { + assumeTrue("change_point must be enabled", EsqlCapabilities.Cap.CHANGE_POINT.isEnabled()); + List sortableTypes = List.of(BOOLEAN, DOUBLE, DATE_NANOS, DATETIME, INTEGER, IP, KEYWORD, LONG, UNSIGNED_LONG, VERSION); + List unsortableTypes = List.of(CARTESIAN_POINT, CARTESIAN_SHAPE, 
GEO_POINT, GEO_SHAPE); + for (DataType type : sortableTypes) { + query(Strings.format("ROW key=NULL::%s, value=0\n | CHANGE_POINT value ON key", type)); + } + for (DataType type : unsortableTypes) { + assertEquals( + "2:4: change point key [key] must be sortable", + error(Strings.format("ROW key=NULL::%s, value=0\n | CHANGE_POINT value ON key", type)) + ); + } + } + + public void testChangePoint_valueNumeric() { + assumeTrue("change_point must be enabled", EsqlCapabilities.Cap.CHANGE_POINT.isEnabled()); + List numericTypes = List.of(DOUBLE, INTEGER, LONG, UNSIGNED_LONG); + List nonNumericTypes = List.of( + BOOLEAN, + CARTESIAN_POINT, + CARTESIAN_SHAPE, + DATE_NANOS, + DATETIME, + GEO_POINT, + GEO_SHAPE, + IP, + KEYWORD, + VERSION + ); + for (DataType type : numericTypes) { + query(Strings.format("ROW key=0, value=NULL::%s\n | CHANGE_POINT value ON key", type)); + } + for (DataType type : nonNumericTypes) { + assertEquals( + "2:4: change point value [value] must be numeric", + error(Strings.format("ROW key=0, value=NULL::%s\n | CHANGE_POINT value ON key", type)) + ); + } + assertEquals("2:4: change point value [value] must be numeric", error("ROW key=0, value=NULL\n | CHANGE_POINT value ON key")); + } + public void testSortByAggregate() { assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | SORT count(*)")); assertEquals("1:28: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | SORT to_string(count(*))")); @@ -2017,13 +2101,7 @@ public void testMatchOptions() { query("FROM test | WHERE match(first_name, \"Jean\", {\"auto_generate_synonyms_phrase_query\": true})"); // Check all data types for available options - DataType[] optionTypes = new DataType[] { - DataType.INTEGER, - DataType.LONG, - DataType.FLOAT, - DataType.DOUBLE, - DataType.KEYWORD, - DataType.BOOLEAN }; + DataType[] optionTypes = new DataType[] { INTEGER, LONG, FLOAT, DOUBLE, KEYWORD, BOOLEAN }; for (Map.Entry allowedOptions : Match.ALLOWED_OPTIONS.entrySet()) { String optionName = allowedOptions.getKey(); DataType optionType = allowedOptions.getValue(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichOperatorStatusTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichOperatorStatusTests.java index 4fc67f85cc062..5f30cc860cae1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichOperatorStatusTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichOperatorStatusTests.java @@ -41,25 +41,25 @@ protected EnrichLookupOperator.Status mutateInstance(EnrichLookupOperator.Status randomValueOtherThan(in.receivedPages(), ESTestCase::randomNonNegativeLong), in.completedPages(), in.totalTerms, - in.totalTimeInMillis() + in.procesNanos() ); case 1 -> new EnrichLookupOperator.Status( in.receivedPages(), randomValueOtherThan(in.completedPages(), ESTestCase::randomNonNegativeLong), in.totalTerms, - in.totalTimeInMillis() + in.procesNanos() ); case 2 -> new EnrichLookupOperator.Status( in.receivedPages(), in.completedPages(), randomValueOtherThan(in.totalTerms, ESTestCase::randomNonNegativeLong), - in.totalTimeInMillis() + in.procesNanos() ); case 3 -> new EnrichLookupOperator.Status( in.receivedPages(), in.completedPages(), in.totalTerms, - randomValueOtherThan(in.totalTimeInMillis(), ESTestCase::randomNonNegativeLong) + randomValueOtherThan(in.procesNanos(), ESTestCase::randomNonNegativeLong) ); default -> throw new AssertionError("unknown 
"); }; @@ -70,10 +70,10 @@ public void testToXContent() { String json = Strings.toString(status, true, true); assertThat(json, equalTo(""" { + "process_nanos" : 10000, + "process_time" : "10micros", "received_pages" : 100, "completed_pages" : 50, - "total_time_in_millis" : 10000, - "total_time" : "10s", "total_terms" : 120 }""")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperatorStatusTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperatorStatusTests.java index a204e93b0d16a..641eb72afb010 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperatorStatusTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperatorStatusTests.java @@ -38,18 +38,18 @@ protected LookupFromIndexOperator.Status createTestInstance() { protected LookupFromIndexOperator.Status mutateInstance(LookupFromIndexOperator.Status in) throws IOException { long receivedPages = in.receivedPages(); long completedPages = in.completedPages(); - long totalTimeInMillis = in.totalTimeInMillis(); + long procesNanos = in.procesNanos(); long totalTerms = in.totalTerms(); long emittedPages = in.emittedPages(); switch (randomIntBetween(0, 4)) { case 0 -> receivedPages = randomValueOtherThan(receivedPages, ESTestCase::randomNonNegativeLong); case 1 -> completedPages = randomValueOtherThan(completedPages, ESTestCase::randomNonNegativeLong); - case 2 -> totalTimeInMillis = randomValueOtherThan(totalTimeInMillis, ESTestCase::randomNonNegativeLong); + case 2 -> procesNanos = randomValueOtherThan(procesNanos, ESTestCase::randomNonNegativeLong); case 3 -> totalTerms = randomValueOtherThan(totalTerms, ESTestCase::randomNonNegativeLong); case 4 -> emittedPages = randomValueOtherThan(emittedPages, ESTestCase::randomNonNegativeLong); default -> throw new UnsupportedOperationException(); } - return new LookupFromIndexOperator.Status(receivedPages, completedPages, totalTimeInMillis, totalTerms, emittedPages); + return new LookupFromIndexOperator.Status(receivedPages, completedPages, procesNanos, totalTerms, emittedPages); } public void testToXContent() { @@ -57,10 +57,10 @@ public void testToXContent() { String json = Strings.toString(status, true, true); assertThat(json, equalTo(""" { + "process_nanos" : 10000, + "process_time" : "10micros", "received_pages" : 100, "completed_pages" : 50, - "total_time_in_millis" : 10000, - "total_time" : "10s", "emitted_pages" : 88, "total_terms" : 120 }""")); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index 87ea6315d4f3b..f8ba9c1251088 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -61,7 +61,7 @@ public abstract class AbstractAggregationTestCase extends AbstractFunctionTestCa * Use if possible, as this method may get updated with new checks in the future. *

</p> */ - protected static Iterable<Object[]> parameterSuppliersFromTypedDataWithDefaultChecks( + protected static Iterable<Object[]> parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( List<TestCaseSupplier> suppliers, boolean entirelyNullPreservesType, PositionalErrorMessageSupplier positionalErrorMessageSupplier @@ -74,13 +74,24 @@ protected static Iterable<Object[]> parameterSuppliersFromTypedDataWithDefaultCh ); } - // TODO: Remove and migrate everything to the method with all the parameters /** - * @deprecated Use {@link #parameterSuppliersFromTypedDataWithDefaultChecks(List, boolean, PositionalErrorMessageSupplier)} instead. - * This method doesn't add all the default checks. + * Converts a list of test cases into a list of parameter suppliers. + * Also, adds a default set of extra test cases. + * <p> + *     Use if possible, as this method may get updated with new checks in the future. + * </p>
+ * + * @param entirelyNullPreservesType See {@link #anyNullIsNull(boolean, List)} */ - @Deprecated - protected static Iterable<Object[]> parameterSuppliersFromTypedDataWithDefaultChecks(List<TestCaseSupplier> suppliers) { + protected static Iterable<Object[]> parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( + // TODO remove after removing parameterSuppliersFromTypedDataWithDefaultChecks rename this to that. + List<TestCaseSupplier> suppliers, + boolean entirelyNullPreservesType + ) { + return parameterSuppliersFromTypedData(anyNullIsNull(entirelyNullPreservesType, randomizeBytesRefsOffset(suppliers))); + } + + protected static Iterable<Object[]> parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(List<TestCaseSupplier> suppliers) { return parameterSuppliersFromTypedData(withNoRowsExpectingNull(randomizeBytesRefsOffset(suppliers))); } @@ -399,15 +410,15 @@ private Expression resolveSurrogates(Expression expression) { } private Aggregator aggregator(Expression expression, List<Integer> inputChannels, AggregatorMode mode) { - AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(inputChannels); + AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(); - return new Aggregator(aggregatorFunctionSupplier.aggregator(driverContext()), mode); + return new Aggregator(aggregatorFunctionSupplier.aggregator(driverContext(), inputChannels), mode); } private GroupingAggregator groupingAggregator(Expression expression, List<Integer> inputChannels, AggregatorMode mode) { - AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(inputChannels); + AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(); - return new GroupingAggregator(aggregatorFunctionSupplier.groupingAggregator(driverContext()), mode); + return new GroupingAggregator(aggregatorFunctionSupplier.groupingAggregator(driverContext(), inputChannels), mode); } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index f089e81621990..65e0a2f1b20ac 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -924,7 +924,7 @@ protected static void renderDocs(String name) throws IOException { description.returnType(), description.description(), description.variadic(), - description.isAggregation() + description.type() ); } renderTypes(name, description.args()); @@ -1216,7 +1216,11 @@ private static void renderKibanaFunctionDefinition( builder.field("operator", info.operator()); assertThat(isAggregation(), equalTo(false)); } else { - builder.field("type", isAggregation() ? 
"agg" : "eval"); + builder.field("type", switch (info.type()) { + case SCALAR -> "scalar"; + case AGGREGATE -> "agg"; + case GROUPING -> "grouping"; + }); } builder.field("name", name); builder.field("description", removeAsciidocLinks(info.description())); @@ -1253,7 +1257,7 @@ private static void renderKibanaFunctionDefinition( builder.startObject(); builder.field("name", arg.name()); if (arg.mapArg()) { - builder.field("type", "function named parameters"); + builder.field("type", "function_named_parameters"); builder.field( "mapParams", arg.mapParams() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java index 429e6685a201c..05202159a1bcd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java @@ -51,33 +51,6 @@ * which can be automatically tested against several scenarios (null handling, concurrency, etc). */ public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTestCase { - - /** - * Converts a list of test cases into a list of parameter suppliers. - * Also, adds a default set of extra test cases. - *

<p> - *     Use if possible, as this method may get updated with new checks in the future. - * </p>
- * - * @param entirelyNullPreservesType See {@link #anyNullIsNull(boolean, List)} - * @deprecated use {@link #parameterSuppliersFromTypedDataWithDefaultChecksNoErrors} - * and make a subclass of {@link ErrorsForCasesWithoutExamplesTestCase}. - * It's a lot faster. - */ - @Deprecated - protected static Iterable<Object[]> parameterSuppliersFromTypedDataWithDefaultChecks( - boolean entirelyNullPreservesType, - List<TestCaseSupplier> suppliers, - PositionalErrorMessageSupplier positionalErrorMessageSupplier - ) { - return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples( - anyNullIsNull(entirelyNullPreservesType, randomizeBytesRefsOffset(suppliers)), - positionalErrorMessageSupplier - ) - ); - } - /** * Converts a list of test cases into a list of parameter suppliers. * Also, adds a default set of extra test cases. @@ -113,30 +86,6 @@ protected static Iterable<Object[]> parameterSuppliersFromTypedDataWithDefaultCh return parameterSuppliersFromTypedData(anyNullIsNull(randomizeBytesRefsOffset(suppliers), nullsExpectedType, evaluatorToString)); } - /** - * Converts a list of test cases into a list of parameter suppliers. - * Also, adds a default set of extra test cases. - *

<p> - *     Use if possible, as this method may get updated with new checks in the future. - * </p>
    - * - * @param nullsExpectedType See {@link #anyNullIsNull(List, ExpectedType, ExpectedEvaluatorToString)} - * @param evaluatorToString See {@link #anyNullIsNull(List, ExpectedType, ExpectedEvaluatorToString)} - */ - protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks( - ExpectedType nullsExpectedType, - ExpectedEvaluatorToString evaluatorToString, - List suppliers, - PositionalErrorMessageSupplier positionalErrorMessageSupplier - ) { - return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples( - anyNullIsNull(randomizeBytesRefsOffset(suppliers), nullsExpectedType, evaluatorToString), - positionalErrorMessageSupplier - ) - ); - } - public final void testEvaluate() { assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); boolean readFloating = randomBoolean(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java index e507640c7b23c..cf2de30e44456 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import java.util.List; import java.util.Objects; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java new file mode 100644 index 0000000000000..16f80e4564cff --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class AvgErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List<TestCaseSupplier> cases() { + return paramsToSuppliers(AvgTests.parameters()); + } + + @Override + protected Expression build(Source source, List<Expression> args) { + return new Avg(source, args.get(0)); + } + + @Override + protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric except unsigned_long or counter types")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java index ac599c7ff05f8..75d95c3eeac96 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java @@ -53,7 +53,7 @@ public static Iterable<Object[]> parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers, true, (v, p) -> "numeric except unsigned_long or counter types"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, true); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java index e0b8c1356d087..c632909c7d8e1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java @@ -93,7 +93,6 @@ public static Iterable<Object[]> parameters() { } // "No rows" expects 0 here instead of null - // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java index 0485714959f63..3d14bc1b4bca7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java @@ -82,7 +82,6 @@ public static Iterable<Object[]> parameters() { } // "No rows" expects 0 here instead of null - // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxErrorTests.java
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxErrorTests.java new file mode 100644 index 0000000000000..15fb2c053b981 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MaxErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MaxTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Max(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "representable except unsigned_long and spatial types") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java index ae5b3691b0a7d..edae496d27a93 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java @@ -157,11 +157,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks( - suppliers, - false, - (v, p) -> "representable except unsigned_long and spatial types" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java index ea36170866b0e..047e204c0e0c2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java @@ -39,7 +39,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) ).flatMap(List::stream).map(MedianAbsoluteDeviationTests::makeSupplier).toList(); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, true); } @Override diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java index 0f7ed1b3e9b10..1c2c06c1ede94 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java @@ -73,7 +73,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, true); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinErrorTests.java new file mode 100644 index 0000000000000..a9b4730f12fac --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MinErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MinTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Min(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "representable except unsigned_long and spatial types") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java index ad2953f057635..0016876b1a198 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java @@ -157,11 +157,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks( - suppliers, - false, - (v, p) -> "representable except unsigned_long and spatial types" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileErrorTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileErrorTests.java new file mode 100644 index 0000000000000..b2f701f41792b --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class PercentileErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(PercentileTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Percentile(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "numeric except unsigned_long")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileTests.java index 1bbac376edcf3..0033f98222903 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileTests.java @@ -53,7 +53,7 @@ public static Iterable parameters() { } } - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers, false, (v, p) -> "numeric except unsigned_long"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java index b92b32aa7ad09..a99cb8f60e3fa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java @@ -47,7 +47,6 @@ public static Iterable parameters() { ).flatMap(List::stream).map(SpatialCentroidTests::makeSupplier).toList(); // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. 
- // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java index 225e10f99c853..9a0a62ce2d06e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java @@ -48,7 +48,6 @@ public static Iterable parameters() { ).flatMap(List::stream).map(SpatialExtentTests::makeSupplier).toList(); // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. - // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java index 85b96e29d1f6a..409bb5bcba6fb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java @@ -41,7 +41,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) ).flatMap(List::stream).map(StdDevTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, true); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumTests.java index 4f14dafc8b30d..6730c2591ebbf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumTests.java @@ -77,7 +77,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java index f236e4d8faf98..1d18d66110fe0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java @@ -280,7 +280,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java new file mode 100644 index 0000000000000..f9dafc954b6f5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class ValuesErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(ValuesTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Values(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "any type except unsigned_long and spatial types")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java index 5f35f8cada397..80e6a7fc09d56 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java @@ -55,11 +55,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.stringCases(1, 20, DataType.SEMANTIC_TEXT) ).flatMap(List::stream).map(ValuesTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); - return parameterSuppliersFromTypedDataWithDefaultChecks( - suppliers, - false, - (v, p) -> "any type except unsigned_long and spatial types" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvgTests.java index 2c2ffc97f268c..1ad6cdf4c2494 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvgTests.java @@ -90,7 +90,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index b196bd49f6bb2..2fa82b9f1caa2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -59,7 +59,7 @@ public class CaseTests extends AbstractScalarFunctionTestCase { DataType.NULL ).collect(Collectors.toList()); if (Build.current().isSnapshot()) { - t.addAll(DataType.UNDER_CONSTRUCTION.keySet()); + t.addAll(DataType.UNDER_CONSTRUCTION.keySet().stream().filter(type -> type != DataType.AGGREGATE_METRIC_DOUBLE).toList()); } TYPES = unmodifiableList(t); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java new file mode 100644 index 0000000000000..94d9bd5f64cbd --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matchers; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +@FunctionName("from_aggregate_metric_double") +public class FromAggregateMetricDoubleTests extends AbstractScalarFunctionTestCase { + public FromAggregateMetricDoubleTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @Override + protected Expression build(Source source, List args) { + assumeTrue("Test sometimes wraps literals as fields", args.get(1).foldable()); + return new FromAggregateMetricDouble(source, args.get(0), args.get(1)); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType dataType = DataType.AGGREGATE_METRIC_DOUBLE; + for (int i = 0; i < 4; i++) { + int index = i; + suppliers.add(new TestCaseSupplier(List.of(dataType, DataType.INTEGER), () -> { + var agg_metric = new AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral( + randomDoubleBetween(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true), + randomDoubleBetween(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true), + randomDoubleBetween(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true), + randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE) + ); + Double 
expectedValue = index == AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex() ? agg_metric.min() + : index == AggregateMetricDoubleBlockBuilder.Metric.MAX.getIndex() ? agg_metric.max() + : index == AggregateMetricDoubleBlockBuilder.Metric.SUM.getIndex() ? agg_metric.sum() + : (Double) agg_metric.count().doubleValue(); + + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(agg_metric, dataType, "agg_metric"), + new TestCaseSupplier.TypedData(index, DataType.INTEGER, "subfield_index").forceLiteral() + ), + "FromAggregateMetricDoubleEvaluator[field=Attribute[channel=0],subfieldIndex=" + index + "]", + index == AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex() ? DataType.INTEGER : DataType.DOUBLE, + index == AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex() ? Matchers.equalTo(agg_metric.count()) + : expectedValue == null ? Matchers.nullValue() + : Matchers.closeTo(expectedValue, Math.abs(expectedValue * 0.00001)) + ); + })); + } + + return parameterSuppliersFromTypedData( + anyNullIsNull( + suppliers, + (nullPosition, nullValueDataType, original) -> nullPosition == 1 ? DataType.NULL : original.expectedType(), + (nullPosition, nullData, original) -> nullData.isForceLiteral() ? Matchers.equalTo("LiteralsEvaluator[lit=null]") : original + ) + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java index c667747a8ba75..ed2c45f8c2321 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java @@ -32,7 +32,7 @@ public NowTests(@Name("TestCase") Supplier testCaseSu @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedDataWithDefaultChecks( + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( true, List.of( new TestCaseSupplier( @@ -45,8 +45,7 @@ public static Iterable parameters() { equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) ) ) - ), - (valid, position) -> "" + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumErrorTests.java new file mode 100644 index 0000000000000..4f1f8f911c306 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvPSeriesWeightedSumErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvPSeriesWeightedSumTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvPSeriesWeightedSum(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "double")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java index 0c905b28ac931..47669cba71894 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java @@ -35,7 +35,7 @@ public static Iterable parameters() { doubles(cases); - return parameterSuppliersFromTypedDataWithDefaultChecks( + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( (nullPosition, nullValueDataType, original) -> nullValueDataType == DataType.NULL ? DataType.NULL : original.expectedType(), (nullPosition, nullData, original) -> { if (nullData.isForceLiteral()) { @@ -43,8 +43,7 @@ public static Iterable parameters() { } return nullData.type() == DataType.NULL ? equalTo("LiteralsEvaluator[lit=null]") : original; }, - cases, - (valid, position) -> "double" + cases ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileErrorTests.java new file mode 100644 index 0000000000000..25e7100b7c418 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvPercentileErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvPercentileTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvPercentile(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "numeric except unsigned_long")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java index 0a419d44e3448..9c506ee0b5954 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java @@ -352,13 +352,12 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks( + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( (nullPosition, nullValueDataType, original) -> nullValueDataType == DataType.NULL && nullPosition == 0 ? DataType.NULL : original.expectedType(), (nullPosition, nullData, original) -> original, - cases, - (v, p) -> "numeric except unsigned_long" + cases ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumErrorTests.java new file mode 100644 index 0000000000000..bd8168f274f09 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvSumErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvSumTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvSum(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java index 89b148144fc83..19bb915b405db 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java @@ -65,7 +65,7 @@ public static Iterable parameters() { data.add(asLongUnsigned(UNSIGNED_LONG_MAX)); return data; })); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, cases); } private static TestCaseSupplier arithmeticExceptionCase(DataType dataType, Supplier dataSupplier) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeErrorTests.java new file mode 100644 index 0000000000000..1c5b867bef73b --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StEnvelopeErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StEnvelopeTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StEnvelope(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java index 9f629d9127673..6b0449788b1c8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java @@ -55,11 +55,7 @@ public static Iterable parameters() { StEnvelopeTests::valueOfCartesian, List.of() ); - return parameterSuppliersFromTypedDataWithDefaultChecks( - false, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, suppliers); } private static BytesRef valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXErrorTests.java new file mode 100644 index 0000000000000..77e85ea9c1882 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StXErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StXTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StX(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point or cartesian_point")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxErrorTests.java new file mode 100644 index 0000000000000..e209304305aee --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StXMaxErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StXMaxTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StXMax(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java index 9205879fa1cb9..aa7ced1d4251d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java @@ -42,11 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedCartesian, DOUBLE, StXMaxTests::valueOfCartesian, List.of()); TestCaseSupplier.forUnaryGeoShape(suppliers, expectedGeo, DOUBLE, StXMaxTests::valueOfGeo, List.of()); TestCaseSupplier.forUnaryCartesianShape(suppliers, expectedCartesian, DOUBLE, StXMaxTests::valueOfCartesian, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks( - true, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinErrorTests.java new file mode 100644 index 0000000000000..7673d3663df18 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
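 */

// Hedged illustration of what the matchers above check: typeErrorMessage builds the
// analyzer-style error for each unsupported signature. For a unary spatial function handed a
// keyword, the string should look roughly like this (exact wording is an assumption based on
// the usual ES|QL type-error format, with [source] and [v] standing in for the real snippets):
String expected = "argument of [source] must be "
    + "[geo_point, cartesian_point, geo_shape or cartesian_shape], found value [v] type [keyword]";

/*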
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StXMinErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StXMinTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StXMin(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java index 3603bff9656fe..f728f50cc6260 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java @@ -42,11 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedCartesian, DOUBLE, StXMinTests::valueOfCartesian, List.of()); TestCaseSupplier.forUnaryGeoShape(suppliers, expectedGeo, DOUBLE, StXMinTests::valueOfGeo, List.of()); TestCaseSupplier.forUnaryCartesianShape(suppliers, expectedCartesian, DOUBLE, StXMinTests::valueOfCartesian, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks( - true, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java index 96cddfdd64099..4e14c23a1bba4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java @@ -36,7 +36,7 @@ public static Iterable parameters() { final List suppliers = new ArrayList<>(); TestCaseSupplier.forUnaryGeoPoint(suppliers, expectedEvaluator, DOUBLE, StXTests::valueOf, List.of()); TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedEvaluator, DOUBLE, StXTests::valueOf, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "geo_point or cartesian_point"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOf(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYErrorTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYErrorTests.java new file mode 100644 index 0000000000000..ddad9f3e4902f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StYErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StYTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StY(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point or cartesian_point")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxErrorTests.java new file mode 100644 index 0000000000000..0090da0bc4238 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StYMaxErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StYMaxTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StYMax(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java index cb2a03c3a9473..9aeda6b106236 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java @@ -42,11 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedCartesian, DOUBLE, StYMaxTests::valueOfCartesian, List.of()); TestCaseSupplier.forUnaryGeoShape(suppliers, expectedGeo, DOUBLE, StYMaxTests::valueOfGeo, List.of()); TestCaseSupplier.forUnaryCartesianShape(suppliers, expectedCartesian, DOUBLE, StYMaxTests::valueOfCartesian, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks( - true, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinErrorTests.java new file mode 100644 index 0000000000000..29ffac0bac1ff --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StYMinErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StYMinTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StYMin(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java index 0c191f6dc4c5b..db577b536048b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java @@ -42,11 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedCartesian, DOUBLE, StYMinTests::valueOfCartesian, List.of()); TestCaseSupplier.forUnaryGeoShape(suppliers, expectedGeo, DOUBLE, StYMinTests::valueOfGeo, List.of()); TestCaseSupplier.forUnaryCartesianShape(suppliers, expectedCartesian, DOUBLE, StYMinTests::valueOfCartesian, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks( - true, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java index 165dbb2c0ab77..33ee6f6c4cdce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java @@ -36,7 +36,7 @@ public static Iterable parameters() { final List suppliers = new ArrayList<>(); TestCaseSupplier.forUnaryGeoPoint(suppliers, expectedEvaluator, DOUBLE, StYTests::valueOf, List.of()); TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedEvaluator, DOUBLE, StYTests::valueOf, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "geo_point or cartesian_point"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOf(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatErrorTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatErrorTests.java new file mode 100644 index 0000000000000..48cbe2fbc1007 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatErrorTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class RepeatErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + + @Override + protected List cases() { + return paramsToSuppliers(RepeatTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Repeat(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> switch (p) { + case 0 -> "string"; + case 1 -> "integer"; + default -> ""; + })); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java index 5eb654b0d8235..2f1c2e7853c7e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java @@ -122,11 +122,7 @@ public static Iterable parameters() { .withFoldingException(IllegalArgumentException.class, "Number parameter cannot be negative, found [" + number + "]"); })); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, cases, (v, p) -> switch (p) { - case 0 -> "string"; - case 1 -> "integer"; - default -> ""; - }); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseErrorTests.java new file mode 100644 index 0000000000000..e77bc574a2acf --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
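 */

// For multi-argument functions like Repeat above, the (v, p) lambda switches on the parameter
// position, and typeErrorMessage(true, ...) prefixes the ordinal of the offending argument.
// A hedged example of the resulting string for a bad count argument (wording assumed):
String expectedForBadCount = "second argument of [source] must be [integer], found value [false] type [boolean]";

/*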
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class ReverseErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(ReverseTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Reverse(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java index 397fb8064626c..8c4f77535c7b0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java @@ -39,7 +39,7 @@ public static Iterable parameters() { } } - return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "string"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index 7f04f076ed15f..53e666738e182 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.junit.AfterClass; @@ -147,8 +148,8 @@ public String appendix() { } @Override - public boolean isAggregation() { - return orig.isAggregation(); + public FunctionType type() { + return orig.type(); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 310d680cfbf41..98f3d1d2d8d8e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -66,6 +66,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.THREE; 
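// The asLimit helper imported just below is new in this change. A hedged sketch of what it
// likely does, inferred purely from call sites such as asLimit(plan, 1000, true): cast to
// Limit, verify the folded limit value, and verify the boolean that plan trees now print as
// Limit[1000[INTEGER],true]. The `duplicated()` accessor name is an assumption, and the
// sketch assumes the usual hamcrest static imports:
static Limit asLimitSketch(LogicalPlan plan, Integer value, Boolean duplicated) {
    Limit limit = as(plan, Limit.class);
    if (value != null) {
        assertThat(limit.limit().fold(FoldContext.small()), equalTo(value));
    }
    if (duplicated != null) {
        assertThat(limit.duplicated(), equalTo(duplicated));
    }
    return limit;
}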
import static org.elasticsearch.xpack.esql.EsqlTestUtils.TWO; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.asLimit; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.greaterThanOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; @@ -75,7 +76,6 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -196,10 +196,11 @@ public void testMissingFieldInSort() { /** * Expects - * EsqlProject[[first_name{f}#9, last_name{r}#18]] - * \_MvExpand[last_name{f}#12,last_name{r}#18,1000] - * \_Limit[1000[INTEGER]] - * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + * EsqlProject[[first_name{f}#7, last_name{r}#17]] + * \_Limit[1000[INTEGER],true] + * \_MvExpand[last_name{f}#10,last_name{r}#17] + * \_Limit[1000[INTEGER],false] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] */ public void testMissingFieldInMvExpand() { var plan = plan(""" @@ -215,9 +216,9 @@ public void testMissingFieldInMvExpand() { var projections = project.projections(); assertThat(Expressions.names(projections), contains("first_name", "last_name")); - var mvExpand = as(project.child(), MvExpand.class); - assertThat(mvExpand.limit(), equalTo(1000)); - var limit2 = as(mvExpand.child(), Limit.class); + var limit1 = asLimit(project.child(), 1000, true); + var mvExpand = as(limit1.child(), MvExpand.class); + var limit2 = asLimit(mvExpand.child(), 1000, false); as(limit2.child(), EsRelation.class); } @@ -248,11 +249,6 @@ public UnaryPlan replaceChild(LogicalPlan newChild) { return new MockFieldAttributeCommand(source(), newChild, field); } - @Override - public String commandName() { - return "MOCK"; - } - @Override public boolean expressionsResolved() { return true; @@ -269,7 +265,6 @@ protected NodeInfo info() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/110150") public void testMissingFieldInNewCommand() { var testStats = statsForMissingField("last_name"); localPlan( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index aae2d012fc3a6..8bdd7a4e1645f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -74,9 +74,9 @@ import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.rule.Rule; import org.elasticsearch.xpack.esql.session.Configuration; -import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchContextStats; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import org.elasticsearch.xpack.kql.query.KqlQueryBuilder; import org.junit.Before; diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 7ceaaa740b802..6fb1cb9ca14ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -96,6 +96,7 @@ import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.optimizer.rules.logical.LiteralsOnTheRight; import org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneRedundantOrderBy; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownAndCombineLimits; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownEnrich; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownEval; @@ -149,6 +150,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.THREE; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TWO; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.asLimit; import static org.elasticsearch.xpack.esql.EsqlTestUtils.emptySource; import static org.elasticsearch.xpack.esql.EsqlTestUtils.fieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; @@ -1315,6 +1317,8 @@ public void testCombineLimits() { } public void testPushdownLimitsPastLeftJoin() { + var rule = new PushDownAndCombineLimits(); + var leftChild = emptySource(); var rightChild = new LocalRelation(Source.EMPTY, List.of(fieldAttribute()), LocalSupplier.EMPTY); assertNotEquals(leftChild, rightChild); @@ -1329,9 +1333,16 @@ public void testPushdownLimitsPastLeftJoin() { var limit = new Limit(EMPTY, L(10), join); - var optimizedPlan = new PushDownAndCombineLimits().rule(limit, logicalOptimizerCtx); + var optimizedPlan = rule.apply(limit, logicalOptimizerCtx); + + assertEquals( + new Limit(limit.source(), limit.limit(), join.replaceChildren(limit.replaceChild(join.left()), join.right()), true), + optimizedPlan + ); - assertEquals(join.replaceChildren(limit.replaceChild(join.left()), join.right()), optimizedPlan); + var optimizedTwice = rule.apply(optimizedPlan, logicalOptimizerCtx); + // We mustn't create the limit after the JOIN multiple times when the rule is applied multiple times, that'd lead to infinite loops. + assertEquals(optimizedPlan, optimizedTwice); } public void testMultipleCombineLimits() { @@ -1829,10 +1840,9 @@ public void testCombineOrderByThroughFilter() { /** * Expected - * TopN[[Order[first_name{f}#170,ASC,LAST]],1000[INTEGER]] - * \_MvExpand[first_name{f}#170] - * \_TopN[[Order[emp_no{f}#169,ASC,LAST]],1000[INTEGER]] - * \_EsRelation[test][avg_worked_seconds{f}#167, birth_date{f}#168, emp_n..] + * TopN[[Order[first_name{r}#5575,ASC,LAST]],1000[INTEGER]] + * \_MvExpand[first_name{f}#5565,first_name{r}#5575,null] + * \_EsRelation[test][_meta_field{f}#5570, emp_no{f}#5564, first_name{f}#..] 
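 */

// Hedged sketch of the guard that the double application of PushDownAndCombineLimits above
// exercises. Every expression except the `duplicated()` accessor appears in the test itself;
// the rule body below is an assumption: push a copy of the limit below the join only once,
// and mark the retained upper limit so a second application is a no-op rather than a loop.
if (limit.duplicated() == false) {
    var pushedPastJoin = limit.replaceChild(join.left());
    return new Limit(limit.source(), limit.limit(), join.replaceChildren(pushedPastJoin, join.right()), true);
}
return limit; // already duplicated once: leave the plan unchanged

/**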
*/ public void testDontCombineOrderByThroughMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1844,17 +1854,16 @@ public void testDontCombineOrderByThroughMvExpand() { var topN = as(plan, TopN.class); assertThat(orderNames(topN), contains("first_name")); var mvExpand = as(topN.child(), MvExpand.class); - topN = as(mvExpand.child(), TopN.class); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExpand.child(), EsRelation.class); } /** * Expected - * MvExpand[x{r}#4,x{r}#18,1000] - * \_EsqlProject[[first_name{f}#9 AS x]] - * \_Limit[1000[INTEGER]] - * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + * Limit[1000[INTEGER],true] + * \_MvExpand[x{r}#4,x{r}#19] + * \_EsqlProject[[first_name{f}#9 AS x]] + * \_Limit[1000[INTEGER],false] + * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] */ public void testCopyDefaultLimitPastMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1864,20 +1873,44 @@ public void testCopyDefaultLimitPastMvExpand() { | mv_expand x """); - var mvExpand = as(plan, MvExpand.class); - assertThat(mvExpand.limit(), equalTo(1000)); + var limit = asLimit(plan, 1000, true); + var mvExpand = as(limit.child(), MvExpand.class); var keep = as(mvExpand.child(), EsqlProject.class); - var limitPastMvExpand = as(keep.child(), Limit.class); - assertThat(limitPastMvExpand.limit().fold(FoldContext.small()), equalTo(1000)); + var limitPastMvExpand = asLimit(keep.child(), 1000, false); as(limitPastMvExpand.child(), EsRelation.class); } /** * Expected - * MvExpand[first_name{f}#7,first_name{r}#16,10] - * \_EsqlProject[[first_name{f}#7, last_name{f}#10]] - * \_Limit[1[INTEGER]] - * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * |_EsqlProject[[languages{f}#10 AS language_code]] + * | \_Limit[1000[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + */ + public void testCopyDefaultLimitPastLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + from test + | rename languages AS language_code + | keep language_code + | lookup join languages_lookup ON language_code + """); + + var limit = asLimit(plan, 1000, true); + var join = as(limit.child(), Join.class); + var keep = as(join.left(), EsqlProject.class); + var limitPastMvExpand = asLimit(keep.child(), 1000, false); + as(limitPastMvExpand.child(), EsRelation.class); + } + + /** + * Expected + * Limit[10[INTEGER],true] + * \_MvExpand[first_name{f}#7,first_name{r}#17] + * \_EsqlProject[[first_name{f}#7, last_name{f}#10]] + * \_Limit[1[INTEGER],false] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] 
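 */

// Why the plan below keeps two limits: the lower, non-duplicated Limit only pre-trims input
// rows (an optimization), while the upper, duplicated Limit must remain to bound the output,
// since mv_expand can emit several rows per input row. A hedged sketch of the expected shape,
// by analogy with the tests in this file (not a test known to exist verbatim):
LogicalPlan p = optimizedPlan("""
    row a = [1, 2, 3]
    | mv_expand a
    | limit 2
    """);
var upper = asLimit(p, 2, true);               // final bound, applied after expansion
var expand = as(upper.child(), MvExpand.class);
asLimit(expand.child(), 2, false);             // pushed-down pre-expand copy

/**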
*/ public void testDontPushDownLimitPastMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1885,28 +1918,56 @@ public void testDontPushDownLimitPastMvExpand() { | limit 1 | keep first_name, last_name | mv_expand first_name - | limit 10"""); + | limit 10 + """); - var mvExpand = as(plan, MvExpand.class); - assertThat(mvExpand.limit(), equalTo(10)); + var limit = asLimit(plan, 10, true); + var mvExpand = as(limit.child(), MvExpand.class); var project = as(mvExpand.child(), EsqlProject.class); - var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(1)); - as(limit.child(), EsRelation.class); + var limit2 = asLimit(project.child(), 1, false); + as(limit2.child(), EsRelation.class); + } + + /** + * Expected + * Limit[10[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#19]] + * |_EsqlProject[[languages{f}#11 AS language_code, last_name{f}#12]] + * | \_Limit[1[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] + */ + public void testDontPushDownLimitPastLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + from test + | limit 1 + | rename languages AS language_code + | keep language_code, last_name + | lookup join languages_lookup on language_code + | limit 10 + """); + + var limit = asLimit(plan, 10, true); + var join = as(limit.child(), Join.class); + var project = as(join.left(), EsqlProject.class); + var limit2 = asLimit(project.child(), 1, false); + as(limit2.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#19, first_name{r}#29, languages{f}#22, lll{r}#9, salary{r}#30]] - * \_TopN[[Order[salary{r}#30,DESC,FIRST]],5[INTEGER]] - * \_MvExpand[salary{f}#24,salary{r}#30,5] - * \_Eval[[languages{f}#22 + 5[INTEGER] AS lll]] - * \_Limit[5[INTEGER]] - * \_Filter[languages{f}#22 > 1[INTEGER]] - * \_MvExpand[first_name{f}#20,first_name{r}#29,10] - * \_TopN[[Order[emp_no{f}#19,DESC,FIRST]],10[INTEGER]] - * \_Filter[emp_no{f}#19 ≤ 10006[INTEGER]] - * \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..] + * EsqlProject[[emp_no{f}#19, first_name{r}#30, languages{f}#22, lll{r}#9, salary{r}#31]] + * \_TopN[[Order[salary{r}#31,DESC,FIRST]],5[INTEGER]] + * \_Limit[5[INTEGER],true] + * \_MvExpand[salary{f}#24,salary{r}#31] + * \_Eval[[languages{f}#22 + 5[INTEGER] AS lll]] + * \_Limit[5[INTEGER],false] + * \_Filter[languages{f}#22 > 1[INTEGER]] + * \_Limit[10[INTEGER],true] + * \_MvExpand[first_name{f}#20,first_name{r}#30] + * \_TopN[[Order[emp_no{f}#19,DESC,FIRST]],10[INTEGER]] + * \_Filter[emp_no{f}#19 ≤ 10006[INTEGER]] + * \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..] 
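 */

// A self-contained, plain-Java illustration (not optimizer code) of why the limit pairs below
// cannot be merged across mv_expand: expansion multiplies rows between the two limits, so
// `limit 1 ... mv_expand ... limit 10` may legitimately return more than one row.
List<List<Integer>> rows = List.of(List.of(1, 2, 3));                   // one multivalued row
var afterLimit1 = rows.stream().limit(1).toList();                      // limit 1  -> 1 row
var expanded = afterLimit1.stream().flatMap(List::stream).toList();     // mv_expand -> 3 rows
var afterLimit10 = expanded.stream().limit(10).toList();                // limit 10 -> 3 rows, not 1

/**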
*/ public void testMultipleMvExpandWithSortAndLimit() { LogicalPlan plan = optimizedPlan(""" @@ -1921,19 +1982,20 @@ public void testMultipleMvExpandWithSortAndLimit() { | limit 5 | sort first_name | keep emp_no, first_name, languages, lll, salary - | sort salary desc"""); + | sort salary desc + """); var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary")); - var mvExp = as(topN.child(), MvExpand.class); - assertThat(mvExp.limit(), equalTo(5)); + var limit5Before = asLimit(topN.child(), 5, true); + var mvExp = as(limit5Before.child(), MvExpand.class); var eval = as(mvExp.child(), Eval.class); - var limit5 = as(eval.child(), Limit.class); + var limit5 = asLimit(eval.child(), 5, false); var filter = as(limit5.child(), Filter.class); - mvExp = as(filter.child(), MvExpand.class); - assertThat(mvExp.limit(), equalTo(10)); + var limit10Before = asLimit(filter.child(), 10, true); + mvExp = as(limit10Before.child(), MvExpand.class); topN = as(mvExp.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); filter = as(topN.child(), Filter.class); @@ -1942,11 +2004,69 @@ public void testMultipleMvExpandWithSortAndLimit() { /** * Expected - * EsqlProject[[emp_no{f}#350, first_name{f}#351, salary{f}#352]] - * \_TopN[[Order[salary{f}#352,ASC,LAST], Order[first_name{f}#351,ASC,LAST]],5[INTEGER]] - * \_MvExpand[first_name{f}#351] - * \_TopN[[Order[emp_no{f}#350,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#350, first_name{f}#351, salary{f}#352] + * EsqlProject[[emp_no{f}#24, first_name{f}#25, languages{f}#27, lll{r}#11, salary{f}#29, language_name{f}#38]] + * \_TopN[[Order[salary{f}#29,DESC,FIRST]],5[INTEGER]] + * \_Limit[5[INTEGER],true] + * \_Join[LEFT,[language_code{r}#14],[language_code{r}#14],[language_code{f}#37]] + * |_Project[[_meta_field{f}#30, emp_no{f}#24, first_name{f}#25, gender{f}#26, hire_date{f}#31, job{f}#32, job.raw{f}#33, l + * anguages{f}#27, last_name{f}#28, long_noidx{f}#34, salary{f}#29, language_name{f}#36, lll{r}#11, salary{f}#29 AS language_code]] + * | \_Eval[[languages{f}#27 + 5[INTEGER] AS lll]] + * | \_Limit[5[INTEGER],false] + * | \_Filter[languages{f}#27 > 1[INTEGER]] + * | \_Limit[10[INTEGER],true] + * | \_Join[LEFT,[language_code{r}#6],[language_code{r}#6],[language_code{f}#35]] + * | |_Project[[_meta_field{f}#30, emp_no{f}#24, first_name{f}#25, gender{f}#26, hire_date{f}#31, job{f}#32, + * | | | job.raw{f}#33, languages{f}#27, last_name{f}#28, long_noidx{f}#34, salary{f}#29, + * | | | languages{f}#27 AS language_code]] + * | | \_TopN[[Order[emp_no{f}#24,DESC,FIRST]],10[INTEGER]] + * | | \_Filter[emp_no{f}#24 ≤ 10006[INTEGER]] + * | | \_EsRelation[test][_meta_field{f}#30, emp_no{f}#24, first_name{f}#25, ..] 
+ * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#35, language_name{f}#36] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#37, language_name{f}#38] + */ + public void testMultipleLookupJoinWithSortAndLimit() { + LogicalPlan plan = optimizedPlan(""" + from test + | where emp_no <= 10006 + | sort emp_no desc + | eval language_code = languages + | lookup join languages_lookup on language_code + | limit 10 + | where languages > 1 + | eval lll = languages + 5 + | eval language_code = salary::integer + | lookup join languages_lookup on language_code + | limit 5 + | sort first_name + | keep emp_no, first_name, languages, lll, salary, language_name + | sort salary desc + """); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); + assertThat(orderNames(topN), contains("salary")); + var limit5Before = asLimit(topN.child(), 5, true); + var join = as(limit5Before.child(), Join.class); + var project = as(join.left(), Project.class); + var eval = as(project.child(), Eval.class); + var limit5 = asLimit(eval.child(), 5, false); + var filter = as(limit5.child(), Filter.class); + var limit10Before = asLimit(filter.child(), 10, true); + join = as(limit10Before.child(), Join.class); + project = as(join.left(), Project.class); + topN = as(project.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); + assertThat(orderNames(topN), contains("emp_no")); + filter = as(topN.child(), Filter.class); + as(filter.child(), EsRelation.class); + } + + /** + * EsqlProject[[emp_no{f}#10, first_name{r}#21, salary{f}#15]] + * \_TopN[[Order[salary{f}#15,ASC,LAST], Order[first_name{r}#21,ASC,LAST]],5[INTEGER]] + * \_MvExpand[first_name{f}#11,first_name{r}#21,null] + * \_EsRelation[test][_meta_field{f}#16, emp_no{f}#10, first_name{f}#11, ..] */ public void testPushDownLimitThroughMultipleSort_AfterMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1962,20 +2082,16 @@ public void testPushDownLimitThroughMultipleSort_AfterMvExpand() { assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary", "first_name")); var mvExp = as(topN.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#361, first_name{f}#362, salary{f}#363]] - * \_TopN[[Order[first_name{f}#362,ASC,LAST]],5[INTEGER]] - * \_TopN[[Order[salary{f}#363,ASC,LAST]],5[INTEGER]] - * \_MvExpand[first_name{f}#362] - * \_TopN[[Order[emp_no{f}#361,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#361, first_name{f}#362, salary{f}#363] + * EsqlProject[[emp_no{f}#2560, first_name{r}#2571, salary{f}#2565]] + * \_TopN[[Order[first_name{r}#2571,ASC,LAST]],5[INTEGER]] + * \_TopN[[Order[salary{f}#2565,ASC,LAST]],5[INTEGER]] + * \_MvExpand[first_name{f}#2561,first_name{r}#2571,null] + * \_EsRelation[test][_meta_field{f}#2566, emp_no{f}#2560, first_name{f}#..] 
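 */

// The inner TopN these tests used to assert (the 10000-row default added before mv_expand) is
// gone: the leading sort is now recognized as dead, because a later sort fully reorders the
// rows before anything order-dependent reads them. A hedged sketch of the idea behind the
// PruneRedundantOrderBy rule imported earlier in this file -- the real rule's traversal and
// the discardsOrderAbove helper are assumptions for illustration, not its actual API:
static LogicalPlan pruneRedundantOrderBySketch(LogicalPlan plan) {
    return plan.transformDown(OrderBy.class, ob -> discardsOrderAbove(ob) ? ob.child() : ob);
}

/**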
*/ public void testPushDownLimitThroughMultipleSort_AfterMvExpand2() { LogicalPlan plan = optimizedPlan(""" @@ -1995,10 +2111,7 @@ public void testPushDownLimitThroughMultipleSort_AfterMvExpand2() { assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary")); var mvExp = as(topN.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** @@ -2038,12 +2151,13 @@ public void testDontPushDownLimitPastAggregate_AndMvExpand() { * TODO: Push down the filter correctly https://github.com/elastic/elasticsearch/issues/115311 * * Expected - * Limit[5[INTEGER]] - * \_Filter[ISNOTNULL(first_name{r}#22)] - * \_Aggregate[STANDARD,[first_name{r}#22],[MAX(salary{f}#17,true[BOOLEAN]) AS max_s, first_name{r}#22]] - * \_MvExpand[first_name{f}#13,first_name{r}#22,50] - * \_Limit[50[INTEGER]] - * \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] + * Limit[5[INTEGER],false] + * \_Filter[ISNOTNULL(first_name{r}#23)] + * \_Aggregate[STANDARD,[first_name{r}#23],[MAX(salary{f}#17,true[BOOLEAN]) AS max_s, first_name{r}#23]] + * \_Limit[50[INTEGER],true] + * \_MvExpand[first_name{f}#13,first_name{r}#23] + * \_Limit[50[INTEGER],false] + * \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] */ public void testPushDown_TheRightLimit_PastMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2055,14 +2169,48 @@ public void testPushDown_TheRightLimit_PastMvExpand() { | where first_name is not null | limit 5"""); - var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(5)); + var limit = asLimit(plan, 5, false); var filter = as(limit.child(), Filter.class); var agg = as(filter.child(), Aggregate.class); - var mvExp = as(agg.child(), MvExpand.class); - assertThat(mvExp.limit(), equalTo(50)); - limit = as(mvExp.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(50)); + var limit50Before = asLimit(agg.child(), 50, true); + var mvExp = as(limit50Before.child(), MvExpand.class); + limit = asLimit(mvExp.child(), 50, false); + as(limit.child(), EsRelation.class); + } + + /** + * TODO: Push down the filter correctly https://github.com/elastic/elasticsearch/issues/115311 + * + * Expected + * Limit[5[INTEGER],false] + * \_Filter[ISNOTNULL(first_name{f}#15)] + * \_Aggregate[STANDARD,[first_name{f}#15],[MAX(salary{f}#19,true[BOOLEAN]) AS max_s, first_name{f}#15]] + * \_Limit[50[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#25]] + * |_EsqlProject[[_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, gender{f}#16, hire_date{f}#21, job{f}#22, job.raw{f}#23, l + * anguages{f}#17 AS language_code, last_name{f}#18, long_noidx{f}#24, salary{f}#19]] + * | \_Limit[50[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, ..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#25] + */ + public void testPushDown_TheRightLimit_PastLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + from test + | rename languages as language_code + | lookup join languages_lookup on language_code + | limit 50 + | keep emp_no, first_name, salary + | stats max_s = max(salary) by first_name + | where first_name is not null + | limit 5"""); + + var limit = asLimit(plan, 5, false); + var filter = as(limit.child(), Filter.class); + var agg = as(filter.child(), Aggregate.class); + var limit50Before = asLimit(agg.child(), 50, true); + var join = as(limit50Before.child(), Join.class); + var project = as(join.left(), Project.class); + limit = asLimit(project.child(), 50, false); as(limit.child(), EsRelation.class); } @@ -2072,8 +2220,7 @@ public void testPushDown_TheRightLimit_PastMvExpand() { * \_TopN[[Order[salary{f}#12,ASC,LAST]],5[INTEGER]] * \_Eval[[100[INTEGER] AS b]] * \_MvExpand[first_name{f}#11] - * \_TopN[[Order[first_name{f}#11,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#10, first_name{f}#11, salary{f}#12] + * \_EsRelation[employees][emp_no{f}#10, first_name{f}#11, salary{f}#12] */ public void testPushDownLimit_PastEvalAndMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2091,22 +2238,18 @@ public void testPushDownLimit_PastEvalAndMvExpand() { assertThat(orderNames(topN), contains("salary")); var eval = as(topN.child(), Eval.class); var mvExp = as(eval.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("first_name")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#12, first_name{r}#22, salary{f}#17]] - * \_TopN[[Order[salary{f}#17,ASC,LAST], Order[first_name{r}#22,ASC,LAST]],1000[INTEGER]] - * \_Filter[gender{f}#14 == [46][KEYWORD] AND WILDCARDLIKE(first_name{r}#22)] - * \_MvExpand[first_name{f}#13,first_name{r}#22,null] - * \_TopN[[Order[emp_no{f}#12,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] - */ - public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField_ResultTruncationDefaultSize() { + * EsqlProject[[emp_no{f}#5885, first_name{r}#5896, salary{f}#5890]] + * \_TopN[[Order[salary{f}#5890,ASC,LAST], Order[first_name{r}#5896,ASC,LAST]],1000[INTEGER]] + * \_Filter[gender{f}#5887 == [46][KEYWORD] AND WILDCARDLIKE(first_name{r}#5896)] + * \_MvExpand[first_name{f}#5886,first_name{r}#5896,null] + * \_EsRelation[test][_meta_field{f}#5891, emp_no{f}#5885, first_name{f}#..] + */ + public void testRedundantSort_BeforeMvExpand_WithFilterOnExpandedField_ResultTruncationDefaultSize() { LogicalPlan plan = optimizedPlan(""" from test | sort emp_no @@ -2123,18 +2266,17 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField_ResultT var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); // TODO is it correct? 
Double-check AddDefaultTopN rule - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected * - * MvExpand[first_name{f}#7,first_name{r}#16,10] - * \_TopN[[Order[emp_no{f}#6,DESC,FIRST]],10[INTEGER]] - * \_Filter[emp_no{f}#6 ≤ 10006[INTEGER]] - * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] + * Limit[10[INTEGER],true] + * \_MvExpand[first_name{f}#7,first_name{r}#17] + * \_TopN[[Order[emp_no{f}#6,DESC,FIRST]],10[INTEGER]] + * \_Filter[emp_no{f}#6 ≤ 10006[INTEGER]] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] */ public void testFilterWithSortBeforeMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2144,8 +2286,8 @@ public void testFilterWithSortBeforeMvExpand() { | mv_expand first_name | limit 10"""); - var mvExp = as(plan, MvExpand.class); - assertThat(mvExp.limit(), equalTo(10)); + var limit = asLimit(plan, 10, true); + var mvExp = as(limit.child(), MvExpand.class); var topN = as(mvExp.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); assertThat(orderNames(topN), contains("emp_no")); @@ -2153,6 +2295,36 @@ public void testFilterWithSortBeforeMvExpand() { as(filter.child(), EsRelation.class); } + /** + * Expected + * Limit[10[INTEGER],true] + * \_Join[LEFT,[language_code{r}#6],[language_code{r}#6],[language_code{f}#19]] + * |_EsqlProject[[_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, gender{f}#10, hire_date{f}#15, job{f}#16, job.raw{f}#17, lan + * guages{f}#11 AS language_code, last_name{f}#12, long_noidx{f}#18, salary{f}#13]] + * | \_TopN[[Order[emp_no{f}#8,DESC,FIRST]],10[INTEGER]] + * | \_Filter[emp_no{f}#8 ≤ 10006[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] + */ + public void testFilterWithSortBeforeLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + from test + | where emp_no <= 10006 + | sort emp_no desc + | rename languages as language_code + | lookup join languages_lookup on language_code + | limit 10"""); + + var limit = asLimit(plan, 10, true); + var join = as(limit.child(), Join.class); + var project = as(join.left(), Project.class); + var topN = as(project.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); + assertThat(orderNames(topN), contains("emp_no")); + var filter = as(topN.child(), Filter.class); + as(filter.child(), EsRelation.class); + } + /** * Expected * @@ -2177,19 +2349,23 @@ public void testMultiMvExpand_SortDownBelow() { var mvExpand = as(topN.child(), MvExpand.class); var filter = as(mvExpand.child(), Filter.class); mvExpand = as(filter.child(), MvExpand.class); - var topN2 = as(mvExpand.child(), TopN.class); // TODO is it correct? 
Double-check AddDefaultTopN rule - as(topN2.child(), EsRelation.class); + as(mvExpand.child(), EsRelation.class); } /** * Expected * - * MvExpand[c{r}#7,c{r}#16,10000] - * \_EsqlProject[[c{r}#7, a{r}#3]] - * \_TopN[[Order[a{r}#3,ASC,FIRST]],7300[INTEGER]] - * \_MvExpand[b{r}#5,b{r}#15,7300] - * \_Limit[7300[INTEGER]] - * \_Row[[null[NULL] AS a, 123[INTEGER] AS b, 234[INTEGER] AS c]] + * Limit[10000[INTEGER],true] + * \_MvExpand[c{r}#7,c{r}#16] + * \_EsqlProject[[c{r}#7, a{r}#3]] + * \_TopN[[Order[a{r}#3,ASC,FIRST]],7300[INTEGER]] + * \_Limit[7300[INTEGER],true] + * \_MvExpand[b{r}#5,b{r}#15] + * \_Limit[7300[INTEGER],false] + * \_LocalRelation[[a{r}#3, b{r}#5, c{r}#7],[ConstantNullBlock[positions=1], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=123]], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=234]]]] + * */ public void testLimitThenSortBeforeMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2200,15 +2376,53 @@ public void testLimitThenSortBeforeMvExpand() { | sort a NULLS FIRST | mv_expand c"""); - var mvExpand = as(plan, MvExpand.class); - assertThat(mvExpand.limit(), equalTo(10000)); + var limit10kBefore = asLimit(plan, 10000, true); + var mvExpand = as(limit10kBefore.child(), MvExpand.class); var project = as(mvExpand.child(), EsqlProject.class); var topN = as(project.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), equalTo(7300)); assertThat(orderNames(topN), contains("a")); - mvExpand = as(topN.child(), MvExpand.class); - var limit = as(mvExpand.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(7300)); + var limit7300Before = asLimit(topN.child(), 7300, true); + mvExpand = as(limit7300Before.child(), MvExpand.class); + var limit = asLimit(mvExpand.child(), 7300, false); + as(limit.child(), LocalRelation.class); + } + + /** + * Expects + * Limit[10000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#14],[language_code{r}#14],[language_code{f}#18]] + * |_EsqlProject[[c{r}#7 AS language_code, a{r}#3]] + * | \_TopN[[Order[a{r}#3,ASC,FIRST]],7300[INTEGER]] + * | \_Limit[7300[INTEGER],true] + * | \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#16]] + * | |_Limit[7300[INTEGER],false] + * | | \_LocalRelation[[a{r}#3, language_code{r}#5, c{r}#7],[ConstantNullBlock[positions=1], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=123]], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=234]]]] + * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#16] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + */ + public void testLimitThenSortBeforeLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + row a = null, language_code = 123, c = 234 + | lookup join languages_lookup on language_code + | limit 7300 + | keep c, a + | sort a NULLS FIRST + | rename c as language_code + | lookup join languages_lookup on language_code + """); + + var limit10kBefore = asLimit(plan, 10000, true); + var join = as(limit10kBefore.child(), Join.class); + var project = as(join.left(), EsqlProject.class); + var topN = as(project.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(7300)); + assertThat(orderNames(topN), contains("a")); + var limit7300Before = asLimit(topN.child(), 7300, true); + join = as(limit7300Before.child(), Join.class); + var limit = asLimit(join.left(), 7300, false); as(limit.child(), LocalRelation.class); } @@ -2230,20 +2444,18 @@ public void 
testRemoveUnusedSortBeforeMvExpand_DefaultLimit10000() { assertThat(orderNames(topN), contains("first_name")); assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); var mvExpand = as(topN.child(), MvExpand.class); - var topN2 = as(mvExpand.child(), TopN.class); // TODO is it correct? Double-check AddDefaultTopN rule - as(topN2.child(), EsRelation.class); + as(mvExpand.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#104, first_name{f}#105, salary{f}#106]] - * \_TopN[[Order[salary{f}#106,ASC,LAST], Order[first_name{f}#105,ASC,LAST]],15[INTEGER]] - * \_Filter[gender{f}#215 == [46][KEYWORD] AND WILDCARDLIKE(first_name{f}#105)] - * \_MvExpand[first_name{f}#105] - * \_TopN[[Order[emp_no{f}#104,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#104, first_name{f}#105, salary{f}#106] - */ - public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField() { + * EsqlProject[[emp_no{f}#3517, first_name{r}#3528, salary{f}#3522]] + * \_TopN[[Order[salary{f}#3522,ASC,LAST], Order[first_name{r}#3528,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#3519 == [46][KEYWORD] AND WILDCARDLIKE(first_name{r}#3528)] + * \_MvExpand[first_name{f}#3518,first_name{r}#3528,null] + * \_EsRelation[test][_meta_field{f}#3523, emp_no{f}#3517, first_name{f}#..] + */ + public void testRedundantSort_BeforeMvExpand_WithFilterOnExpandedField() { LogicalPlan plan = optimizedPlan(""" from test | sort emp_no @@ -2261,24 +2473,18 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField() { var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - // the filter acts on first_name (the one used in mv_expand), so the limit 15 is not pushed down past mv_expand - // instead the default limit is added - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#104, first_name{f}#105, salary{f}#106]] - * \_TopN[[Order[salary{f}#106,ASC,LAST], Order[first_name{f}#105,ASC,LAST]],15[INTEGER]] - * \_Filter[gender{f}#215 == [46][KEYWORD] AND salary{f}#106 > 60000[INTEGER]] - * \_MvExpand[first_name{f}#105] - * \_TopN[[Order[emp_no{f}#104,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#104, first_name{f}#105, salary{f}#106] - */ - public void testAddDefaultLimit_BeforeMvExpand_WithFilter_NOT_OnExpandedField() { + * EsqlProject[[emp_no{f}#3421, first_name{r}#3432, salary{f}#3426]] + * \_TopN[[Order[salary{f}#3426,ASC,LAST], Order[first_name{r}#3432,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#3423 == [46][KEYWORD] AND salary{f}#3426 > 60000[INTEGER]] + * \_MvExpand[first_name{f}#3422,first_name{r}#3432,null] + * \_EsRelation[test][_meta_field{f}#3427, emp_no{f}#3421, first_name{f}#..] 
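 */

// A self-contained, plain-Java illustration of the subtlety the renamed testRedundantSort_*
// cases around here preserve: when a filter acts on the expanded field (or an alias of it),
// the limit cannot be copied below mv_expand, because the filter can only run after expansion
// and pre-trimming input rows could starve it of matches.
List<List<String>> names = List.of(List.of("Alex", "Bob"), List.of("Chris"));
var correct = names.stream()                        // mv_expand, then filter, then limit
    .flatMap(List::stream)
    .filter(n -> n.startsWith("C"))
    .limit(1)
    .toList();                                      // ["Chris"]
var wrong = names.stream()                          // limit wrongly pushed below the expand
    .limit(1)
    .flatMap(List::stream)
    .filter(n -> n.startsWith("C"))
    .limit(1)
    .toList();                                      // [] -- the matching row was trimmed away

/**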
+ */ + public void testRedundantSort_BeforeMvExpand_WithFilter_NOT_OnExpandedField() { LogicalPlan plan = optimizedPlan(""" from test | sort emp_no @@ -2296,24 +2502,18 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilter_NOT_OnExpandedField() var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - // the filters after mv_expand do not act on the expanded field values, as such the limit 15 is the one being pushed down - // otherwise that limit wouldn't have pushed down and the default limit was instead being added by default before mv_expanded - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("emp_no")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#116, first_name{f}#117 AS x, salary{f}#119]] - * \_TopN[[Order[salary{f}#119,ASC,LAST], Order[first_name{f}#117,ASC,LAST]],15[INTEGER]] - * \_Filter[gender{f}#118 == [46][KEYWORD] AND WILDCARDLIKE(first_name{f}#117)] - * \_MvExpand[first_name{f}#117] - * \_TopN[[Order[gender{f}#118,ASC,LAST]],10000[INTEGER]] - * \_EsRelation[employees][emp_no{f}#116, first_name{f}#117, gender{f}#118, sa..] - */ - public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedFieldAlias() { + * EsqlProject[[emp_no{f}#2085, first_name{r}#2096 AS x, salary{f}#2090]] + * \_TopN[[Order[salary{f}#2090,ASC,LAST], Order[first_name{r}#2096,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#2087 == [46][KEYWORD] AND WILDCARDLIKE(first_name{r}#2096)] + * \_MvExpand[first_name{f}#2086,first_name{r}#2096,null] + * \_EsRelation[test][_meta_field{f}#2091, emp_no{f}#2085, first_name{f}#..] + */ + public void testRedundantSort_BeforeMvExpand_WithFilterOnExpandedFieldAlias() { LogicalPlan plan = optimizedPlan(""" from test | sort gender @@ -2332,36 +2532,56 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedFieldAlias() var filter = as(topN.child(), Filter.class); assertThat(filter.condition(), instanceOf(And.class)); var mvExp = as(filter.child(), MvExpand.class); - topN = as(mvExp.child(), TopN.class); - // the filter uses an alias ("x") to the expanded field ("first_name"), so the default limit is used and not the one provided - assertThat(topN.limit().fold(FoldContext.small()), equalTo(10000)); - assertThat(orderNames(topN), contains("gender")); - as(topN.child(), EsRelation.class); + as(mvExp.child(), EsRelation.class); } /** * Expected: - * MvExpand[a{r}#1402,a{r}#1406,1000] - * \_TopN[[Order[a{r}#1402,ASC,LAST]],1000[INTEGER]] - * \_Row[[1[INTEGER] AS a]] + * Limit[1000[INTEGER],true] + * \_MvExpand[a{r}#3,a{r}#7] + * \_TopN[[Order[a{r}#3,ASC,LAST]],1000[INTEGER]] + * \_LocalRelation[[a{r}#3],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]]] */ public void testSortMvExpand() { LogicalPlan plan = optimizedPlan(""" row a = 1 | sort a - | mv_expand a"""); + | mv_expand a + """); - var expand = as(plan, MvExpand.class); - assertThat(expand.limit(), equalTo(1000)); + var limit = asLimit(plan, 1000, true); + var expand = as(limit.child(), MvExpand.class); var topN = as(expand.child(), TopN.class); var row = as(topN.child(), LocalRelation.class); } /** * Expected: - * MvExpand[emp_no{f}#5,emp_no{r}#15,20] - * \_TopN[[Order[emp_no{f}#5,ASC,LAST]],20[INTEGER]] - * \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, ge..] 
+ * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#3],[language_code{r}#3],[language_code{f}#6]] + * |_TopN[[Order[language_code{r}#3,ASC,LAST]],1000[INTEGER]] + * | \_LocalRelation[[language_code{r}#3],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]]] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#6, language_name{f}#7] + */ + public void testSortLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + row language_code = 1 + | sort language_code + | lookup join languages_lookup on language_code + """); + + var limit = asLimit(plan, 1000, true); + var join = as(limit.child(), Join.class); + var topN = as(join.left(), TopN.class); + var row = as(topN.child(), LocalRelation.class); + } + + /** + * Expected: + * Limit[20[INTEGER],true] + * \_MvExpand[emp_no{f}#5,emp_no{r}#16] + * \_TopN[[Order[emp_no{f}#5,ASC,LAST]],20[INTEGER]] + * \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, ge..] */ public void testSortMvExpandLimit() { LogicalPlan plan = optimizedPlan(""" @@ -2370,8 +2590,8 @@ public void testSortMvExpandLimit() { | mv_expand emp_no | limit 20"""); - var expand = as(plan, MvExpand.class); - assertThat(expand.limit(), equalTo(20)); + var limit = asLimit(plan, 20, true); + var expand = as(limit.child(), MvExpand.class); var topN = as(expand.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), is(20)); var row = as(topN.child(), EsRelation.class); @@ -2379,9 +2599,37 @@ public void testSortMvExpandLimit() { /** * Expected: - * MvExpand[b{r}#5,b{r}#9,1000] - * \_Limit[1000[INTEGER]] - * \_Row[[1[INTEGER] AS a, -15[INTEGER] AS b]] + * Limit[20[INTEGER],true] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#18]] + * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7 AS language_code, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, jo + * b.raw{f}#16, languages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12]] + * | \_TopN[[Order[emp_no{f}#7,ASC,LAST]],20[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + */ + public void testSortLookupJoinLimit() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort emp_no + | rename emp_no as language_code + | lookup join languages_lookup on language_code + | limit 20"""); + + var limit = asLimit(plan, 20, true); + var join = as(limit.child(), Join.class); + var project = as(join.left(), Project.class); + var topN = as(project.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), is(20)); + var row = as(topN.child(), EsRelation.class); + } + + /** + * Expected: + * Limit[1000[INTEGER],true] + * \_MvExpand[b{r}#5,b{r}#9] + * \_Limit[1000[INTEGER],false] + * \_LocalRelation[[a{r}#3, b{r}#5],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=-15]]]] * * see https://github.com/elastic/elasticsearch/issues/102084 */ @@ -2389,15 +2637,90 @@ public void testWhereMvExpand() { LogicalPlan plan = optimizedPlan(""" row a = 1, b = -15 | where b < 3 - | mv_expand b"""); + | mv_expand b + """); - var expand = as(plan, MvExpand.class); - assertThat(expand.limit(), equalTo(1000)); - var limit2 = as(expand.child(), Limit.class); - assertThat(limit2.limit().fold(FoldContext.small()), is(1000)); + var limit = asLimit(plan, 1000, true); + var expand = as(limit.child(), MvExpand.class); + var limit2 = asLimit(expand.child(), 1000, false); var row = as(limit2.child(), LocalRelation.class); } + /** + * Expected: + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#8]] + * |_Limit[1000[INTEGER],false] + * | \_LocalRelation[[a{r}#3, language_code{r}#5],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]], IntVectorBlock[ve + * ctor=ConstantIntVector[positions=1, value=-15]]]] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#8, language_name{f}#9] + */ + public void testWhereLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + row a = 1, language_code = -15 + | where language_code < 3 + | lookup join languages_lookup on language_code + """); + + var limit = asLimit(plan, 1000, true); + var join = as(limit.child(), Join.class); + var limit2 = asLimit(join.left(), 1000, false); + var row = as(limit2.child(), LocalRelation.class); + } + + /** + * Expects + * TopN[[Order[language_code{r}#7,ASC,LAST]],1[INTEGER]] + * \_Limit[1[INTEGER],true] + * \_MvExpand[language_code{r}#3,language_code{r}#7] + * \_Limit[1[INTEGER],false] + * \_LocalRelation[[language_code{r}#3],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]]] + * + * Notice that the `TopN` at the very top has limit 1, not 3! 
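+ * The earlier LIMIT 1 already caps the stream at a single row, so when SORT and the later LIMIT 3 combine into a TopN, the optimizer can use the smaller of the two limits.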
+ */ + public void testDescendantLimitMvExpand() { + LogicalPlan plan = optimizedPlan(""" + ROW language_code = 1 + | MV_EXPAND language_code + | LIMIT 1 + | SORT language_code + | LIMIT 3 + """); + + var topn = as(plan, TopN.class); + var limitAfter = asLimit(topn.child(), 1, true); + var mvExpand = as(limitAfter.child(), MvExpand.class); + var limitBefore = asLimit(mvExpand.child(), 1, false); + var localRelation = as(limitBefore.child(), LocalRelation.class); + } + + /** + * Expects + * TopN[[Order[language_code{r}#3,ASC,LAST]],1[INTEGER]] + * \_Limit[1[INTEGER],true] + * \_Join[LEFT,[language_code{r}#3],[language_code{r}#3],[language_code{f}#6]] + * |_Limit[1[INTEGER],false] + * | \_LocalRelation[[language_code{r}#3],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]]] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#6, language_name{f}#7] + * + * Notice that the `TopN` at the very top has limit 1, not 3! + */ + public void testDescendantLimitLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + ROW language_code = 1 + | LOOKUP JOIN languages_lookup ON language_code + | LIMIT 1 + | SORT language_code + | LIMIT 3 + """); + + var topn = as(plan, TopN.class); + var limitAfter = asLimit(topn.child(), 1, true); + var join = as(limitAfter.child(), Join.class); + var limitBefore = asLimit(join.left(), 1, false); + var localRelation = as(limitBefore.child(), LocalRelation.class); + } + private static List orderNames(TopN topN) { return topN.order().stream().map(o -> as(o.child(), NamedExpression.class).name()).toList(); } @@ -4930,7 +5253,17 @@ public void testPlanSanityCheck() throws Exception { assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references [salary")); } - public void testPlanSanityCheckWithBinaryPlans() throws Exception { + /** + * Expects + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#17]] + * |_EsqlProject[[_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, gender{f}#8, hire_date{f}#13, job{f}#14, job.raw{f}#15, lang + * uages{f}#9 AS language_code, last_name{f}#10, long_noidx{f}#16, salary{f}#11]] + * | \_Limit[1000[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#17, language_name{f}#18] + */ + public void testPlanSanityCheckWithBinaryPlans() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); var plan = optimizedPlan(""" @@ -4939,7 +5272,8 @@ public void testPlanSanityCheckWithBinaryPlans() throws Exception { | LOOKUP JOIN languages_lookup ON language_code """); - var join = as(plan, Join.class); + var upperLimit = asLimit(plan, null, true); + var join = as(upperLimit.child(), Join.class); var joinWithInvalidLeftPlan = join.replaceChildren(join.right(), join.right()); IllegalStateException e = expectThrows(IllegalStateException.class, () -> logicalOptimizer.optimize(joinWithInvalidLeftPlan)); @@ -5995,15 +6329,15 @@ public void testLookupStats() { /** * Filter on join keys should be pushed down * Expects - * Project[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang - * uage_code{r}#4, last_name{f}#11, long_noidx{f}#17, salary{f}#12, language_name{f}#19]] - * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] - * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang + * + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang * uages{f}#10 AS language_code, last_name{f}#11, long_noidx{f}#17, salary{f}#12]] - * | \_Limit[1000[INTEGER]] - * | \_Filter[languages{f}#10 > 1[INTEGER]] - * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] - * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + * | \_Limit[1000[INTEGER],false] + * | \_Filter[languages{f}#10 > 1[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); @@ -6016,11 +6350,11 @@ """; var plan = optimizedPlan(query); - var join = as(plan, Join.class); + var upperLimit = asLimit(plan, 1000, true); + var join = as(upperLimit.child(), Join.class); assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); var project = as(join.left(), Project.class); - var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); + var limit = asLimit(project.child(), 1000, false); var filter = as(limit.child(), Filter.class); // assert that the rename has been undone var op = as(filter.condition(), GreaterThan.class); @@ -6037,15 +6371,14 @@ /** * Filter on left side fields (outside the join key) should be pushed down * Expects - * Project[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang - * uage_code{r}#4, last_name{f}#11, long_noidx{f}#17, salary{f}#12, language_name{f}#19]] - * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] - * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang * uages{f}#10 AS language_code, last_name{f}#11, long_noidx{f}#17, salary{f}#12]] - * | \_Limit[1000[INTEGER]] - * | \_Filter[emp_no{f}#7 > 1[INTEGER]] - * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] - * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + * | \_Limit[1000[INTEGER],false] + * | \_Filter[emp_no{f}#7 > 1[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnLeftSideField() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); @@ -6059,12 +6392,12 @@ var plan = optimizedPlan(query); - var join = as(plan, Join.class); + var upperLimit = asLimit(plan, 1000, true); + var join = as(upperLimit.child(), Join.class); assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); var project = as(join.left(), Project.class); - var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); + var limit = asLimit(project.child(), 1000, false); var filter = as(limit.child(), Filter.class); var op = as(filter.condition(), GreaterThan.class); var field = as(op.left(), FieldAttribute.class); @@ -6226,14 +6559,16 @@ public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField /** * When dropping lookup fields, the lookup relation shouldn't include them. * At least until we can implement InsertFieldExtract there.
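+ * In the expected plan below, the lookup relation on the right therefore exposes only the join key language_code{f}#29, since language_name was dropped.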
+ * * Expects - * EsqlProject[[languages{f}#10]] - * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] - * |_Project[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang - * uages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12, languages{f}#10 AS language_code]] - * | \_Limit[1000[INTEGER]] - * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] - * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] + * EsqlProject[[languages{f}#21]] + * \_Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#29]] + * |_Project[[_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, gender{f}#20, hire_date{f}#25, job{f}#26, job.raw{f}#27, l + * anguages{f}#21, last_name{f}#22, long_noidx{f}#28, salary{f}#23, languages{f}#21 AS language_code]] + * | \_Limit[1000[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#29] */ public void testLookupJoinKeepNoLookupFields() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); @@ -6255,7 +6590,9 @@ public void testLookupJoinKeepNoLookupFields() { assertThat(project.projections().size(), equalTo(1)); assertThat(project.projections().get(0).name(), equalTo("languages")); - var join = as(project.child(), Join.class); + var limit = asLimit(project.child(), 1000, true); + + var join = as(limit.child(), Join.class); var joinRightRelation = as(join.right(), EsRelation.class); assertThat(joinRightRelation.output().size(), equalTo(1)); @@ -6266,13 +6603,15 @@ public void testLookupJoinKeepNoLookupFields() { * Ensure a JOIN shadowed by another JOIN doesn't request the shadowed fields. * * Expected - * Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#20]] - * |_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] - * | |_Eval[[languages{f}#10 AS language_code]] - * | | \_Limit[1000[INTEGER]] - * | | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] - * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] - * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#20, language_name{f}#21] + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#20]] + * |_Limit[1000[INTEGER],true] + * | \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * | |_Eval[[languages{f}#10 AS language_code]] + * | | \_Limit[1000[INTEGER],false] + * | | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#20, language_name{f}#21] */ public void testMultipleLookupShadowing() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); @@ -6286,18 +6625,25 @@ public void testMultipleLookupShadowing() { var plan = optimizedPlan(query); - var finalJoin = as(plan, Join.class); + var limit1 = asLimit(plan, 1000, true); + + var finalJoin = as(limit1.child(), Join.class); var finalJoinRightRelation = as(finalJoin.right(), EsRelation.class); assertThat(finalJoinRightRelation.output().size(), equalTo(2)); assertThat(finalJoinRightRelation.output().get(0).name(), equalTo("language_code")); assertThat(finalJoinRightRelation.output().get(1).name(), equalTo("language_name")); - var initialJoin = as(finalJoin.left(), Join.class); + var limit2 = asLimit(finalJoin.left(), 1000, true); + + var initialJoin = as(limit2.child(), Join.class); var initialJoinRightRelation = as(initialJoin.right(), EsRelation.class); assertThat(initialJoinRightRelation.output().size(), equalTo(1)); assertThat(initialJoinRightRelation.output().get(0).name(), equalTo("language_code")); + + var eval = as(initialJoin.left(), Eval.class); + var limit3 = asLimit(eval.child(), 1000, false); } // @@ -6857,38 +7203,6 @@ public void testToDatePeriodToTimeDurationWithField() { assertEquals("1:60: argument of [to_timeduration(x)] must be a constant, received [x]", e.getMessage().substring(header.length())); } - // These should pass eventually once we lift some restrictions on match function - public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { - final String header = "Found 1 problem\nline "; - VerificationException e = expectThrows(VerificationException.class, () -> plan(""" - from test | eval initial = substring(first_name, 1) | where match(initial, "A")""")); - assertTrue(e.getMessage().startsWith("Found ")); - assertEquals( - "1:67: [MATCH] function cannot operate on [initial], which is not a field from an index mapping", - e.getMessage().substring(header.length()) - ); - - e = expectThrows(VerificationException.class, () -> plan(""" - from test | eval text=concat(first_name, last_name) | where match(text, "cat")""")); - assertTrue(e.getMessage().startsWith("Found ")); - assertEquals( - "1:67: [MATCH] function cannot operate on [text], which is not a field from an index mapping", - e.getMessage().substring(header.length()) - ); - } - - public void testMatchFunctionIsNotNullable() { - String queryText = """ - row n = null | eval text = n + 5 | where match(text::keyword, "Anna") - """; - - VerificationException ve = expectThrows(VerificationException.class, () -> plan(queryText)); - assertThat( - ve.getMessage(), - containsString("[MATCH] function cannot operate on [text::keyword], which is not a field from an index mapping") - ); - } - public void testWhereNull() { var plan = plan(""" from test @@ -6919,4 +7233,349 @@ public void testFunctionNamedParamsAsFunctionArgument() { assertEquals(new Literal(EMPTY, 2.0, DataType.DOUBLE), ee.value()); assertEquals(DataType.DOUBLE, ee.dataType()); } + + /** + * TopN[[Order[emp_no{f}#11,ASC,LAST]],1000[INTEGER]] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#22]] + * |_EsqlProject[[_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, gender{f}#13, hire_date{f}#18, job{f}#19, job.raw{f}#20, l + * anguages{f}#14 AS language_code, last_name{f}#15, long_noidx{f}#21, salary{f}#16, foo{r}#7]] + * | \_Eval[[[62 61 
72][KEYWORD] AS foo]] + * | \_Filter[languages{f}#14 > 1[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#22, language_name{f}#23] + */ + public void testRedundantSortOnJoin() { + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + + var plan = optimizedPlan(""" + FROM test + | SORT languages + | RENAME languages AS language_code + | EVAL foo = "bar" + | LOOKUP JOIN languages_lookup ON language_code + | WHERE language_code > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var join = as(topN.child(), Join.class); + var project = as(join.left(), EsqlProject.class); + var eval = as(project.child(), Eval.class); + var filter = as(eval.child(), Filter.class); + as(filter.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#9,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#9 > 1[INTEGER]] + * \_MvExpand[languages{f}#12,languages{r}#20,null] + * \_Eval[[[62 61 72][KEYWORD] AS foo]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] + */ + public void testRedundantSortOnMvExpand() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL foo = "bar" + | MV_EXPAND languages + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var eval = as(mvExpand.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#11,ASC,LAST]],1000[INTEGER]] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#22]] + * |_Filter[emp_no{f}#11 > 1[INTEGER]] + * | \_MvExpand[languages{f}#14,languages{r}#24,null] + * | \_Eval[[languages{f}#14 AS language_code]] + * | \_EsRelation[test][_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#22, language_name{f}#23] + */ + public void testRedundantSortOnMvExpandAndJoin() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL language_code = languages + | MV_EXPAND languages + | WHERE emp_no > 1 + | LOOKUP JOIN languages_lookup ON language_code + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var join = as(topN.child(), Join.class); + var filter = as(join.left(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var eval = as(mvExpand.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#12,ASC,LAST]],1000[INTEGER]] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#23]] + * |_Filter[emp_no{f}#12 > 1[INTEGER]] + * | \_MvExpand[languages{f}#15,languages{r}#25,null] + * | \_Eval[[languages{f}#15 AS language_code]] + * | \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#23, language_name{f}#24] + */ + public void testMultipleRedundantSortOnMvExpandAndJoin() { + var plan = optimizedPlan(""" + FROM test + | SORT first_name + | EVAL language_code = languages + | MV_EXPAND languages + | SORT last_name + | WHERE emp_no > 1 + | LOOKUP JOIN languages_lookup ON language_code + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var join = as(topN.child(), Join.class); + var filter = as(join.left(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var eval = as(mvExpand.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#16,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#16 > 1[INTEGER]] + * \_MvExpand[languages{f}#19,languages{r}#31] + * \_Dissect[foo{r}#5,Parser[pattern=%{z}, appendSeparator=, parser=org.elasticsearch.dissect.DissectParser@26f2cab],[z{r}#10 + * ]] + * \_Grok[foo{r}#5,Parser[pattern=%{WORD:y}, grok=org.elasticsearch.grok.Grok@6ea44ccd],[y{r}#9]] + * \_Enrich[ANY,[6c 61 6e 67 75 61 67 65 73 5f 69 64 78][KEYWORD],foo{r}#5,{"match":{"indices":[],"match_field":"id","enrich_ + * fields":["language_code","language_name"]}},{=languages_idx},[language_code{r}#29, language_name{r}#30]] + * \_Eval[[TOSTRING(languages{f}#19) AS foo]] + * \_EsRelation[test][_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, ..] + */ + public void testRedundantSortOnMvExpandEnrichGrokDissect() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL foo = to_string(languages) + | ENRICH languages_idx on foo + | GROK foo "%{WORD:y}" + | DISSECT foo "%{z}" + | MV_EXPAND languages + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var dissect = as(mvExpand.child(), Dissect.class); + var grok = as(dissect.child(), Grok.class); + var enrich = as(grok.child(), Enrich.class); + var eval = as(enrich.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#20,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#20 > 1[INTEGER]] + * \_MvExpand[languages{f}#23,languages{r}#37] + * \_Dissect[foo{r}#5,Parser[pattern=%{z}, appendSeparator=, parser=org.elasticsearch.dissect.DissectParser@3e922db0],[z{r}#1 + * 4]] + * \_Grok[foo{r}#5,Parser[pattern=%{WORD:y}, grok=org.elasticsearch.grok.Grok@4d6ad024],[y{r}#13]] + * \_Enrich[ANY,[6c 61 6e 67 75 61 67 65 73 5f 69 64 78][KEYWORD],foo{r}#5,{"match":{"indices":[],"match_field":"id","enrich_ + * fields":["language_code","language_name"]}},{=languages_idx},[language_code{r}#35, language_name{r}#36]] + * \_Join[LEFT,[language_code{r}#8],[language_code{r}#8],[language_code{f}#31]] + * |_Eval[[TOSTRING(languages{f}#23) AS foo, languages{f}#23 AS language_code]] + * | \_EsRelation[test][_meta_field{f}#26, emp_no{f}#20, first_name{f}#21, ..]
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#31] + */ + public void testRedundantSortOnMvExpandJoinEnrichGrokDissect() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL foo = to_string(languages), language_code = languages + | LOOKUP JOIN languages_lookup ON language_code + | ENRICH languages_idx on foo + | GROK foo "%{WORD:y}" + | DISSECT foo "%{z}" + | MV_EXPAND languages + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var dissect = as(mvExpand.child(), Dissect.class); + var grok = as(dissect.child(), Grok.class); + var enrich = as(grok.child(), Enrich.class); + var join = as(enrich.child(), Join.class); + var eval = as(join.left(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#23,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#23 > 1[INTEGER]] + * \_MvExpand[languages{f}#26,languages{r}#36] + * \_EsqlProject[[language_name{f}#35, foo{r}#5 AS bar, languages{f}#26, emp_no{f}#23]] + * \_Join[LEFT,[language_code{r}#8],[language_code{r}#8],[language_code{f}#34]] + * |_Project[[_meta_field{f}#29, emp_no{f}#23, first_name{f}#24, gender{f}#25, hire_date{f}#30, job{f}#31, job.raw{f}#32, l + * anguages{f}#26, last_name{f}#27, long_noidx{f}#33, salary{f}#28, foo{r}#5, languages{f}#26 AS language_code]] + * | \_Eval[[TOSTRING(languages{f}#26) AS foo]] + * | \_EsRelation[test][_meta_field{f}#29, emp_no{f}#23, first_name{f}#24, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#34, language_name{f}#35] + */ + public void testRedundantSortOnMvExpandJoinKeepDropRename() { + var plan = optimizedPlan(""" + FROM test + | SORT languages + | EVAL foo = to_string(languages), language_code = languages + | LOOKUP JOIN languages_lookup ON language_code + | KEEP language_name, language_code, foo, languages, emp_no + | DROP language_code + | RENAME foo AS bar + | MV_EXPAND languages + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var project = as(mvExpand.child(), Project.class); + var join = as(project.child(), Join.class); + var project2 = as(join.left(), Project.class); + var eval = as(project2.child(), Eval.class); + as(eval.child(), EsRelation.class); + } + + /** + * TopN[[Order[emp_no{f}#15,ASC,LAST]],1000[INTEGER]] + * \_Filter[emp_no{f}#15 > 1[INTEGER]] + * \_MvExpand[foo{r}#10,foo{r}#29] + * \_Eval[[CONCAT(language_name{r}#28,[66 6f 6f][KEYWORD]) AS foo]] + * \_MvExpand[language_name{f}#27,language_name{r}#28] + * \_Join[LEFT,[language_code{r}#3],[language_code{r}#3],[language_code{f}#26]] + * |_Eval[[1[INTEGER] AS language_code]] + * | \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#26, language_name{f}#27] + */ + public void testEvalLookupMultipleSorts() { + var plan = optimizedPlan(""" + FROM test + | EVAL language_code = 1 + | LOOKUP JOIN languages_lookup ON language_code + | SORT language_name + | MV_EXPAND language_name + | EVAL foo = concat(language_name, "foo") + | MV_EXPAND foo + | WHERE emp_no > 1 + | SORT emp_no + """); + + var topN = as(plan, TopN.class); + var filter = as(topN.child(), Filter.class); + var mvExpand = as(filter.child(), MvExpand.class); + var eval = as(mvExpand.child(), Eval.class); + mvExpand = as(eval.child(), MvExpand.class); + var join = as(mvExpand.child(), Join.class); + eval = as(join.left(), Eval.class); + as(eval.child(), EsRelation.class); + + } + + public void testUnboundedSortSimple() { + var query = """ + ROW x = [1,2,3], y = 1 + | SORT y + | MV_EXPAND x + | WHERE x > 2 + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 2:5: Unbounded sort not supported yet [SORT y] please add a limit")); + } + + public void testUnboundedSortJoin() { + var query = """ + ROW x = [1,2,3], y = 2, language_code = 1 + | SORT y + | LOOKUP JOIN languages_lookup ON language_code + | WHERE language_name == "foo" + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 2:5: Unbounded sort not supported yet [SORT y] please add a limit")); + } + + public void testUnboundedSortWithMvExpandAndFilter() { + var query = """ + FROM test + | EVAL language_code = 1 + | LOOKUP JOIN languages_lookup ON language_code + | SORT language_name + | EVAL foo = concat(language_name, "foo") + | MV_EXPAND foo + | WHERE foo == "foo" + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 4:3: Unbounded sort not supported yet [SORT language_name] please add a limit")); + } + + public void testUnboundedSortWithLookupJoinAndFilter() { + var query = """ + FROM test + | EVAL language_code = 1 + | EVAL foo = concat(language_code::string, "foo") + | MV_EXPAND foo + | SORT foo + | LOOKUP JOIN languages_lookup ON language_code + | WHERE language_name == "foo" + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 5:3: Unbounded sort not supported yet [SORT foo] please add a limit")); + } + + public void testUnboundedSortExpandFilter() { + var query = """ + ROW x = [1,2,3], y = 1 + | SORT x + | MV_EXPAND x + | WHERE x > 2 + """; + + VerificationException e = expectThrows(VerificationException.class, () -> plan(query)); + assertThat(e.getMessage(), containsString("line 2:5: Unbounded sort not supported yet [SORT x] please add a limit")); + } + + public void testPruneRedundantOrderBy() { + var rule = new PruneRedundantOrderBy(); + + var query = """ + row x = [1,2,3], y = 1 + | sort x + | mv_expand x + | sort x + | mv_expand x + | sort y + """; + LogicalPlan analyzed = analyzer.analyze(parser.createStatement(query)); + LogicalPlan optimized = rule.apply(analyzed); + + // check that all the redundant SORTs are removed in a single run + var limit = as(optimized, Limit.class); + var orderBy = as(limit.child(), OrderBy.class); + var mvExpand = as(orderBy.child(), MvExpand.class); + var mvExpand2 = as(mvExpand.child(), MvExpand.class); + as(mvExpand2.child(), 
Row.class); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 78aaf1f354723..55448b7ceaf49 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -7596,7 +7596,7 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP TestBlockFactory.getNonBreakingInstance(), Settings.EMPTY, config, - new ExchangeSourceHandler(10, null, null)::createExchangeSource, + new ExchangeSourceHandler(10, null)::createExchangeSource, () -> exchangeSinkHandler.createExchangeSink(() -> {}), null, null, @@ -7604,7 +7604,7 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP List.of() ); - return planner.plan(FoldContext.small(), plan); + return planner.plan("test", FoldContext.small(), plan); } private List> findFieldNamesInLookupJoinDescription(LocalExecutionPlanner.LocalExecutionPlan physicalOperations) { @@ -7631,7 +7631,6 @@ private List> findFieldNamesInLookupJoinDescription(LocalExecutionPl } public void testScore() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var plan = physicalPlan(""" from test metadata _score | where match(first_name, "john") @@ -7658,7 +7657,6 @@ public void testScore() { } public void testScoreTopN() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); var plan = physicalPlan(""" from test metadata _score | where match(first_name, "john") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java index 18d8bc9fb0a75..2ca1d8c4d1288 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java @@ -19,7 +19,7 @@ public void testDevelopmentInline() throws Exception { } public void testDevelopmentLookup() throws Exception { - parse("row a = 1 | lookup \"foo\" on j", "lookup"); + parse("row a = 1 | lookup_\uD83D\uDC14 \"foo\" on j", "lookup_\uD83D\uDC14"); } public void testDevelopmentMetrics() throws Exception { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index dcc549057b77a..47e1616060bd5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -398,8 +398,11 @@ public void testStatsWithoutGroupKeyMixedAggAndFilter() { public void testInlineStatsWithGroups() { var query = "inlinestats b = min(a) by c, d.e"; if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> processingCommand(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'inlinestats' expecting {")); + expectThrows( + ParsingException.class, + containsString("line 1:13: mismatched input 'inlinestats' expecting 
{"), + () -> processingCommand(query) + ); return; } assertEquals( @@ -424,8 +427,11 @@ public void testInlineStatsWithGroups() { public void testInlineStatsWithoutGroups() { var query = "inlinestats min(a), c = 1"; if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> processingCommand(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'inlinestats' expecting {")); + expectThrows( + ParsingException.class, + containsString("line 1:13: mismatched input 'inlinestats' expecting {"), + () -> processingCommand(query) + ); return; } assertEquals( @@ -489,7 +495,8 @@ public void testStringAsIndexPattern() { clusterAndIndexAsIndexPattern(command, "cluster:index"); clusterAndIndexAsIndexPattern(command, "cluster:.index"); clusterAndIndexAsIndexPattern(command, "cluster*:index*"); - clusterAndIndexAsIndexPattern(command, "cluster*:*"); + clusterAndIndexAsIndexPattern(command, "cluster*:*");// this is not a valid pattern, * should be inside <> + clusterAndIndexAsIndexPattern(command, "cluster*:"); clusterAndIndexAsIndexPattern(command, "cluster*:*"); clusterAndIndexAsIndexPattern(command, "*:index*"); clusterAndIndexAsIndexPattern(command, "*:*"); @@ -857,16 +864,17 @@ public void testSuggestAvailableSourceCommandsOnParsingError() { Tuple.tuple("a/*hi*/", "a"), Tuple.tuple("explain [ frm a ]", "frm") )) { - ParsingException pe = expectThrows(ParsingException.class, () -> statement(queryWithUnexpectedCmd.v1())); - assertThat( - pe.getMessage(), + expectThrows( + ParsingException.class, allOf( containsString("mismatched input '" + queryWithUnexpectedCmd.v2() + "'"), containsString("'explain'"), containsString("'from'"), containsString("'row'") - ) + ), + () -> statement(queryWithUnexpectedCmd.v1()) ); + } } @@ -881,15 +889,15 @@ public void testSuggestAvailableProcessingCommandsOnParsingError() { Tuple.tuple("from a | a/*hi*/", "a"), Tuple.tuple("explain [ from a | evl b = c ]", "evl") )) { - ParsingException pe = expectThrows(ParsingException.class, () -> statement(queryWithUnexpectedCmd.v1())); - assertThat( - pe.getMessage(), + expectThrows( + ParsingException.class, allOf( containsString("mismatched input '" + queryWithUnexpectedCmd.v2() + "'"), containsString("'eval'"), containsString("'stats'"), containsString("'where'") - ) + ), + () -> statement(queryWithUnexpectedCmd.v1()) ); } } @@ -980,10 +988,10 @@ public void testGrokPattern() { assertEquals("%{WORD:foo}", grok.parser().pattern()); assertEquals(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields()); - ParsingException pe = expectThrows(ParsingException.class, () -> statement("row a = \"foo bar\" | grok a \"%{_invalid_:x}\"")); - assertThat( - pe.getMessage(), - containsString("Invalid pattern [%{_invalid_:x}] for grok: Unable to find pattern [_invalid_] in Grok's pattern dictionary") + expectThrows( + ParsingException.class, + containsString("Invalid pattern [%{_invalid_:x}] for grok: Unable to find pattern [_invalid_] in Grok's pattern dictionary"), + () -> statement("row a = \"foo bar\" | grok a \"%{_invalid_:x}\"") ); cmd = processingCommand("grok a \"%{WORD:foo} %{WORD:foo}\""); @@ -1073,15 +1081,28 @@ public void testEnrich() { processingCommand("enrich _" + mode.name() + ":countries ON country_code") ); - expectError("from a | enrich countries on foo* ", "Using wildcards [*] in ENRICH WITH projections is not allowed [foo*]"); - expectError("from a | enrich countries on foo with bar*", "Using wildcards [*] in ENRICH WITH projections is not 
allowed [bar*]"); + expectError("from a | enrich countries on foo* ", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [foo*]"); + expectError("from a | enrich countries on * ", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]"); + expectError( + "from a | enrich countries on foo with bar*", + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [bar*]" + ); + expectError("from a | enrich countries on foo with *", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]"); expectError( "from a | enrich countries on foo with x = bar* ", - "Using wildcards [*] in ENRICH WITH projections is not allowed [bar*]" + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [bar*]" + ); + expectError( + "from a | enrich countries on foo with x = * ", + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]" ); expectError( "from a | enrich countries on foo with x* = bar ", - "Using wildcards [*] in ENRICH WITH projections is not allowed [x*]" + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [x*]" + ); + expectError( + "from a | enrich countries on foo with * = bar ", + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]" ); expectError( "from a | enrich typo:countries on foo", @@ -1109,8 +1130,7 @@ public void testKeepStarMvExpand() { public void testUsageOfProject() { String query = "from test | project foo, bar"; - ParsingException e = expectThrows(ParsingException.class, "Expected syntax error for " + query, () -> statement(query)); - assertThat(e.getMessage(), containsString("mismatched input 'project' expecting")); + expectThrows(ParsingException.class, containsString("mismatched input 'project' expecting"), () -> statement(query)); } public void testInputParams() { @@ -1580,10 +1600,6 @@ public void testIntervalParam() { } public void testParamForIdentifier() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // field names can appear in eval/where/stats/sort/keep/drop/rename/dissect/grok/enrich/mvexpand // eval, where assertEquals( @@ -1695,8 +1711,7 @@ public void testParamForIdentifier() { List.of(new Order(EMPTY, attribute("f.11..f.12.*"), Order.OrderDirection.ASC, Order.NullsPosition.LAST)) ), attribute("f.*.13.f.14*"), - attribute("f.*.13.f.14*"), - null + attribute("f.*.13.f.14*") ), statement( """ @@ -1842,10 +1857,6 @@ public void testParamForIdentifier() { } public void testParamForIdentifierPattern() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // name patterns can appear in keep and drop // all patterns LogicalPlan plan = statement( @@ -1935,10 +1946,6 @@ public void testParamForIdentifierPattern() { } public void testParamInInvalidPosition() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // param for pattern is not supported in eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand // where/stats/sort/dissect/grok are covered in RestEsqlTestCase List invalidParamPositions = List.of("eval ?f1 = 1", "stats x = ?f1(*)", "mv_expand ?f1", "rename ?f1 as ?f2"); @@ -1979,7 +1986,7 @@ public void 
testParamInInvalidPosition() { expectError( "from idx1 | " + enrich, List.of(paramAsPattern("f1", pattern), paramAsIdentifier("f2", "f.2"), paramAsIdentifier("f3", "f.3*")), - "Using wildcards [*] in ENRICH WITH projections is not allowed [" + pattern + "]" + "Using wildcards [*] in ENRICH WITH projections is not allowed, found [" + pattern + "]" ); expectError( "from idx1 | " + enrich, @@ -1990,10 +1997,6 @@ public void testParamInInvalidPosition() { } public void testMissingParam() { - assumeTrue( - "named parameters for identifiers and patterns require snapshot build", - EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() - ); // cover all processing commands eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand/keep/drop String error = "Unknown query parameter [f1], did you mean [f4]?"; String errorMvExpandFunctionNameCommandOption = "Query parameter [?f1] is null or undefined, cannot be used as an identifier"; @@ -2046,8 +2049,7 @@ public void testQuotedName() { private void assertStringAsIndexPattern(String string, String statement) { if (Build.current().isSnapshot() == false && statement.contains("METRIC")) { - var e = expectThrows(ParsingException.class, () -> statement(statement)); - assertThat(e.getMessage(), containsString("mismatched input 'METRICS' expecting {")); + expectThrows(ParsingException.class, containsString("mismatched input 'METRICS' expecting {"), () -> statement(statement)); return; } LogicalPlan from = statement(statement); @@ -2058,8 +2060,11 @@ private void assertStringAsIndexPattern(String string, String statement) { private void assertStringAsLookupIndexPattern(String string, String statement) { if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> statement(statement)); - assertThat(e.getMessage(), containsString("line 1:14: LOOKUP_🐔 is in preview and only available in SNAPSHOT build")); + expectThrows( + ParsingException.class, + containsString("line 1:14: LOOKUP_🐔 is in preview and only available in SNAPSHOT build"), + () -> statement(statement) + ); return; } var plan = statement(statement); @@ -2126,8 +2131,11 @@ public void testInlineConvertWithNonexistentType() { public void testLookup() { String query = "ROW a = 1 | LOOKUP_🐔 t ON j"; if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> statement(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'LOOKUP_🐔' expecting {")); + expectThrows( + ParsingException.class, + containsString("line 1:13: mismatched input 'LOOKUP_🐔' expecting {"), + () -> statement(query) + ); return; } var plan = statement(query); @@ -2283,6 +2291,10 @@ public void testInvalidAlias() { expectError("from test | eval A = coalesce(\"Å\", Å)", "line 1:36: token recognition error at: 'Å'"); } + public void testInvalidRemoteClusterPattern() { + expectError("from \"rem:ote\":index", "cluster string [rem:ote] must not contain ':'"); + } + private LogicalPlan unresolvedRelation(String index) { return new UnresolvedRelation(EMPTY, new IndexPattern(EMPTY, index), false, List.of(), IndexMode.STANDARD, null, "FROM"); } @@ -2466,8 +2478,25 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt assertEquals(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields()); UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class); assertEquals(ur, relation("test")); + } + public void testNamedFunctionArgumentInMapWithNamedParameters() 
{ // map entry values provided in named parameter, arrays are not supported by named parameters yet + LinkedHashMap expectedMap1 = new LinkedHashMap<>(4); + expectedMap1.put("option1", "string"); + expectedMap1.put("option2", 1); + expectedMap1.put("option3", List.of(2.0, 3.0, 4.0)); + expectedMap1.put("option4", List.of(true, false)); + LinkedHashMap expectedMap2 = new LinkedHashMap<>(4); + expectedMap2.put("option1", List.of("string1", "string2")); + expectedMap2.put("option2", List.of(1, 2, 3)); + expectedMap2.put("option3", 2.0); + expectedMap2.put("option4", true); + LinkedHashMap expectedMap3 = new LinkedHashMap<>(4); + expectedMap3.put("option1", "string"); + expectedMap3.put("option2", 2.0); + expectedMap3.put("option3", List.of(1, 2, 3)); + expectedMap3.put("option4", List.of(true, false)); assertEquals( new Filter( EMPTY, @@ -2565,7 +2594,7 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt ) ); - plan = statement( + LogicalPlan plan = statement( """ from test | dissect ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]}) "%{bar}" @@ -2585,16 +2614,16 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt ) ) ); - grok = as(plan, Grok.class); + Grok grok = as(plan, Grok.class); assertEquals(function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))), grok.input()); assertEquals("%{WORD:foo}", grok.parser().pattern()); assertEquals(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields()); - dissect = as(grok.child(), Dissect.class); + Dissect dissect = as(grok.child(), Dissect.class); assertEquals(function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1))), dissect.input()); assertEquals("%{bar}", dissect.parser().pattern()); assertEquals("", dissect.parser().appendSeparator()); assertEquals(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields()); - ur = as(dissect.child(), UnresolvedRelation.class); + UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class); assertEquals(ur, relation("test")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/AbstractNodeSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/AbstractNodeSerializationTests.java index e6faa9a253d76..998b895a4e005 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/AbstractNodeSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/AbstractNodeSerializationTests.java @@ -51,7 +51,7 @@ public static List randomFieldAttributes(int min, int max, boolean on } @Override - protected final T copyInstance(T instance, TransportVersion version) throws IOException { + protected T copyInstance(T instance, TransportVersion version) throws IOException { return copyInstance( instance, getNamedWriteableRegistry(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java index 7c75ea623b34f..2b812e4caf260 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java @@ -47,9 +47,4 @@ protected Join mutateInstance(Join instance) throws IOException { } return new Join(instance.source(), left, right, 
config); } - - @Override - protected boolean alwaysEmptySource() { - return true; - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/LimitSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/LimitSerializationTests.java index 5d994eb2880ba..b1ffb9c5f8ba8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/LimitSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/LimitSerializationTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.TransportVersion; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.FieldAttributeTests; @@ -19,23 +20,33 @@ protected Limit createTestInstance() { Source source = randomSource(); Expression limit = FieldAttributeTests.createFieldAttribute(0, false); LogicalPlan child = randomChild(0); - return new Limit(source, limit, child); + return new Limit(source, limit, child, randomBoolean()); } @Override protected Limit mutateInstance(Limit instance) throws IOException { Expression limit = instance.limit(); LogicalPlan child = instance.child(); - if (randomBoolean()) { - limit = randomValueOtherThan(limit, () -> FieldAttributeTests.createFieldAttribute(0, false)); - } else { - child = randomValueOtherThan(child, () -> randomChild(0)); + boolean duplicated = instance.duplicated(); + switch (randomIntBetween(0, 2)) { + case 0 -> limit = randomValueOtherThan(limit, () -> FieldAttributeTests.createFieldAttribute(0, false)); + case 1 -> child = randomValueOtherThan(child, () -> randomChild(0)); + case 2 -> duplicated = duplicated == false; + default -> throw new IllegalStateException("Should never reach here"); } - return new Limit(instance.source(), limit, child); + return new Limit(instance.source(), limit, child, duplicated); } @Override protected boolean alwaysEmptySource() { return true; } + + @Override + protected Limit copyInstance(Limit instance, TransportVersion version) throws IOException { + // Limit#duplicated() is ALWAYS false when being serialized and we assert that in Limit#writeTo(). + // So, we need to manually simulate this situation. + Limit deserializedCopy = super.copyInstance(instance.withDuplicated(false), version); + return deserializedCopy.withDuplicated(instance.duplicated()); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/OrderExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/OrderExecSerializationTests.java deleted file mode 100644 index 755f1cd4f52da..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/OrderExecSerializationTests.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.Order; -import org.elasticsearch.xpack.esql.expression.OrderSerializationTests; - -import java.io.IOException; -import java.util.List; - -public class OrderExecSerializationTests extends AbstractPhysicalPlanSerializationTests { - public static OrderExec randomOrderExec(int depth) { - Source source = randomSource(); - PhysicalPlan child = randomChild(depth); - List order = randomList(1, 10, OrderSerializationTests::randomOrder); - return new OrderExec(source, child, order); - } - - @Override - protected OrderExec createTestInstance() { - return randomOrderExec(0); - } - - @Override - protected OrderExec mutateInstance(OrderExec instance) throws IOException { - PhysicalPlan child = instance.child(); - List order = instance.order(); - if (randomBoolean()) { - child = randomValueOtherThan(child, () -> randomChild(0)); - } else { - order = randomValueOtherThan(order, () -> randomList(1, 10, OrderSerializationTests::randomOrder)); - } - return new OrderExec(instance.source(), child, order); - } - - @Override - protected boolean alwaysEmptySource() { - return true; - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index e1e606a6e84b1..7e5143d5a3ac0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -84,6 +84,7 @@ public void closeIndex() throws IOException { public void testLuceneSourceOperatorHugeRowSize() throws IOException { int estimatedRowSize = randomEstimatedRowSize(estimatedRowSizeIsHuge); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, @@ -110,6 +111,7 @@ public void testLuceneTopNSourceOperator() throws IOException { EsQueryExec.FieldSort sort = new EsQueryExec.FieldSort(sortField, Order.OrderDirection.ASC, Order.NullsPosition.LAST); Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, @@ -136,6 +138,7 @@ public void testLuceneTopNSourceOperatorDistanceSort() throws IOException { EsQueryExec.GeoDistanceSort sort = new EsQueryExec.GeoDistanceSort(sortField, Order.OrderDirection.ASC, 1, -1); Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( + "test", FoldContext.small(), new EsQueryExec( Source.EMPTY, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index 57210fda07f2b..f9732272dbd74 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.esql.optimizer.TestPlannerOptimizer; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; -import 
org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.hamcrest.Matcher; import org.junit.BeforeClass; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 780045077f7b8..1009eaea9b54c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -48,6 +48,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; @@ -292,6 +293,10 @@ private Block getBlockForMultiType(DocBlock indexDoc, MultiTypeEsField multiType private Block extractBlockForSingleDoc(DocBlock docBlock, String columnName, TestBlockCopier blockCopier) { var indexId = docBlock.asVector().shards().getInt(0); var indexPage = indexPages.get(indexId); + if (MetadataAttribute.INDEX.equals(columnName)) { + return docBlock.blockFactory() + .newConstantBytesRefBlockWith(new BytesRef(indexPage.index), blockCopier.docIndices.getPositionCount()); + } int columnIndex = indexPage.columnIndex(columnName) .orElseThrow(() -> new EsqlIllegalArgumentException("Cannot find column named [{}] in {}", columnName, indexPage.columnNames)); var originalData = indexPage.page.getBlock(columnIndex); @@ -410,8 +415,9 @@ private Block extractBlockForColumn( ) { foreachIndexDoc(docBlock, indexDoc -> { TestBlockCopier blockCopier = blockCopier(dataType, extractPreference, indexDoc.asVector().docs()); - Block blockForIndex = extractBlock.apply(indexDoc, blockCopier); - blockBuilder.copyFrom(blockForIndex, 0, blockForIndex.getPositionCount()); + try (Block blockForIndex = extractBlock.apply(indexDoc, blockCopier)) { + blockBuilder.copyFrom(blockForIndex, 0, blockForIndex.getPositionCount()); + } }); var result = blockBuilder.build(); assert result.getPositionCount() == docBlock.getPositionCount() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java index f3b1d84e507a5..e58824290c49e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java @@ -155,11 +155,14 @@ protected ClusterComputeRequest mutateInstance(ClusterComputeRequest in) throws public void testFallbackIndicesOptions() throws Exception { ClusterComputeRequest request = createTestInstance(); - var version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_14_0, TransportVersions.V_8_16_0); - ClusterComputeRequest cloned = copyInstance(request, version); + var oldVersion = TransportVersionUtils.randomVersionBetween( + random(), + TransportVersions.V_8_14_0, + TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_16_0) + ); + ClusterComputeRequest cloned = 
copyInstance(request, oldVersion); assertThat(cloned.clusterAlias(), equalTo(request.clusterAlias())); assertThat(cloned.sessionId(), equalTo(request.sessionId())); - assertThat(cloned.configuration(), equalTo(request.configuration())); RemoteClusterPlan plan = cloned.remoteClusterPlan(); assertThat(plan.plan(), equalTo(request.remoteClusterPlan().plan())); assertThat(plan.targetIndices(), equalTo(request.remoteClusterPlan().targetIndices())); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java index 7db3216d1736d..f4deaa45f1f87 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.plugin; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.PlainActionFuture; @@ -62,6 +64,7 @@ private List randomProfiles() { for (int i = 0; i < numProfiles; i++) { profiles.add( new DriverProfile( + RandomStrings.randomAsciiLettersOfLength(random(), 5), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSenderTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSenderTests.java new file mode 100644 index 0000000000000..e181d9bb34955 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestSenderTests.java @@ -0,0 +1,287 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
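The ClusterRequestTests change above tightens the random version range: randomVersionBetween samples an inclusive [min, max] range, so exercising the fallback path strictly before V_8_16_0 requires passing that version's predecessor as the upper bound. A self-contained sketch of the off-by-one being fixed, with made-up integer ids standing in for real transport versions (not the actual TransportVersionUtils API):

import java.util.Random;

final class VersionRangeSketch {
    // Inclusive sampler, mirroring the contract of randomVersionBetween.
    static int randomVersionBetween(Random random, int minInclusive, int maxInclusive) {
        return minInclusive + random.nextInt(maxInclusive - minInclusive + 1);
    }

    // Stand-in for getPreviousVersion; real transport version ids are ordered
    // but not necessarily contiguous, so this is illustration only.
    static int previousVersion(int version) {
        return version - 1;
    }

    public static void main(String[] args) {
        int v8_14_0 = 8_140_000, v8_16_0 = 8_160_000; // hypothetical ids
        Random random = new Random();
        for (int i = 0; i < 1_000; i++) {
            int picked = randomVersionBetween(random, v8_14_0, previousVersion(v8_16_0));
            if (picked >= v8_16_0) {
                throw new AssertionError("sampled the cutoff version: " + picked);
            }
        }
    }
}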
+ */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.FixedExecutorBuilder; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.transport.TransportService; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.esql.plugin.DataNodeRequestSender.NodeRequest; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class DataNodeRequestSenderTests extends ComputeTestCase { + + private TestThreadPool threadPool; + private Executor executor = null; + private static final String ESQL_TEST_EXECUTOR = "esql_test_executor"; + + private final DiscoveryNode node1 = DiscoveryNodeUtils.create("node-1"); + private final DiscoveryNode node2 = DiscoveryNodeUtils.create("node-2"); + private final DiscoveryNode node3 = DiscoveryNodeUtils.create("node-3"); + private final DiscoveryNode node4 = DiscoveryNodeUtils.create("node-4"); + private final DiscoveryNode node5 = DiscoveryNodeUtils.create("node-5"); + private final ShardId shard1 = new ShardId("index", "n/a", 1); + private final ShardId shard2 = new ShardId("index", "n/a", 2); + private final ShardId shard3 = new ShardId("index", "n/a", 3); + private final ShardId shard4 = new ShardId("index", "n/a", 4); + private final ShardId shard5 = new ShardId("index", "n/a", 5); + + @Before + public void setThreadPool() { + int numThreads = randomBoolean() ? 
1 : between(2, 16); + threadPool = new TestThreadPool( + "test", + new FixedExecutorBuilder(Settings.EMPTY, ESQL_TEST_EXECUTOR, numThreads, 1024, "esql", EsExecutors.TaskTrackingConfig.DEFAULT) + ); + executor = threadPool.executor(ESQL_TEST_EXECUTOR); + } + + @After + public void shutdownThreadPool() throws Exception { + terminate(threadPool); + } + + public void testEmpty() { + var future = sendRequests(List.of(), (node, shardIds, aliasFilters, listener) -> fail("expect no data-node request is sent")); + var resp = safeGet(future); + assertThat(resp.totalShards, equalTo(0)); + } + + public void testOnePass() { + var targetShards = List.of( + targetShard(shard1, node1), + targetShard(shard2, node2, node4), + targetShard(shard3, node1, node2), + targetShard(shard4, node2, node3) + ); + Queue<NodeRequest> sent = ConcurrentCollections.newQueue(); + var future = sendRequests(targetShards, (node, shardIds, aliasFilters, listener) -> { + sent.add(new NodeRequest(node, shardIds, aliasFilters)); + var resp = new DataNodeComputeResponse(List.of(), Map.of()); + runWithDelay(() -> listener.onResponse(resp)); + }); + safeGet(future); + assertThat(sent.size(), equalTo(2)); + assertThat(groupRequests(sent, 2), equalTo(Map.of(node1, List.of(shard1, shard3), node2, List.of(shard2, shard4)))); + } + + public void testMissingShards() { + var targetShards = List.of(targetShard(shard1, node1), targetShard(shard3), targetShard(shard4, node2, node3)); + var future = sendRequests(targetShards, (node, shardIds, aliasFilters, listener) -> { + fail("expect no data-node request is sent when target shards are missing"); + }); + var error = expectThrows(NoShardAvailableActionException.class, future::actionGet); + assertThat(error.getMessage(), containsString("no shard copies found")); + } + + public void testRetryThenSuccess() { + var targetShards = List.of( + targetShard(shard1, node1), + targetShard(shard2, node4, node2), + targetShard(shard3, node2, node3), + targetShard(shard4, node2, node3), + targetShard(shard5, node1, node3, node2) + ); + Queue<NodeRequest> sent = ConcurrentCollections.newQueue(); + var future = sendRequests(targetShards, (node, shardIds, aliasFilters, listener) -> { + sent.add(new NodeRequest(node, shardIds, aliasFilters)); + Map<ShardId, Exception> failures = new HashMap<>(); + if (node.equals(node1) && shardIds.contains(shard5)) { + failures.put(shard5, new IOException("test")); + } + if (node.equals(node4) && shardIds.contains(shard2)) { + failures.put(shard2, new IOException("test")); + } + runWithDelay(() -> listener.onResponse(new DataNodeComputeResponse(List.of(), failures))); + }); + try { + future.actionGet(1, TimeUnit.MINUTES); + } catch (Exception e) { + throw new AssertionError(e); + } + assertThat(sent, hasSize(5)); + var firstRound = groupRequests(sent, 3); + assertThat(firstRound, equalTo(Map.of(node1, List.of(shard1, shard5), node4, List.of(shard2), node2, List.of(shard3, shard4)))); + var secondRound = groupRequests(sent, 2); + assertThat(secondRound, equalTo(Map.of(node2, List.of(shard2), node3, List.of(shard5)))); + } + + public void testRetryButFail() { + var targetShards = List.of( + targetShard(shard1, node1), + targetShard(shard2, node4, node2), + targetShard(shard3, node2, node3), + targetShard(shard4, node2, node3), + targetShard(shard5, node1, node3, node2) + ); + Queue<NodeRequest> sent = ConcurrentCollections.newQueue(); + var future = sendRequests(targetShards, (node, shardIds, aliasFilters, listener) -> { + sent.add(new NodeRequest(node, shardIds, aliasFilters)); + Map<ShardId, Exception> failures = new HashMap<>(); + if
(shardIds.contains(shard5)) { + failures.put(shard5, new IOException("test failure for shard5")); + } + runWithDelay(() -> listener.onResponse(new DataNodeComputeResponse(List.of(), failures))); + }); + var error = expectThrows(Exception.class, future::actionGet); + assertNotNull(ExceptionsHelper.unwrap(error, IOException.class)); + // {node-1, node-2, node-4}, {node-3}, {node-2} + assertThat(sent.size(), equalTo(5)); + var firstRound = groupRequests(sent, 3); + assertThat(firstRound, equalTo(Map.of(node1, List.of(shard1, shard5), node2, List.of(shard3, shard4), node4, List.of(shard2)))); + NodeRequest fourth = sent.remove(); + assertThat(fourth.node(), equalTo(node3)); + assertThat(fourth.shardIds(), equalTo(List.of(shard5))); + NodeRequest fifth = sent.remove(); + assertThat(fifth.node(), equalTo(node2)); + assertThat(fifth.shardIds(), equalTo(List.of(shard5))); + } + + public void testDoNotRetryOnRequestLevelFailure() { + var targetShards = List.of(targetShard(shard1, node1), targetShard(shard2, node2), targetShard(shard3, node1)); + Queue<NodeRequest> sent = ConcurrentCollections.newQueue(); + AtomicBoolean failed = new AtomicBoolean(); + var future = sendRequests(targetShards, (node, shardIds, aliasFilters, listener) -> { + sent.add(new NodeRequest(node, shardIds, aliasFilters)); + if (node1.equals(node) && failed.compareAndSet(false, true)) { + runWithDelay(() -> listener.onFailure(new IOException("test request level failure"), true)); + } else { + runWithDelay(() -> listener.onResponse(new DataNodeComputeResponse(List.of(), Map.of()))); + } + }); + Exception exception = expectThrows(Exception.class, future::actionGet); + assertNotNull(ExceptionsHelper.unwrap(exception, IOException.class)); + // one round: {node-1, node-2} + assertThat(sent.size(), equalTo(2)); + var firstRound = groupRequests(sent, 2); + assertThat(firstRound, equalTo(Map.of(node1, List.of(shard1, shard3), node2, List.of(shard2)))); + } + + static DataNodeRequestSender.TargetShard targetShard(ShardId shardId, DiscoveryNode...
nodes) { + return new DataNodeRequestSender.TargetShard(shardId, new ArrayList<>(Arrays.asList(nodes)), null); + } + + static Map<DiscoveryNode, List<ShardId>> groupRequests(Queue<NodeRequest> sent, int limit) { + Map<DiscoveryNode, List<ShardId>> map = new HashMap<>(); + for (int i = 0; i < limit; i++) { + NodeRequest r = sent.remove(); + assertNull(map.put(r.node(), r.shardIds().stream().sorted().toList())); + } + return map; + } + + void runWithDelay(Runnable runnable) { + if (randomBoolean()) { + threadPool.schedule(runnable, TimeValue.timeValueNanos(between(0, 5000)), executor); + } else { + executor.execute(runnable); + } + } + + PlainActionFuture<ComputeResponse> sendRequests(List<DataNodeRequestSender.TargetShard> shards, Sender sender) { + PlainActionFuture<ComputeResponse> future = new PlainActionFuture<>(); + TransportService transportService = mock(TransportService.class); + when(transportService.getThreadPool()).thenReturn(threadPool); + CancellableTask task = new CancellableTask( + randomNonNegativeLong(), + "type", + "action", + randomAlphaOfLength(10), + TaskId.EMPTY_TASK_ID, + Collections.emptyMap() + ); + DataNodeRequestSender requestSender = new DataNodeRequestSender(transportService, executor, task) { + @Override + void searchShards( + Task parentTask, + String clusterAlias, + QueryBuilder filter, + Set<String> concreteIndices, + OriginalIndices originalIndices, + ActionListener<TargetShards> listener + ) { + var targetShards = new TargetShards( + shards.stream().collect(Collectors.toMap(TargetShard::shardId, Function.identity())), + shards.size(), + 0 + ); + assertSame(parentTask, task); + runWithDelay(() -> listener.onResponse(targetShards)); + } + + @Override + protected void sendRequest( + DiscoveryNode node, + List<ShardId> shardIds, + Map<Index, AliasFilter> aliasFilters, + NodeListener listener + ) { + sender.sendRequestToOneNode(node, shardIds, aliasFilters, listener); + } + }; + requestSender.startComputeOnDataNodes( + "", + Set.of(randomAlphaOfLength(10)), + new OriginalIndices(new String[0], SearchRequest.DEFAULT_INDICES_OPTIONS), + null, + () -> {}, + future + ); + return future; + } + + interface Sender { + void sendRequestToOneNode( + DiscoveryNode node, + List<ShardId> shardIds, + Map<Index, AliasFilter> aliasFilters, + DataNodeRequestSender.NodeListener listener + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java index 3b5b2d8f85452..339b07dfabc41 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java @@ -75,7 +75,8 @@ public void testQuery() throws IOException { SearchExecutionContext ctx = createSearchExecutionContext(mapper, new IndexSearcher(reader)); Query query = new SingleValueMatchQuery( ctx.getForField(mapper.fieldType("foo"), MappedFieldType.FielddataOperation.SEARCH), - Warnings.createWarnings(DriverContext.WarningsMode.COLLECT, 1, 1, "test") + Warnings.createWarnings(DriverContext.WarningsMode.COLLECT, 1, 1, "test"), + "single-value function encountered multi-value" ); runCase(fieldValues, ctx.searcher().count(query)); setup.assertRewrite(ctx.searcher(), query); @@ -90,7 +91,8 @@ public void testEmpty() throws IOException { SearchExecutionContext ctx = createSearchExecutionContext(mapper, new IndexSearcher(reader)); Query query = new SingleValueMatchQuery( ctx.getForField(mapper.fieldType("foo"), MappedFieldType.FielddataOperation.SEARCH), -
Warnings.createWarnings(DriverContext.WarningsMode.COLLECT, 1, 1, "test") + Warnings.createWarnings(DriverContext.WarningsMode.COLLECT, 1, 1, "test"), + "single-value function encountered multi-value" ); runCase(List.of(), ctx.searcher().count(query)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtilsTests.java similarity index 71% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtilsTests.java index a84e5b144e64c..49cfbba5c7610 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlCCSUtilsTests.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.session; import org.apache.lucene.index.CorruptIndexException; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import org.elasticsearch.action.search.ShardSearchFailure; @@ -20,6 +22,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.license.internal.XPackLicenseStatus; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.NoSeedNodeLeftException; @@ -47,26 +50,30 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.session.EsqlSessionCCSUtils.checkForCcsLicense; +import static org.elasticsearch.xpack.esql.session.EsqlCCSUtils.checkForCcsLicense; +import static org.elasticsearch.xpack.esql.session.EsqlCCSUtils.shouldIgnoreRuntimeError; +import static org.elasticsearch.xpack.esql.session.EsqlCCSUtils.skipUnavailableListener; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; -public class EsqlSessionCCSUtilsTests extends ESTestCase { +public class EsqlCCSUtilsTests extends ESTestCase { + + private final String LOCAL_CLUSTER_ALIAS = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; + private final String REMOTE1_ALIAS = "remote1"; + private final String REMOTE2_ALIAS = "remote2"; public void testCreateIndexExpressionFromAvailableClusters() { - final String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; - final String remote1Alias = "remote1"; - final String remote2Alias = "remote2"; // no clusters marked as skipped { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); - executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", true)); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new 
EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", true)); + executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", true)); - String indexExpr = EsqlSessionCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo); + String indexExpr = EsqlCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo); List list = Arrays.stream(Strings.splitStringByCommaToArray(indexExpr)).toList(); assertThat(list.size(), equalTo(5)); assertThat( @@ -78,19 +85,19 @@ public void testCreateIndexExpressionFromAvailableClusters() { // one cluster marked as skipped, so not present in revised index expression { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*,foo", true)); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*,foo", true)); executionInfo.swapCluster( - remote2Alias, + REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster( - remote2Alias, + REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", true, EsqlExecutionInfo.Cluster.Status.SKIPPED ) ); - String indexExpr = EsqlSessionCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo); + String indexExpr = EsqlCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo); List list = Arrays.stream(Strings.splitStringByCommaToArray(indexExpr)).toList(); assertThat(list.size(), equalTo(3)); assertThat(new HashSet<>(list), equalTo(Strings.commaDelimitedListToSet("logs*,remote1:*,remote1:foo"))); @@ -99,73 +106,70 @@ public void testCreateIndexExpressionFromAvailableClusters() { // two clusters marked as skipped, so only local cluster present in revised index expression { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); executionInfo.swapCluster( - remote1Alias, - (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*,foo", true, EsqlExecutionInfo.Cluster.Status.SKIPPED) + REMOTE1_ALIAS, + (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*,foo", true, EsqlExecutionInfo.Cluster.Status.SKIPPED) ); executionInfo.swapCluster( - remote2Alias, + REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster( - remote2Alias, + REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", true, EsqlExecutionInfo.Cluster.Status.SKIPPED ) ); - assertThat(EsqlSessionCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo), equalTo("logs*")); + assertThat(EsqlCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo), equalTo("logs*")); } // only remotes present and all marked as skipped, so in revised index expression should be empty string { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); executionInfo.swapCluster( - remote1Alias, - (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*,foo", true, EsqlExecutionInfo.Cluster.Status.SKIPPED) + REMOTE1_ALIAS, + (k, v) -> new 
EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*,foo", true, EsqlExecutionInfo.Cluster.Status.SKIPPED) ); executionInfo.swapCluster( - remote2Alias, + REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster( - remote2Alias, + REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", true, EsqlExecutionInfo.Cluster.Status.SKIPPED ) ); - assertThat(EsqlSessionCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo), equalTo("")); + assertThat(EsqlCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo), equalTo("")); } } public void testUpdateExecutionInfoWithUnavailableClusters() { - final String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; - final String remote1Alias = "remote1"; - final String remote2Alias = "remote2"; // skip_unavailable=true clusters are unavailable, both marked as SKIPPED { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); - executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", true)); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", true)); + executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", true)); var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); - var unvailableClusters = Map.of(remote1Alias, failure, remote2Alias, failure); - EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, unvailableClusters); + var unvailableClusters = Map.of(REMOTE1_ALIAS, failure, REMOTE2_ALIAS, failure); + EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, unvailableClusters); - assertThat(executionInfo.clusterAliases(), equalTo(Set.of(localClusterAlias, remote1Alias, remote2Alias))); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER_ALIAS, REMOTE1_ALIAS, REMOTE2_ALIAS))); assertNull(executionInfo.overallTook()); - EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(localClusterAlias); + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER_ALIAS); assertThat(localCluster.getIndexExpression(), equalTo("logs*")); assertClusterStatusAndShardCounts(localCluster, EsqlExecutionInfo.Cluster.Status.RUNNING); - EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); assertClusterStatusAndShardCounts(remote1Cluster, EsqlExecutionInfo.Cluster.Status.SKIPPED); - EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1,mylogs2,logs*")); assertClusterStatusAndShardCounts(remote2Cluster, EsqlExecutionInfo.Cluster.Status.SKIPPED); } @@ -173,14 +177,17 @@ public void testUpdateExecutionInfoWithUnavailableClusters() { // skip_unavailable=false cluster is unavailable, throws Exception { EsqlExecutionInfo 
executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); - executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", true)); + executionInfo.swapCluster( + REMOTE2_ALIAS, + (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", false) + ); var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); RemoteTransportException e = expectThrows( RemoteTransportException.class, - () -> EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, Map.of(remote2Alias, failure)) + () -> EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, Map.of(REMOTE2_ALIAS, failure)) ); assertThat(e.status().getStatus(), equalTo(500)); assertThat( @@ -193,42 +200,42 @@ public void testUpdateExecutionInfoWithUnavailableClusters() { // all clusters available, no Clusters in ExecutionInfo should be modified { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true)); - executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", true)); + executionInfo.swapCluster( + REMOTE2_ALIAS, + (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", false) + ); - EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, Map.of()); + EsqlCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, Map.of()); - assertThat(executionInfo.clusterAliases(), equalTo(Set.of(localClusterAlias, remote1Alias, remote2Alias))); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER_ALIAS, REMOTE1_ALIAS, REMOTE2_ALIAS))); assertNull(executionInfo.overallTook()); - EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(localClusterAlias); + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER_ALIAS); assertThat(localCluster.getIndexExpression(), equalTo("logs*")); assertClusterStatusAndShardCounts(localCluster, EsqlExecutionInfo.Cluster.Status.RUNNING); - EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); assertClusterStatusAndShardCounts(remote1Cluster, EsqlExecutionInfo.Cluster.Status.RUNNING); - EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); 
assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1,mylogs2,logs*")); assertClusterStatusAndShardCounts(remote2Cluster, EsqlExecutionInfo.Cluster.Status.RUNNING); } } public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { - final String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; - final String remote1Alias = "remote1"; - final String remote2Alias = "remote2"; // all clusters had matching indices from field-caps call, so no updates to EsqlExecutionInfo should happen { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", randomBoolean())); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", randomBoolean())); executionInfo.swapCluster( - remote2Alias, - (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", randomBoolean()) + REMOTE2_ALIAS, + (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", randomBoolean()) ); EsIndex esIndex = new EsIndex( @@ -251,17 +258,17 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), Map.of()); - EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); + EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); - EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(localClusterAlias); + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER_ALIAS); assertThat(localCluster.getIndexExpression(), equalTo("logs*")); assertClusterStatusAndShardCounts(localCluster, EsqlExecutionInfo.Cluster.Status.RUNNING); - EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); assertClusterStatusAndShardCounts(remote1Cluster, EsqlExecutionInfo.Cluster.Status.RUNNING); - EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1,mylogs2,logs*")); assertClusterStatusAndShardCounts(remote2Cluster, EsqlExecutionInfo.Cluster.Status.RUNNING); } @@ -270,11 +277,11 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { // marked as SKIPPED with 0 total shards, 0 took time, etc. 
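The swapCluster calls exercised throughout EsqlCCSUtilsTests follow the same remapping contract as java.util.Map#compute: the function receives the alias and the current per-cluster record (possibly null) and returns its replacement. A small sketch of that pattern with a hypothetical Cluster record, not the real EsqlExecutionInfo API:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.BiFunction;

final class ExecutionInfoSketch {
    record Cluster(String alias, String indexExpression, boolean skipUnavailable, String status) {}

    private final ConcurrentMap<String, Cluster> clusters = new ConcurrentHashMap<>();

    void swapCluster(String alias, BiFunction<String, Cluster, Cluster> remap) {
        clusters.compute(alias, remap); // atomic per key, which is what the tests rely on
    }

    public static void main(String[] args) {
        ExecutionInfoSketch info = new ExecutionInfoSketch();
        info.swapCluster("remote1", (k, v) -> new Cluster(k, "*", true, "RUNNING"));
        // later, mark it skipped without touching other clusters
        info.swapCluster("remote1", (k, v) -> new Cluster(k, v.indexExpression(), v.skipUnavailable(), "SKIPPED"));
        System.out.println(info.clusters);
    }
}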
{ EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", randomBoolean())); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", randomBoolean())); executionInfo.swapCluster( - remote2Alias, - (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", randomBoolean()) + REMOTE2_ALIAS, + (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", randomBoolean()) ); EsIndex esIndex = new EsIndex( @@ -295,13 +302,13 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { Map unavailableClusters = Map.of(); IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), unavailableClusters); - EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); + EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); - EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(localClusterAlias); + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER_ALIAS); assertThat(localCluster.getIndexExpression(), equalTo("logs*")); assertClusterStatusAndShardCounts(localCluster, EsqlExecutionInfo.Cluster.Status.RUNNING); - EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remote1Cluster.getTook().millis(), equalTo(0L)); @@ -310,7 +317,7 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { assertThat(remote1Cluster.getSkippedShards(), equalTo(0)); assertThat(remote1Cluster.getFailedShards(), equalTo(0)); - EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1,mylogs2,logs*")); assertClusterStatusAndShardCounts(remote2Cluster, EsqlExecutionInfo.Cluster.Status.RUNNING); } @@ -320,11 +327,11 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { // marked as SKIPPED { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", randomBoolean())); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", randomBoolean())); executionInfo.swapCluster( - remote2Alias, - (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1*,mylogs2*,logs*", randomBoolean()) + REMOTE2_ALIAS, + (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1*,mylogs2*,logs*", randomBoolean()) ); EsIndex esIndex = new EsIndex( @@ 
-334,22 +341,22 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { ); // remote1 is unavailable var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); - Map unavailableClusters = Map.of(remote1Alias, failure); + Map unavailableClusters = Map.of(REMOTE1_ALIAS, failure); IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), unavailableClusters); - EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); + EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); - EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(localClusterAlias); + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER_ALIAS); assertThat(localCluster.getIndexExpression(), equalTo("logs*")); assertClusterStatusAndShardCounts(localCluster, EsqlExecutionInfo.Cluster.Status.RUNNING); - EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); // since remote1 is in the unavailable Map (passed to IndexResolution.valid), it's status will not be changed // by updateExecutionInfoWithClustersWithNoMatchingIndices (it is handled in updateExecutionInfoWithUnavailableClusters) assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); - EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1*,mylogs2*,logs*")); assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remote2Cluster.getTook().millis(), equalTo(0L)); @@ -363,11 +370,11 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { // but had no matching indices and since a concrete index was requested, a VerificationException is thrown { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*")); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", randomBoolean())); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*")); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", randomBoolean())); executionInfo.swapCluster( - remote2Alias, - (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", randomBoolean()) + REMOTE2_ALIAS, + (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", randomBoolean()) ); EsIndex esIndex = new EsIndex( @@ -377,11 +384,11 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { ); var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); - Map unavailableClusters = Map.of(remote1Alias, failure); + Map unavailableClusters = Map.of(REMOTE1_ALIAS, failure); IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), unavailableClusters); VerificationException ve = expectThrows( VerificationException.class, - () -> 
EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution) + () -> EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution) ); assertThat(ve.getDetailedMessage(), containsString("Unknown index [remote2:mylogs1,mylogs2,logs*]")); } @@ -390,13 +397,13 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { // (the EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters() method handles that case not the one tested here) { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*")); - executionInfo.swapCluster(remote1Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", randomBoolean())); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*")); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", randomBoolean())); // remote2 is already marked as SKIPPED (simulating failed enrich policy lookup due to unavailable cluster) executionInfo.swapCluster( - remote2Alias, + REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster( - remote2Alias, + REMOTE2_ALIAS, "mylogs1*,mylogs2*,logs*", randomBoolean(), EsqlExecutionInfo.Cluster.Status.SKIPPED @@ -411,22 +418,22 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { // remote1 is unavailable var failure = new FieldCapabilitiesFailure(new String[] { "logs-a" }, new NoSeedNodeLeftException("unable to connect")); - Map unavailableClusters = Map.of(remote1Alias, failure); + Map unavailableClusters = Map.of(REMOTE1_ALIAS, failure); IndexResolution indexResolution = IndexResolution.valid(esIndex, esIndex.concreteIndices(), unavailableClusters); - EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); + EsqlCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); - EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(localClusterAlias); + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER_ALIAS); assertThat(localCluster.getIndexExpression(), equalTo("logs*")); assertClusterStatusAndShardCounts(localCluster, EsqlExecutionInfo.Cluster.Status.RUNNING); - EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); // since remote1 is in the unavailable Map (passed to IndexResolution.valid), it's status will not be changed // by updateExecutionInfoWithClustersWithNoMatchingIndices (it is handled in updateExecutionInfoWithUnavailableClusters) assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); - EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1*,mylogs2*,logs*")); assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); } @@ -444,7 +451,7 @@ public void testDetermineUnavailableRemoteClusters() { ) ); - Map unavailableClusters = EsqlSessionCCSUtils.determineUnavailableRemoteClusters(failures); + Map unavailableClusters = 
EsqlCCSUtils.determineUnavailableRemoteClusters(failures); assertThat(unavailableClusters.keySet(), equalTo(Set.of("remote1", "remote2"))); } @@ -454,7 +461,7 @@ public void testDetermineUnavailableRemoteClusters() { failures.add(new FieldCapabilitiesFailure(new String[] { "remote2:mylogs1" }, new NoSuchRemoteClusterException("remote2"))); failures.add(new FieldCapabilitiesFailure(new String[] { "remote2:mylogs1" }, new NoSeedNodeLeftException("no seed node"))); - Map unavailableClusters = EsqlSessionCCSUtils.determineUnavailableRemoteClusters(failures); + Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); assertThat(unavailableClusters.keySet(), equalTo(Set.of("remote2"))); } @@ -468,7 +475,7 @@ public void testDetermineUnavailableRemoteClusters() { new IllegalStateException("Unable to open any connections") ) ); - Map unavailableClusters = EsqlSessionCCSUtils.determineUnavailableRemoteClusters(failures); + Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); assertThat(unavailableClusters.keySet(), equalTo(Set.of("remote2"))); } @@ -476,29 +483,28 @@ public void testDetermineUnavailableRemoteClusters() { { List failures = new ArrayList<>(); failures.add(new FieldCapabilitiesFailure(new String[] { "remote1:mylogs1" }, new RuntimeException("foo"))); - Map unavailableClusters = EsqlSessionCCSUtils.determineUnavailableRemoteClusters(failures); + Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); assertThat(unavailableClusters.keySet(), equalTo(Set.of())); } // empty failures list { List failures = new ArrayList<>(); - Map unavailableClusters = EsqlSessionCCSUtils.determineUnavailableRemoteClusters(failures); + Map unavailableClusters = EsqlCCSUtils.determineUnavailableRemoteClusters(failures); assertThat(unavailableClusters.keySet(), equalTo(Set.of())); } } public void testUpdateExecutionInfoAtEndOfPlanning() { - String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; - String remote1Alias = "remote1"; - String remote2Alias = "remote2"; + String REMOTE1_ALIAS = "remote1"; + String REMOTE2_ALIAS = "remote2"; EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true); - executionInfo.swapCluster(localClusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false)); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); executionInfo.swapCluster( - remote1Alias, - (k, v) -> new EsqlExecutionInfo.Cluster(remote1Alias, "*", true, EsqlExecutionInfo.Cluster.Status.SKIPPED) + REMOTE1_ALIAS, + (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", true, EsqlExecutionInfo.Cluster.Status.SKIPPED) ); - executionInfo.swapCluster(remote2Alias, (k, v) -> new EsqlExecutionInfo.Cluster(remote2Alias, "mylogs1,mylogs2,logs*", false)); + executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", false)); assertNull(executionInfo.planningTookTime()); assertNull(executionInfo.overallTook()); @@ -506,7 +512,7 @@ public void testUpdateExecutionInfoAtEndOfPlanning() { Thread.sleep(1); } catch (InterruptedException e) {} - EsqlSessionCCSUtils.updateExecutionInfoAtEndOfPlanning(executionInfo); + EsqlCCSUtils.updateExecutionInfoAtEndOfPlanning(executionInfo); assertThat(executionInfo.planningTookTime().millis(), greaterThanOrEqualTo(0L)); assertNull(executionInfo.overallTook()); @@ -517,7 +523,7 @@ public void 
testUpdateExecutionInfoAtEndOfPlanning() { assertNull(localCluster.getTotalShards()); assertNull(localCluster.getTook()); - EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); + EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(REMOTE1_ALIAS); assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); assertThat(remote1Cluster.getTotalShards(), equalTo(0)); assertThat(remote1Cluster.getSuccessfulShards(), equalTo(0)); @@ -526,7 +532,7 @@ public void testUpdateExecutionInfoAtEndOfPlanning() { assertThat(remote1Cluster.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(remote1Cluster.getTook().millis(), equalTo(executionInfo.planningTookTime().millis())); - EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); + EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE2_ALIAS); assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); assertNull(remote2Cluster.getTotalShards()); assertNull(remote2Cluster.getTook()); @@ -534,7 +540,10 @@ private void assertClusterStatusAndShardCounts(EsqlExecutionInfo.Cluster cluster, EsqlExecutionInfo.Cluster.Status status) { assertThat(cluster.getStatus(), equalTo(status)); - assertNull(cluster.getTook()); + if (cluster.getTook() != null) { + // It is also ok if it's null in some tests + assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L)); + } if (status == EsqlExecutionInfo.Cluster.Status.RUNNING) { assertNull(cluster.getTotalShards()); assertNull(cluster.getSuccessfulShards()); @@ -545,6 +554,11 @@ private void assertClusterStatusAndShardCounts(EsqlExecutionInfo.Cluster cluster assertThat(cluster.getSuccessfulShards(), equalTo(0)); assertThat(cluster.getSkippedShards(), equalTo(0)); assertThat(cluster.getFailedShards(), equalTo(0)); + } else if (status == EsqlExecutionInfo.Cluster.Status.PARTIAL) { + assertThat(cluster.getTotalShards(), equalTo(0)); + assertThat(cluster.getSuccessfulShards(), equalTo(0)); + assertThat(cluster.getSkippedShards(), equalTo(0)); + assertThat(cluster.getFailedShards(), equalTo(0)); } else { fail("Unexpected status: " + status); } @@ -560,35 +574,32 @@ private static Map<String, EsField> randomMapping() { } public void testReturnSuccessWithEmptyResult() { - String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; - String remote1Alias = "remote1"; - String remote2Alias = "remote2"; String remote3Alias = "remote3"; NoClustersToSearchException noClustersException = new NoClustersToSearchException(); Predicate<String> skipUnPredicate = s -> { - if (s.equals("remote2") || s.equals("remote3")) { + if (s.equals(REMOTE2_ALIAS) || s.equals("remote3")) { return true; } return false; }; - EsqlExecutionInfo.Cluster localCluster = new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false); - EsqlExecutionInfo.Cluster remote1 = new EsqlExecutionInfo.Cluster(remote1Alias, "logs*", false); - EsqlExecutionInfo.Cluster remote2 = new EsqlExecutionInfo.Cluster(remote2Alias, "logs*", true); + EsqlExecutionInfo.Cluster localCluster = new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false); + EsqlExecutionInfo.Cluster remote1 = new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "logs*", false); + EsqlExecutionInfo.Cluster remote2 = new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "logs*", true); EsqlExecutionInfo.Cluster remote3 = new EsqlExecutionInfo.Cluster(remote3Alias, "logs*", true); // not a cross-cluster
search, so do not return empty result { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(skipUnPredicate, randomBoolean()); - executionInfo.swapCluster(localClusterAlias, (k, v) -> localCluster); - assertFalse(EsqlSessionCCSUtils.returnSuccessWithEmptyResult(executionInfo, noClustersException)); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> localCluster); + assertFalse(EsqlCCSUtils.returnSuccessWithEmptyResult(executionInfo, noClustersException)); } // local cluster is present, so do not return empty result { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(skipUnPredicate, randomBoolean()); - executionInfo.swapCluster(localClusterAlias, (k, v) -> localCluster); - executionInfo.swapCluster(remote1Alias, (k, v) -> remote1); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> localCluster); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> remote1); // TODO: this logic will be added in the follow-on PR that handles missing indices // assertFalse(EsqlSessionCCSUtils.returnSuccessWithEmptyResult(executionInfo, noClustersException)); } @@ -596,16 +607,16 @@ public void testReturnSuccessWithEmptyResult() { // remote-only, one cluster is skip_unavailable=false, so do not return empty result { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(skipUnPredicate, randomBoolean()); - executionInfo.swapCluster(remote1Alias, (k, v) -> remote1); - executionInfo.swapCluster(remote2Alias, (k, v) -> remote2); - assertFalse(EsqlSessionCCSUtils.returnSuccessWithEmptyResult(executionInfo, noClustersException)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> remote1); + executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> remote2); + assertFalse(EsqlCCSUtils.returnSuccessWithEmptyResult(executionInfo, noClustersException)); } // remote-only, all clusters are skip_unavailable=true, so should return empty result with // NoSuchClustersException or "remote unavailable" type exception { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(skipUnPredicate, randomBoolean()); - executionInfo.swapCluster(remote2Alias, (k, v) -> remote2); + executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> remote2); executionInfo.swapCluster(remote3Alias, (k, v) -> remote3); Exception e = randomFrom( new NoSuchRemoteClusterException("foo"), @@ -613,23 +624,22 @@ public void testReturnSuccessWithEmptyResult() { new NoSeedNodeLeftException("foo"), new IllegalStateException("unknown host") ); - assertTrue(EsqlSessionCCSUtils.returnSuccessWithEmptyResult(executionInfo, e)); + assertTrue(EsqlCCSUtils.returnSuccessWithEmptyResult(executionInfo, e)); } // remote-only, all clusters are skip_unavailable=true, but exception is not "remote unavailable" so return false // Note: this functionality may change in follow-on PRs, so remove this test in that case { EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(skipUnPredicate, randomBoolean()); - executionInfo.swapCluster(remote2Alias, (k, v) -> remote2); + executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> remote2); executionInfo.swapCluster(remote3Alias, (k, v) -> remote3); - assertFalse(EsqlSessionCCSUtils.returnSuccessWithEmptyResult(executionInfo, new NullPointerException())); + assertFalse(EsqlCCSUtils.returnSuccessWithEmptyResult(executionInfo, new NullPointerException())); } } public void testUpdateExecutionInfoToReturnEmptyResult() { - String localClusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; - String remote1Alias = "remote1"; - String remote2Alias = "remote2"; + String REMOTE1_ALIAS = "remote1"; + String REMOTE2_ALIAS = 
"remote2"; String remote3Alias = "remote3"; ConnectTransportException transportEx = new ConnectTransportException(null, "foo"); Predicate skipUnPredicate = s -> { @@ -639,9 +649,9 @@ public void testUpdateExecutionInfoToReturnEmptyResult() { return false; }; - EsqlExecutionInfo.Cluster localCluster = new EsqlExecutionInfo.Cluster(localClusterAlias, "logs*", false); - EsqlExecutionInfo.Cluster remote1 = new EsqlExecutionInfo.Cluster(remote1Alias, "logs*", true); - EsqlExecutionInfo.Cluster remote2 = new EsqlExecutionInfo.Cluster(remote2Alias, "logs*", true); + EsqlExecutionInfo.Cluster localCluster = new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false); + EsqlExecutionInfo.Cluster remote1 = new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "logs*", true); + EsqlExecutionInfo.Cluster remote2 = new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "logs*", true); EsqlExecutionInfo.Cluster remote3 = new EsqlExecutionInfo.Cluster(remote3Alias, "logs*", true); EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(skipUnPredicate, randomBoolean()); @@ -652,13 +662,13 @@ public void testUpdateExecutionInfoToReturnEmptyResult() { assertNull(executionInfo.overallTook()); - EsqlSessionCCSUtils.updateExecutionInfoToReturnEmptyResult(executionInfo, transportEx); + EsqlCCSUtils.updateExecutionInfoToReturnEmptyResult(executionInfo, transportEx); assertNotNull(executionInfo.overallTook()); - assertThat(executionInfo.getCluster(localClusterAlias).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); - assertThat(executionInfo.getCluster(localClusterAlias).getFailures().size(), equalTo(0)); + assertThat(executionInfo.getCluster(LOCAL_CLUSTER_ALIAS).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(executionInfo.getCluster(LOCAL_CLUSTER_ALIAS).getFailures().size(), equalTo(0)); - for (String remoteAlias : Set.of(remote1Alias, remote2Alias, remote3Alias)) { + for (String remoteAlias : Set.of(REMOTE1_ALIAS, REMOTE2_ALIAS, remote3Alias)) { assertThat(executionInfo.getCluster(remoteAlias).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); List remoteFailures = executionInfo.getCluster(remoteAlias).getFailures(); assertThat(remoteFailures.size(), equalTo(1)); @@ -667,11 +677,11 @@ public void testUpdateExecutionInfoToReturnEmptyResult() { } public void testConcreteIndexRequested() { - assertThat(EsqlSessionCCSUtils.concreteIndexRequested("logs*"), equalTo(false)); - assertThat(EsqlSessionCCSUtils.concreteIndexRequested("mylogs1,mylogs2,logs*"), equalTo(true)); - assertThat(EsqlSessionCCSUtils.concreteIndexRequested("x*,logs"), equalTo(true)); - assertThat(EsqlSessionCCSUtils.concreteIndexRequested("logs,metrics"), equalTo(true)); - assertThat(EsqlSessionCCSUtils.concreteIndexRequested("*"), equalTo(false)); + assertThat(EsqlCCSUtils.concreteIndexRequested("logs*"), equalTo(false)); + assertThat(EsqlCCSUtils.concreteIndexRequested("mylogs1,mylogs2,logs*"), equalTo(true)); + assertThat(EsqlCCSUtils.concreteIndexRequested("x*,logs"), equalTo(true)); + assertThat(EsqlCCSUtils.concreteIndexRequested("logs,metrics"), equalTo(true)); + assertThat(EsqlCCSUtils.concreteIndexRequested("*"), equalTo(false)); } public void testCheckForCcsLicense() { @@ -758,6 +768,64 @@ public void testCheckForCcsLicense() { } } + public void testShouldIgnoreRuntimeError() { + Predicate skipUnPredicate = s -> s.equals(REMOTE1_ALIAS); + + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(skipUnPredicate, true); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new 
EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", true)); + executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", false)); + + // remote1: skip_unavailable=true, so should ignore connect errors, but not others + assertThat( + shouldIgnoreRuntimeError(executionInfo, REMOTE1_ALIAS, new IllegalStateException("Unable to open any connections")), + is(true) + ); + assertThat(shouldIgnoreRuntimeError(executionInfo, REMOTE1_ALIAS, new TaskCancelledException("task cancelled")), is(false)); + assertThat(shouldIgnoreRuntimeError(executionInfo, REMOTE1_ALIAS, new ElasticsearchException("something is wrong")), is(false)); + // remote2: skip_unavailable=false, so should not ignore any errors + assertThat( + shouldIgnoreRuntimeError(executionInfo, REMOTE2_ALIAS, new IllegalStateException("Unable to open any connections")), + is(false) + ); + assertThat(shouldIgnoreRuntimeError(executionInfo, REMOTE2_ALIAS, new TaskCancelledException("task cancelled")), is(false)); + // same for local + assertThat( + shouldIgnoreRuntimeError(executionInfo, LOCAL_CLUSTER_ALIAS, new IllegalStateException("Unable to open any connections")), + is(false) + ); + assertThat(shouldIgnoreRuntimeError(executionInfo, LOCAL_CLUSTER_ALIAS, new TaskCancelledException("task cancelled")), is(false)); + } + + public void testSkipUnavailableListener() { + Predicate skipUnPredicate = s -> s.equals(REMOTE1_ALIAS); + + EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(skipUnPredicate, true); + executionInfo.swapCluster(LOCAL_CLUSTER_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(LOCAL_CLUSTER_ALIAS, "logs*", false)); + executionInfo.swapCluster(REMOTE1_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE1_ALIAS, "*", true)); + executionInfo.swapCluster(REMOTE2_ALIAS, (k, v) -> new EsqlExecutionInfo.Cluster(REMOTE2_ALIAS, "mylogs1,mylogs2,logs*", false)); + + ActionListener expectResult = ActionListener.wrap(unused -> {}, (e) -> fail("Listener should not have failed")); + ActionListener expectFailure = ActionListener.wrap(unused -> fail("Listener should have failed"), (e) -> {}); + + // skip_unavailable=true but not connect exception, so should fail + skipUnavailableListener(expectFailure, executionInfo, REMOTE1_ALIAS, EsqlExecutionInfo.Cluster.Status.PARTIAL).onFailure( + new ElasticsearchException("something is wrong") + ); + assertThat(executionInfo.getCluster(REMOTE1_ALIAS).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + + // skip_unavailable=true, so should not fail + skipUnavailableListener(expectResult, executionInfo, REMOTE1_ALIAS, EsqlExecutionInfo.Cluster.Status.PARTIAL).onFailure( + new IllegalStateException("Unable to open any connections") + ); + assertThat(executionInfo.getCluster(REMOTE1_ALIAS).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL)); + // skip_unavailable=false, so should fail + skipUnavailableListener(expectFailure, executionInfo, REMOTE2_ALIAS, EsqlExecutionInfo.Cluster.Status.PARTIAL).onFailure( + new IllegalStateException("Unable to open any connections") + ); + + } + private XPackLicenseStatus activeLicenseStatus(License.OperationMode operationMode) { return new XPackLicenseStatus(operationMode, true, null); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java
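// For illustration: the two tests above pin down a policy where only clusters flagged
// skip_unavailable=true may have connection-type failures ignored, while cancellations and
// arbitrary runtime errors always propagate. A minimal, dependency-free sketch of that rule;
// the names (SkipUnavailableSketch, isRemoteUnavailableException) and the message-based
// classifier are hypothetical, not the actual EsqlCCSUtils implementation.
import java.util.function.Predicate;

final class SkipUnavailableSketch {
    // Local cluster ("" in this sketch) and strict remotes never swallow errors.
    static boolean shouldIgnoreRuntimeError(Predicate<String> skipUnavailable, String clusterAlias, Exception e) {
        boolean isLocal = clusterAlias.isEmpty();
        if (isLocal || skipUnavailable.test(clusterAlias) == false) {
            return false;
        }
        return isRemoteUnavailableException(e);
    }

    // Hypothetical classifier: treat connect-style failures as "remote unavailable".
    static boolean isRemoteUnavailableException(Exception e) {
        return e instanceof java.net.ConnectException
            || (e instanceof IllegalStateException && String.valueOf(e.getMessage()).contains("Unable to open any connections"));
    }

    public static void main(String[] args) {
        Predicate<String> skip = "remote1"::equals;
        System.out.println(shouldIgnoreRuntimeError(skip, "remote1", new IllegalStateException("Unable to open any connections"))); // true
        System.out.println(shouldIgnoreRuntimeError(skip, "remote2", new IllegalStateException("Unable to open any connections"))); // false
        System.out.println(shouldIgnoreRuntimeError(skip, "remote1", new RuntimeException("something is wrong"))); // false
    }
}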
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java similarity index 98% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java index a3c5cd9168b4f..aa735e5cb6d86 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; @@ -125,7 +125,7 @@ public void testFailedMetric() { new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, runPhase, - EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER, + EsqlTestUtils.MOCK_TRANSPORT_ACTION_SERVICES, new ActionListener<>() { @Override public void onResponse(Result result) { @@ -156,7 +156,7 @@ public void onFailure(Exception e) { new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, runPhase, - EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER, + EsqlTestUtils.MOCK_TRANSPORT_ACTION_SERVICES, new ActionListener<>() { @Override public void onResponse(Result result) {} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java similarity index 93% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java index eda906b147956..de377fe78588c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; @@ -22,23 +22,23 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzer; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DISSECT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DROP; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ENRICH; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.EVAL; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.FROM; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.KEEP; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.MV_EXPAND; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.RENAME; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ROW; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SHOW; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SORT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.STATS; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.WHERE; -import static org.elasticsearch.xpack.esql.stats.Metrics.FPREFIX; -import static org.elasticsearch.xpack.esql.stats.Metrics.FUNC_PREFIX; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.DISSECT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.DROP; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.ENRICH; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.EVAL; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.FROM; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.GROK; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.KEEP; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.MV_EXPAND; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.RENAME; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.ROW; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.SHOW; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.SORT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.STATS; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.WHERE; +import static org.elasticsearch.xpack.esql.telemetry.Metrics.FPREFIX; +import static org.elasticsearch.xpack.esql.telemetry.Metrics.FUNC_PREFIX; public class VerifierMetricsTests extends ESTestCase { diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java index 5e534295c06c5..860a4a1915ea1 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java @@ -8,30 +8,19 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.settings.Setting; -import 
org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; -import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; -import org.elasticsearch.xpack.frozen.action.TransportFreezeIndexAction; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; public class FrozenIndices extends Plugin implements ActionPlugin { - @Override - public List> getSettings() { - return Arrays.asList(FrozenEngine.INDEX_FROZEN); - } - @Override public List> getActions() { List> actions = new ArrayList<>(); actions.add(new ActionHandler<>(XPackUsageFeatureAction.FROZEN_INDICES, FrozenIndicesUsageTransportAction.class)); - actions.add(new ActionHandler<>(FreezeIndexAction.INSTANCE, TransportFreezeIndexAction.class)); return actions; } } diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java deleted file mode 100644 index 248902b4b7a9e..0000000000000 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.frozen.action; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; -import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.IndexAbstraction; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.MetadataIndexStateService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.engine.frozen.FrozenEngine; -import org.elasticsearch.injection.guice.Inject; -import 
org.elasticsearch.protocol.xpack.frozen.FreezeRequest; -import org.elasticsearch.protocol.xpack.frozen.FreezeResponse; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.SortedMap; - -public final class TransportFreezeIndexAction extends TransportMasterNodeAction { - - private static final Logger logger = LogManager.getLogger(TransportFreezeIndexAction.class); - - private final DestructiveOperations destructiveOperations; - private final IndexNameExpressionResolver indexNameExpressionResolver; - private final MetadataIndexStateService indexStateService; - - @Inject - public TransportFreezeIndexAction( - MetadataIndexStateService indexStateService, - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - DestructiveOperations destructiveOperations - ) { - super( - FreezeIndexAction.NAME, - transportService, - clusterService, - threadPool, - actionFilters, - FreezeRequest::new, - FreezeResponse::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); - this.indexStateService = indexStateService; - this.indexNameExpressionResolver = indexNameExpressionResolver; - this.destructiveOperations = destructiveOperations; - } - - @Override - protected void doExecute(Task task, FreezeRequest request, ActionListener listener) { - destructiveOperations.failDestructive(request.indices()); - super.doExecute(task, request, listener); - } - - private Index[] resolveIndices(FreezeRequest request, ClusterState state) { - List indices = new ArrayList<>(); - for (Index index : indexNameExpressionResolver.concreteIndices(state, request)) { - IndexMetadata metadata = state.metadata().index(index); - Settings settings = metadata.getSettings(); - // only unfreeze if we are frozen and only freeze if we are not frozen already. - // this prevents all indices that are already frozen that match a pattern to - // go through the cycles again. - if ((request.freeze() && FrozenEngine.INDEX_FROZEN.get(settings) == false) - || (request.freeze() == false && FrozenEngine.INDEX_FROZEN.get(settings))) { - indices.add(index); - } - } - if (indices.isEmpty() && request.indicesOptions().allowNoIndices() == false) { - throw new ResourceNotFoundException("no index found to " + (request.freeze() ? 
"freeze" : "unfreeze")); - } - return indices.toArray(Index.EMPTY_ARRAY); - } - - @Override - protected void masterOperation(Task task, FreezeRequest request, ClusterState state, ActionListener listener) { - final Index[] concreteIndices = resolveIndices(request, state); - if (concreteIndices.length == 0) { - listener.onResponse(new FreezeResponse(true, true)); - return; - } - - final CloseIndexClusterStateUpdateRequest closeRequest = new CloseIndexClusterStateUpdateRequest( - request.masterNodeTimeout(), - request.ackTimeout(), - task.getId(), - ActiveShardCount.DEFAULT, - concreteIndices - ); - - indexStateService.closeIndices(closeRequest, new ActionListener<>() { - @Override - public void onResponse(final CloseIndexResponse response) { - if (response.isAcknowledged()) { - toggleFrozenSettings(concreteIndices, request, listener); - } else { - // TODO improve FreezeResponse so that it also reports failures from the close index API - listener.onResponse(new FreezeResponse(false, false)); - } - } - - @Override - public void onFailure(final Exception t) { - logger.debug(() -> "failed to close indices [" + Arrays.toString(concreteIndices) + "]", t); - listener.onFailure(t); - } - }); - } - - private void toggleFrozenSettings( - final Index[] concreteIndices, - final FreezeRequest request, - final ActionListener listener - ) { - submitUnbatchedTask( - "toggle-frozen-settings", - new AckedClusterStateUpdateTask(Priority.URGENT, request, listener.delegateFailure((delegate, acknowledgedResponse) -> { - OpenIndexClusterStateUpdateRequest updateRequest = new OpenIndexClusterStateUpdateRequest( - request.masterNodeTimeout(), - request.ackTimeout(), - request.waitForActiveShards(), - concreteIndices - ); - indexStateService.openIndices( - updateRequest, - delegate.safeMap( - openIndexClusterStateUpdateResponse -> new FreezeResponse( - openIndexClusterStateUpdateResponse.isAcknowledged(), - openIndexClusterStateUpdateResponse.isShardsAcknowledged() - ) - ) - ); - })) { - @Override - public ClusterState execute(ClusterState currentState) { - List writeIndices = new ArrayList<>(); - SortedMap lookup = currentState.metadata().getIndicesLookup(); - for (Index index : concreteIndices) { - IndexAbstraction ia = lookup.get(index.getName()); - if (ia != null && ia.getParentDataStream() != null && ia.getParentDataStream().getWriteIndex().equals(index)) { - writeIndices.add(index.getName()); - } - } - if (writeIndices.size() > 0) { - throw new IllegalArgumentException( - "cannot freeze the following data stream write indices [" - + Strings.collectionToCommaDelimitedString(writeIndices) - + "]" - ); - } - - final Metadata.Builder builder = Metadata.builder(currentState.metadata()); - ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - for (Index index : concreteIndices) { - final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); - if (indexMetadata.getState() != IndexMetadata.State.CLOSE) { - throw new IllegalStateException("index [" + index.getName() + "] is not closed"); - } - final Settings.Builder settingsBuilder = Settings.builder().put(indexMetadata.getSettings()); - if (request.freeze()) { - settingsBuilder.put(FrozenEngine.INDEX_FROZEN.getKey(), true); - settingsBuilder.put(IndexSettings.INDEX_SEARCH_THROTTLED.getKey(), true); - settingsBuilder.put("index.blocks.write", true); - blocks.addIndexBlock(index.getName(), IndexMetadata.INDEX_WRITE_BLOCK); - } else { - settingsBuilder.remove(FrozenEngine.INDEX_FROZEN.getKey()); - 
settingsBuilder.remove(IndexSettings.INDEX_SEARCH_THROTTLED.getKey()); - if (indexMetadata.isSearchableSnapshot() == false) { - settingsBuilder.remove("index.blocks.write"); - blocks.removeIndexBlock(index.getName(), IndexMetadata.INDEX_WRITE_BLOCK); - } - } - builder.put( - IndexMetadata.builder(indexMetadata) - .settingsVersion(indexMetadata.getSettingsVersion() + 1) - .settings(settingsBuilder) - .build(), - true - ); - } - return ClusterState.builder(currentState).blocks(blocks).metadata(builder).build(); - } - } - ); - } - - @Override - protected ClusterBlockException checkBlock(FreezeRequest request, ClusterState state) { - return state.blocks() - .indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndexNames(state, request)); - } - - @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here - private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) { - clusterService.submitUnbatchedStateUpdateTask(source, task); - } -} diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 78870cbb9530b..fe9bdef9bebb9 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -18,8 +18,6 @@ import org.elasticsearch.protocol.xpack.graph.VertexRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.Scope; -import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -38,7 +36,6 @@ /** * @see GraphExploreRequest */ -@ServerlessScope(Scope.PUBLIC) public class RestGraphAction extends BaseRestHandler { public static final ParseField TIMEOUT_FIELD = new ParseField("timeout"); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java index 230b7ff576296..d0f797e9f8fab 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.ClassRule; @@ -40,12 +39,9 @@ public class BaseMockEISAuthServerTest extends ESRestTestCase { .setting("xpack.security.enabled", "true") // Adding both settings unless one feature flag is disabled in a particular environment .setting("xpack.inference.elastic.url", mockEISServer::getUrl) - // TODO remove this once we've removed DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG and 
EIS_GATEWAY_URL - .setting("xpack.inference.eis.gateway.url", mockEISServer::getUrl) // This plugin is located in the inference/qa/test-service-plugin package, look for TestInferenceServicePlugin .plugin("inference-service-test") .user("x_pack_rest_user", "x-pack-test-password") - .feature(FeatureFlag.INFERENCE_UNIFIED_API_ENABLED) .build(); // The reason we're doing this is to make sure the mock server is initialized first so we can get the address before communicating diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index 0a2200ff912ac..47f34fa486daf 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -40,9 +40,35 @@ public void testAttachToDeployment() throws IOException { is(Map.of("num_allocations", 1, "num_threads", 1, "model_id", "attach_to_deployment", "deployment_id", "existing_deployment")) ); + var getModel = getModel(inferenceId); + serviceSettings = getModel.get("service_settings"); + assertThat( + getModel.toString(), + serviceSettings, + is(Map.of("num_allocations", 1, "num_threads", 1, "model_id", "attach_to_deployment", "deployment_id", "existing_deployment")) + ); + var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); + var updatedNumAllocations = randomIntBetween(1, 10); + var updatedEndpointConfig = updateEndpoint(inferenceId, updatedEndpointConfig(updatedNumAllocations), TaskType.SPARSE_EMBEDDING); + assertThat( + updatedEndpointConfig.get("service_settings"), + is( + Map.of( + "num_allocations", + updatedNumAllocations, + "num_threads", + 1, + "model_id", + "attach_to_deployment", + "deployment_id", + "existing_deployment" + ) + ) + ); + deleteModel(inferenceId); // assert deployment not stopped var stats = (List>) getTrainedModelStats(modelId).get("trained_model_stats"); @@ -80,9 +106,46 @@ public void testAttachWithModelId() throws IOException { ) ); + var getModel = getModel(inferenceId); + serviceSettings = getModel.get("service_settings"); + assertThat( + getModel.toString(), + serviceSettings, + is( + Map.of( + "num_allocations", + 1, + "num_threads", + 1, + "model_id", + "attach_with_model_id", + "deployment_id", + "existing_deployment_with_model_id" + ) + ) + ); + var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); + var updatedNumAllocations = randomIntBetween(1, 10); + var updatedEndpointConfig = updateEndpoint(inferenceId, updatedEndpointConfig(updatedNumAllocations), TaskType.SPARSE_EMBEDDING); + assertThat( + updatedEndpointConfig.get("service_settings"), + is( + Map.of( + "num_allocations", + updatedNumAllocations, + "num_threads", + 1, + "model_id", + "attach_with_model_id", + "deployment_id", + "existing_deployment_with_model_id" + ) + ) + ); + stopMlNodeDeployment(deploymentId); } @@ -189,6 +252,16 @@ private String endpointConfig(String modelId, String deploymentId) { """, modelId, deploymentId); } + private String updatedEndpointConfig(int numAllocations) { + return Strings.format(""" + { + "service_settings": { + "num_allocations": %d + } + } + """, numAllocations); + 
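// For illustration: the updatedEndpointConfig(...) helper above builds a plain JSON body, and
// the updateEndpoint(String, String) overload added further below sends it via
// PUT _inference/{inference-id}/_update (both paths are taken from this diff). A self-contained
// sketch of composing that request; the inference id used in main is just an example value.
final class UpdateEndpointBodySketch {
    static String updateBody(int numAllocations) {
        return String.format("""
            {
              "service_settings": {
                "num_allocations": %d
              }
            }
            """, numAllocations);
    }

    public static void main(String[] args) {
        // Only num_allocations is changed; the other service settings stay as created.
        String path = String.format("_inference/%s/_update", "attach_to_deployment");
        System.out.println("PUT " + path);
        System.out.println(updateBody(4));
    }
}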
} + private Response startMlNodeDeploymemnt(String modelId, String deploymentId) throws IOException { String endPoint = "/_ml/trained_models/" + modelId diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 5174b5bbb8cb4..36a4b95a7ca23 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -19,7 +19,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -50,8 +49,8 @@ public class InferenceBaseRestTest extends ESRestTestCase { .setting("xpack.security.enabled", "true") .plugin("inference-service-test") .user("x_pack_rest_user", "x-pack-test-password") - .feature(FeatureFlag.INFERENCE_UNIFIED_API_ENABLED) .build(); + @ClassRule public static MlModelServer mlModelServer = new MlModelServer(); @@ -239,6 +238,11 @@ static Map updateEndpoint(String inferenceID, String modelConfig return putRequest(endpoint, modelConfig); } + static Map updateEndpoint(String inferenceID, String modelConfig) throws IOException { + String endpoint = Strings.format("_inference/%s/_update", inferenceID); + return putRequest(endpoint, modelConfig); + } + protected Map putPipeline(String pipelineId, String modelId) throws IOException { String endpoint = Strings.format("_ingest/pipeline/%s", pipelineId); String body = """ @@ -356,7 +360,7 @@ protected Deque unifiedCompletionInferOnMockService( List input, @Nullable Consumer responseConsumerCallback ) throws Exception { - var endpoint = Strings.format("_inference/%s/%s/_unified", taskType, modelId); + var endpoint = Strings.format("_inference/%s/%s/_stream", taskType, modelId); return callAsyncUnified(endpoint, input, "user", responseConsumerCallback); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index b786cd1298495..793b3f7a9a349 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -369,6 +369,61 @@ public void testUnifiedCompletionInference() throws Exception { } } + public void testUpdateEndpointWithWrongTaskTypeInURL() throws IOException { + putModel("sparse_embedding_model", mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var e = expectThrows( + ResponseException.class, + () -> updateEndpoint( + "sparse_embedding_model", + updateConfig(null, randomAlphaOfLength(10), randomIntBetween(1, 10)), + TaskType.TEXT_EMBEDDING + ) + ); + assertThat(e.getMessage(), containsString("Task type must match the task type of the existing 
endpoint")); + } + + public void testUpdateEndpointWithWrongTaskTypeInBody() throws IOException { + putModel("sparse_embedding_model", mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var e = expectThrows( + ResponseException.class, + () -> updateEndpoint( + "sparse_embedding_model", + updateConfig(TaskType.TEXT_EMBEDDING, randomAlphaOfLength(10), randomIntBetween(1, 10)) + ) + ); + assertThat(e.getMessage(), containsString("Task type must match the task type of the existing endpoint")); + } + + public void testUpdateEndpointWithTaskTypeInURL() throws IOException { + testUpdateEndpoint(false, true); + } + + public void testUpdateEndpointWithTaskTypeInBody() throws IOException { + testUpdateEndpoint(true, false); + } + + public void testUpdateEndpointWithTaskTypeInBodyAndURL() throws IOException { + testUpdateEndpoint(true, true); + } + + @SuppressWarnings("unchecked") + private void testUpdateEndpoint(boolean taskTypeInBody, boolean taskTypeInURL) throws IOException { + String endpointId = "sparse_embedding_model"; + putModel(endpointId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + + int temperature = randomIntBetween(1, 10); + var expectedConfig = updateConfig(taskTypeInBody ? TaskType.SPARSE_EMBEDDING : null, randomAlphaOfLength(1), temperature); + Map updatedEndpoint; + if (taskTypeInURL) { + updatedEndpoint = updateEndpoint(endpointId, expectedConfig, TaskType.SPARSE_EMBEDDING); + } else { + updatedEndpoint = updateEndpoint(endpointId, expectedConfig); + } + + Map updatedTaskSettings = (Map) updatedEndpoint.get("task_settings"); + assertEquals(temperature, updatedTaskSettings.get("temperature")); + } + private static Iterator expectedResultsIterator(List input) { // The Locale needs to be ROOT to match what the test service is going to respond with return Stream.concat(input.stream().map(s -> s.toUpperCase(Locale.ROOT)).map(InferenceCrudIT::expectedResult), Stream.of("[DONE]")) diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java index 76483a5f62fec..42289c50864e6 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java @@ -10,13 +10,15 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import java.io.IOException; +import java.util.List; +import java.util.Map; import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.getAllModels; import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.getModels; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; public class InferenceGetModelsWithElasticInferenceServiceIT extends BaseMockEISAuthServerTest { @@ -24,18 +26,20 @@ public void testGetDefaultEndpoints() throws IOException { var allModels = getAllModels(); var chatCompletionModels = getModels("_all", TaskType.CHAT_COMPLETION); - if 
((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(allModels, hasSize(4)); - assertThat(chatCompletionModels, hasSize(1)); - - for (var model : chatCompletionModels) { - assertEquals("chat_completion", model.get("task_type")); - } - } else { - assertThat(allModels, hasSize(3)); - assertThat(chatCompletionModels, hasSize(0)); + assertThat(allModels, hasSize(5)); + assertThat(chatCompletionModels, hasSize(1)); + + for (var model : chatCompletionModels) { + assertEquals("chat_completion", model.get("task_type")); } + assertInferenceIdTaskType(allModels, ".rainbow-sprinkles-elastic", TaskType.CHAT_COMPLETION); + assertInferenceIdTaskType(allModels, ".elser-v2-elastic", TaskType.SPARSE_EMBEDDING); + } + + private static void assertInferenceIdTaskType(List> models, String inferenceId, TaskType taskType) { + var model = models.stream().filter(m -> m.get("inference_id").equals(inferenceId)).findFirst(); + assertTrue("could not find inference id: " + inferenceId, model.isPresent()); + assertThat(model.get().get("task_type"), is(taskType.toString())); } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java index 856fdeb6287e9..9d4cec798964a 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java @@ -12,11 +12,8 @@ import org.elasticsearch.client.Request; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Map; @@ -28,12 +25,7 @@ public class InferenceGetServicesIT extends BaseMockEISAuthServerTest { @SuppressWarnings("unchecked") public void testGetServicesWithoutTaskType() throws IOException { List services = getAllServices(); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(19)); - } else { - assertThat(services.size(), equalTo(18)); - } + assertThat(services.size(), equalTo(19)); String[] providers = new String[services.size()]; for (int i = 0; i < services.size(); i++) { @@ -41,14 +33,15 @@ public void testGetServicesWithoutTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>( - Arrays.asList( + assertArrayEquals( + List.of( "alibabacloud-ai-search", "amazonbedrock", "anthropic", "azureaistudio", "azureopenai", "cohere", + "elastic", "elasticsearch", "googleaistudio", "googlevertexai", @@ -61,13 +54,9 @@ public void testGetServicesWithoutTaskType() throws IOException { "test_service", "text_embedding_test_service", "watsonxai" - ) + ).toArray(), + providers ); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || 
ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.add(6, "elastic"); - } - assertArrayEquals(providerList.toArray(), providers); } @SuppressWarnings("unchecked") @@ -130,7 +119,7 @@ public void testGetServicesWithCompletionTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>( + assertArrayEquals( List.of( "alibabacloud-ai-search", "amazonbedrock", @@ -141,21 +130,15 @@ public void testGetServicesWithCompletionTaskType() throws IOException { "googleaistudio", "openai", "streaming_completion_test_service" - ) + ).toArray(), + providers ); - - assertArrayEquals(providers, providerList.toArray()); } @SuppressWarnings("unchecked") public void testGetServicesWithChatCompletionTaskType() throws IOException { List services = getServices(TaskType.CHAT_COMPLETION); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(3)); - } else { - assertThat(services.size(), equalTo(2)); - } + assertThat(services.size(), equalTo(3)); String[] providers = new String[services.size()]; for (int i = 0; i < services.size(); i++) { @@ -163,26 +146,13 @@ public void testGetServicesWithChatCompletionTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>(List.of("openai", "streaming_completion_test_service")); - - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.addFirst("elastic"); - } - - assertArrayEquals(providers, providerList.toArray()); + assertArrayEquals(List.of("elastic", "openai", "streaming_completion_test_service").toArray(), providers); } @SuppressWarnings("unchecked") public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { List services = getServices(TaskType.SPARSE_EMBEDDING); - - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(5)); - } else { - assertThat(services.size(), equalTo(4)); - } + assertThat(services.size(), equalTo(5)); String[] providers = new String[services.size()]; for (int i = 0; i < services.size(); i++) { @@ -190,12 +160,10 @@ public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>(Arrays.asList("alibabacloud-ai-search", "elasticsearch", "hugging_face", "test_service")); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.add(1, "elastic"); - } - assertArrayEquals(providers, providerList.toArray()); + assertArrayEquals( + List.of("alibabacloud-ai-search", "elastic", "elasticsearch", "hugging_face", "test_service").toArray(), + providers + ); } private List getAllServices() throws IOException { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java 
b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 9da6b52555498..303f957c7ab20 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -20,8 +20,9 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.plugins.Plugin; @@ -49,14 +50,21 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { public static final String INDEX_NAME = "test-index"; private final boolean useLegacyFormat; + private final boolean useSyntheticSource; - public ShardBulkInferenceActionFilterIT(boolean useLegacyFormat) { + public ShardBulkInferenceActionFilterIT(boolean useLegacyFormat, boolean useSyntheticSource) { this.useLegacyFormat = useLegacyFormat; + this.useSyntheticSource = useSyntheticSource; } @ParametersFactory public static Iterable parameters() throws Exception { - return List.of(new Object[] { true }, new Object[] { false }); + return List.of( + new Object[] { true, false }, + new Object[] { true, true }, + new Object[] { false, false }, + new Object[] { false, true } + ); } @Before @@ -79,37 +87,38 @@ protected Collection> nodePlugins() { @Override public Settings indexSettings() { - return Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + var builder = Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10)) - .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat) - .build(); + .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat); + if (useSyntheticSource) { + builder.put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true); + builder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()); + } + return builder.build(); } public void testBulkOperations() throws Exception { - indicesAdmin().prepareCreate(INDEX_NAME) - .setMapping( - String.format( - Locale.ROOT, - """ - { - "properties": { - "sparse_field": { - "type": "semantic_text", - "inference_id": "%s" - }, - "dense_field": { - "type": "semantic_text", - "inference_id": "%s" - } + prepareCreate(INDEX_NAME).setMapping( + String.format( + Locale.ROOT, + """ + { + "properties": { + "sparse_field": { + "type": "semantic_text", + "inference_id": "%s" + }, + "dense_field": { + "type": "semantic_text", + "inference_id": "%s" } } - """, - TestSparseInferenceServiceExtension.TestInferenceService.NAME, - TestDenseInferenceServiceExtension.TestInferenceService.NAME - ) + } + """, + TestSparseInferenceServiceExtension.TestInferenceService.NAME, + TestDenseInferenceServiceExtension.TestInferenceService.NAME ) - .get(); + ).get(); int totalBulkReqs = randomIntBetween(2, 100); long totalDocs = 0; diff --git 
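// For illustration: the @ParametersFactory above hand-lists the four
// {useLegacyFormat, useSyntheticSource} combinations. A hedged sketch that derives the same
// cross product programmatically (plain Java, no test framework; equivalent output order):
import java.util.ArrayList;
import java.util.List;

final class BooleanMatrixSketch {
    static Iterable<Object[]> parameters() {
        List<Object[]> params = new ArrayList<>();
        for (boolean useLegacyFormat : new boolean[] { true, false }) {
            for (boolean useSyntheticSource : new boolean[] { false, true }) {
                params.add(new Object[] { useLegacyFormat, useSyntheticSource });
            }
        }
        return params; // four combinations, matching the literal List.of(...) in the test
    }

    public static void main(String[] args) {
        parameters().forEach(p -> System.out.println(java.util.Arrays.toString(p)));
    }
}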
a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java new file mode 100644 index 0000000000000..5205ce07a0676 --- /dev/null +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/InferenceRevokeDefaultEndpointsIT.java @@ -0,0 +1,279 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.integration; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.MinimalServiceSettings; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceComponents; +import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; +import org.junit.After; +import org.junit.Before; + +import java.util.Collection; +import java.util.EnumSet; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.hamcrest.CoreMatchers.is; +import static org.mockito.Mockito.mock; + +public class InferenceRevokeDefaultEndpointsIT extends ESSingleNodeTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private ModelRegistry modelRegistry; + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private String gatewayUrl; + + @Before + public void createComponents() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + webServer.start(); + gatewayUrl = getUrl(webServer); + modelRegistry = new ModelRegistry(client()); + } + + @After + public void shutdown() { + terminate(threadPool); + webServer.close(); + } + + @Override + protected boolean resetNodeAfterTest() { + return true; + } + + @Override + protected Collection> getPlugins() { + 
return pluginList(ReindexPlugin.class); + } + + public void testDefaultConfigs_Returns_DefaultChatCompletion_V1_WhenTaskTypeIsCorrect() throws Exception { + String responseJson = """ + { + "models": [ + { + "model_name": "rainbow-sprinkles", + "task_types": ["chat"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId(".rainbow-sprinkles-elastic", MinimalServiceSettings.chatCompletion(), service) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + } + } + + public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationReturnsEmpty() throws Exception { + { + String responseJson = """ + { + "models": [ + { + "model_name": "rainbow-sprinkles", + "task_types": ["chat"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId( + ".rainbow-sprinkles-elastic", + MinimalServiceSettings.chatCompletion(), + service + ) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + + var getModelListener = new PlainActionFuture(); + // persists the default endpoints + modelRegistry.getModel(".rainbow-sprinkles-elastic", getModelListener); + + var inferenceEntity = getModelListener.actionGet(TIMEOUT); + assertThat(inferenceEntity.inferenceEntityId(), is(".rainbow-sprinkles-elastic")); + assertThat(inferenceEntity.taskType(), is(TaskType.CHAT_COMPLETION)); + } + } + { + String noAuthorizationResponseJson = """ + { + "models": [] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(noAuthorizationResponseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.noneOf(TaskType.class))); + assertTrue(service.defaultConfigIds().isEmpty()); + assertThat(service.supportedTaskTypes(), is(EnumSet.noneOf(TaskType.class))); + + var getModelListener = new PlainActionFuture(); + modelRegistry.getModel(".rainbow-sprinkles-elastic", getModelListener); + + var exception = expectThrows(ResourceNotFoundException.class, () -> getModelListener.actionGet(TIMEOUT)); + assertThat(exception.getMessage(), is("Inference endpoint not found [.rainbow-sprinkles-elastic]")); + } + } + } + + public void testRemoves_DefaultChatCompletion_V1_WhenAuthorizationDoesNotReturnAuthForIt() throws Exception { + { + String responseJson = """ + { + "models": [ + { + "model_name": 
"rainbow-sprinkles", + "task_types": ["chat"] + }, + { + "model_name": "elser-v2", + "task_types": ["embed/text/sparse"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId(".elser-v2-elastic", MinimalServiceSettings.sparseEmbedding(), service), + new InferenceService.DefaultConfigId( + ".rainbow-sprinkles-elastic", + MinimalServiceSettings.chatCompletion(), + service + ) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.SPARSE_EMBEDDING))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".elser-v2-elastic")); + assertThat(listener.actionGet(TIMEOUT).get(1).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + + var getModelListener = new PlainActionFuture(); + // persists the default endpoints + modelRegistry.getModel(".rainbow-sprinkles-elastic", getModelListener); + + var inferenceEntity = getModelListener.actionGet(TIMEOUT); + assertThat(inferenceEntity.inferenceEntityId(), is(".rainbow-sprinkles-elastic")); + assertThat(inferenceEntity.taskType(), is(TaskType.CHAT_COMPLETION)); + } + } + { + String noAuthorizationResponseJson = """ + { + "models": [ + { + "model_name": "elser-v2", + "task_types": ["embed/text/sparse"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(noAuthorizationResponseJson)); + + try (var service = createElasticInferenceService()) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.noneOf(TaskType.class))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId(".elser-v2-elastic", MinimalServiceSettings.sparseEmbedding(), service) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING))); + + var getModelListener = new PlainActionFuture(); + modelRegistry.getModel(".rainbow-sprinkles-elastic", getModelListener); + var exception = expectThrows(ResourceNotFoundException.class, () -> getModelListener.actionGet(TIMEOUT)); + assertThat(exception.getMessage(), is("Inference endpoint not found [.rainbow-sprinkles-elastic]")); + } + } + } + + private ElasticInferenceService createElasticInferenceService() { + var httpManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, httpManager); + + return new ElasticInferenceService( + senderFactory, + createWithEmptySettings(threadPool), + new ElasticInferenceServiceComponents(gatewayUrl), + modelRegistry, + new ElasticInferenceServiceAuthorizationHandler(gatewayUrl, threadPool) + ); + } +} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index 51ee42cf2f7f2..4fad6977ab852 100644 --- 
a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -10,10 +10,12 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.InferenceService; @@ -51,7 +53,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Function; @@ -70,6 +74,7 @@ import static org.mockito.Mockito.mock; public class ModelRegistryIT extends ESSingleNodeTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); private ModelRegistry modelRegistry; @@ -195,6 +200,56 @@ public void testDeleteModel() throws Exception { assertThat(exceptionHolder.get().getMessage(), containsString("Inference endpoint not found [model1]")); } + public void testNonExistentDeleteModel_DoesNotThrowAnException() { + var listener = new PlainActionFuture(); + + modelRegistry.deleteModel("non-existent-model", listener); + assertTrue(listener.actionGet(TIMEOUT)); + } + + public void testRemoveDefaultConfigs_DoesNotThrowAnException_WhenSearchingForNonExistentInferenceEndpointIds() { + var listener = new PlainActionFuture(); + + modelRegistry.deleteModels(Set.of("non-existent-model", "abc"), listener); + assertTrue(listener.actionGet(TIMEOUT)); + } + + public void testRemoveDefaultConfigs_RemovesModelsFromPersistentStorage_AndInMemoryCache() { + var service = mock(InferenceService.class); + + var defaultConfigs = new ArrayList(); + var defaultIds = new ArrayList(); + for (var id : new String[] { "model1", "model2", "model3" }) { + var modelSettings = ModelRegistryTests.randomMinimalServiceSettings(); + defaultConfigs.add(createModel(id, modelSettings.taskType(), "name")); + defaultIds.add(new InferenceService.DefaultConfigId(id, modelSettings, service)); + } + + doAnswer(invocation -> { + ActionListener> listener = invocation.getArgument(0); + listener.onResponse(defaultConfigs); + return Void.TYPE; + }).when(service).defaultConfigs(any()); + + defaultIds.forEach(modelRegistry::addDefaultIds); + + var getModelsListener = new PlainActionFuture>(); + modelRegistry.getAllModels(true, getModelsListener); + var unparsedModels = getModelsListener.actionGet(TIMEOUT); + assertThat(unparsedModels.size(), is(3)); + + var removeModelsListener = new PlainActionFuture(); + + modelRegistry.removeDefaultConfigs(Set.of("model1", "model2", "model3"), removeModelsListener); + assertTrue(removeModelsListener.actionGet(TIMEOUT)); + + var getModelsAfterDeleteListener = new PlainActionFuture>(); + // the models should have been removed from the in-memory cache; if not, they will be persisted again by this call + modelRegistry.getAllModels(true,
getModelsAfterDeleteListener); + var unparsedModelsAfterDelete = getModelsAfterDeleteListener.actionGet(TIMEOUT); + assertThat(unparsedModelsAfterDelete.size(), is(0)); + } + public void testGetModelsByTaskType() throws InterruptedException { var service = "foo"; var sparseAndTextEmbeddingModels = new ArrayList<Model>(); @@ -315,8 +370,7 @@ public void testGetAllModels_WithDefaults() throws Exception { } doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(defaultConfigs); return Void.TYPE; }).when(service).defaultConfigs(any()); @@ -381,8 +435,7 @@ public void testGetAllModels_OnlyDefaults() throws Exception { } doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(defaultConfigs); return Void.TYPE; }).when(service).defaultConfigs(any()); @@ -424,8 +477,7 @@ public void testGetAllModels_withDoNotPersist() throws Exception { } doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(defaultConfigs); return Void.TYPE; }).when(service).defaultConfigs(any()); @@ -469,8 +521,7 @@ public void testGet_WithDefaults() throws InterruptedException { ); doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(defaultConfigs); return Void.TYPE; }).when(service).defaultConfigs(any()); @@ -523,8 +574,7 @@ public void testGetByTaskType_WithDefaults() throws Exception { defaultIds.add(new InferenceService.DefaultConfigId("default-chat", MinimalServiceSettings.completion(), service)); doAnswer(invocation -> { - @SuppressWarnings("unchecked") - var listener = (ActionListener<List<Model>>) invocation.getArguments()[0]; + ActionListener<List<Model>> listener = invocation.getArgument(0); listener.onResponse(List.of(defaultSparse, defaultChat, defaultText)); return Void.TYPE; }).when(service).defaultConfigs(any()); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index a22e179479dec..903961794b337 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -44,10 +44,12 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.XContentFormattedException; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventField;
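The doAnswer cleanup repeated above swaps an unchecked cast of getArguments()[0] for Mockito's type-inferring getArgument(int). A minimal, self-contained sketch of the pattern (FakeService and the String payload are illustrative names, not part of this change):

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import org.elasticsearch.action.ActionListener;

import java.util.List;

class GetArgumentSketch {
    interface FakeService {
        void defaultConfigs(ActionListener<List<String>> listener);
    }

    static FakeService stubDefaults() {
        FakeService service = mock(FakeService.class);
        doAnswer(invocation -> {
            // getArgument(0) is generic in its return type, so the listener type is
            // inferred from the assignment target and no @SuppressWarnings("unchecked")
            // cast is needed, unlike getArguments()[0], which returns Object.
            ActionListener<List<String>> listener = invocation.getArgument(0);
            listener.onResponse(List.of("default-config"));
            return Void.TYPE; // mirrors the convention used in the tests above
        }).when(service).defaultConfigs(any());
        return service;
    }
}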
import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; @@ -80,6 +82,7 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { private static final String REQUEST_COUNT = "request_count"; private static final String WITH_ERROR = "with_error"; private static final String ERROR_ROUTE = "/_inference_error"; + private static final String FORMATTED_ERROR_ROUTE = "/_formatted_inference_error"; private static final String NO_STREAM_ROUTE = "/_inference_no_stream"; private static final Exception expectedException = new IllegalStateException("hello there"); private static final String expectedExceptionAsServerSentEvent = """ @@ -88,6 +91,11 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { "type":"illegal_state_exception","reason":"hello there"},"status":500\ }"""; + private static final Exception expectedFormattedException = new XContentFormattedException( + expectedException, + RestStatus.INTERNAL_SERVER_ERROR + ); + @Override protected boolean addMockHttpTransport() { return false; @@ -145,6 +153,16 @@ public List routes() { public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { new ServerSentEventsRestActionListener(channel, threadPool).onFailure(expectedException); } + }, new RestHandler() { + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.POST, FORMATTED_ERROR_ROUTE)); + } + + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { + new ServerSentEventsRestActionListener(channel, threadPool).onFailure(expectedFormattedException); + } }, new RestHandler() { @Override public List routes() { @@ -424,6 +442,21 @@ public void testErrorMidStream() { assertThat(collector.stringsVerified.getLast(), equalTo(expectedExceptionAsServerSentEvent)); } + public void testFormattedError() throws IOException { + var request = new Request(RestRequest.Method.POST.name(), FORMATTED_ERROR_ROUTE); + + try { + getRestClient().performRequest(request); + fail("Expected an exception to be thrown from the error route"); + } catch (ResponseException e) { + var response = e.getResponse(); + assertThat(response.getStatusLine().getStatusCode(), is(HttpStatus.SC_INTERNAL_SERVER_ERROR)); + assertThat(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), equalTo(""" + \uFEFFevent: error + data:\s""" + expectedExceptionAsServerSentEvent + "\n\n")); + } + } + public void testNoStream() { var collector = new RandomStringCollector(); var expectedTestCount = randomIntBetween(2, 30); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 4707a7824fcd1..e8fcb8dfe117d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -15,6 +15,7 @@ import java.util.Set; +import static org.elasticsearch.xpack.inference.queries.SemanticKnnVectorQueryRewriteInterceptor.SEMANTIC_KNN_FILTER_FIX; import static org.elasticsearch.xpack.inference.queries.SemanticKnnVectorQueryRewriteInterceptor.SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED; import static org.elasticsearch.xpack.inference.queries.SemanticMatchQueryRewriteInterceptor.SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED; import static 
org.elasticsearch.xpack.inference.queries.SemanticSparseVectorQueryRewriteInterceptor.SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED; @@ -25,6 +26,7 @@ public class InferenceFeatures implements FeatureSpecification { private static final NodeFeature SEMANTIC_TEXT_HIGHLIGHTER = new NodeFeature("semantic_text.highlighter"); + private static final NodeFeature SEMANTIC_TEXT_HIGHLIGHTER_DEFAULT = new NodeFeature("semantic_text.highlighter.default"); @Override public Set getTestFeatures() { @@ -34,13 +36,16 @@ public Set getTestFeatures() { SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_SKIP_INFERENCE_FIELDS, SEMANTIC_TEXT_HIGHLIGHTER, SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED, SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, SemanticInferenceMetadataFieldsMapper.EXPLICIT_NULL_FIXES, SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_ALIAS_HANDLING_FIX, - SemanticInferenceMetadataFieldsMapper.INFERENCE_METADATA_FIELDS_ENABLED_BY_DEFAULT + SemanticInferenceMetadataFieldsMapper.INFERENCE_METADATA_FIELDS_ENABLED_BY_DEFAULT, + SEMANTIC_TEXT_HIGHLIGHTER_DEFAULT, + SEMANTIC_KNN_FILTER_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 6fc9870034018..e8dc763116707 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -75,6 +75,8 @@ import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; import org.elasticsearch.xpack.inference.services.jinaai.JinaAIServiceSettings; import org.elasticsearch.xpack.inference.services.jinaai.embeddings.JinaAIEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.jinaai.embeddings.JinaAIEmbeddingsTaskSettings; @@ -364,6 +366,17 @@ private static void addIbmWatsonxNamedWritables(List namedWriteables) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index b007aa8bfa1f8..e3604351c1937 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.GetInferenceServicesAction; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import 
org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; @@ -67,11 +68,15 @@ import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceServicesAction; import org.elasticsearch.xpack.inference.action.TransportInferenceAction; +import org.elasticsearch.xpack.inference.action.TransportInferenceActionProxy; import org.elasticsearch.xpack.inference.action.TransportInferenceUsageAction; import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportUnifiedCompletionInferenceAction; import org.elasticsearch.xpack.inference.action.TransportUpdateInferenceModelAction; import org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter; +import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.NoopNodeLocalRateLimitCalculator; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockRequestSender; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; @@ -101,7 +106,6 @@ import org.elasticsearch.xpack.inference.rest.RestInferenceAction; import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestStreamInferenceAction; -import org.elasticsearch.xpack.inference.rest.RestUnifiedCompletionInferenceAction; import org.elasticsearch.xpack.inference.rest.RestUpdateInferenceModelAction; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.alibabacloudsearch.AlibabaCloudSearchService; @@ -131,12 +135,9 @@ import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.stream.Stream; import static java.util.Collections.singletonList; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; +import static org.elasticsearch.xpack.inference.common.InferenceAPIClusterAwareRateLimitingFeature.INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG; public class InferencePlugin extends Plugin implements @@ -193,24 +194,18 @@ public InferencePlugin(Settings settings) { @Override public List> getActions() { - var availableActions = List.of( + return List.of( new ActionHandler<>(InferenceAction.INSTANCE, TransportInferenceAction.class), - + new ActionHandler<>(InferenceActionProxy.INSTANCE, TransportInferenceActionProxy.class), new ActionHandler<>(GetInferenceModelAction.INSTANCE, TransportGetInferenceModelAction.class), new ActionHandler<>(PutInferenceModelAction.INSTANCE, TransportPutInferenceModelAction.class), new ActionHandler<>(UpdateInferenceModelAction.INSTANCE, TransportUpdateInferenceModelAction.class), new ActionHandler<>(DeleteInferenceEndpointAction.INSTANCE, 
TransportDeleteInferenceEndpointAction.class), new ActionHandler<>(XPackUsageFeatureAction.INFERENCE, TransportInferenceUsageAction.class), new ActionHandler<>(GetInferenceDiagnosticsAction.INSTANCE, TransportGetInferenceDiagnosticsAction.class), - new ActionHandler<>(GetInferenceServicesAction.INSTANCE, TransportGetInferenceServicesAction.class) + new ActionHandler<>(GetInferenceServicesAction.INSTANCE, TransportGetInferenceServicesAction.class), + new ActionHandler<>(UnifiedCompletionAction.INSTANCE, TransportUnifiedCompletionInferenceAction.class) ); - - List> conditionalActions = - UnifiedCompletionFeature.UNIFIED_COMPLETION_FEATURE_FLAG.isEnabled() - ? List.of(new ActionHandler<>(UnifiedCompletionAction.INSTANCE, TransportUnifiedCompletionInferenceAction.class)) - : List.of(); - - return Stream.concat(availableActions.stream(), conditionalActions.stream()).toList(); } @Override @@ -225,7 +220,7 @@ public List getRestHandlers( Supplier nodesInCluster, Predicate clusterSupportsFeature ) { - var availableRestActions = List.of( + return List.of( new RestInferenceAction(), new RestStreamInferenceAction(threadPoolSetOnce), new RestGetInferenceModelAction(), @@ -235,15 +230,11 @@ public List getRestHandlers( new RestGetInferenceDiagnosticsAction(), new RestGetInferenceServicesAction() ); - List conditionalRestActions = UnifiedCompletionFeature.UNIFIED_COMPLETION_FEATURE_FLAG.isEnabled() - ? List.of(new RestUnifiedCompletionInferenceAction(threadPoolSetOnce)) - : List.of(); - - return Stream.concat(availableRestActions.stream(), conditionalRestActions.stream()).toList(); } @Override public Collection createComponents(PluginServices services) { + var components = new ArrayList<>(); var throttlerManager = new ThrottlerManager(settings, services.threadPool(), services.clusterService()); var truncator = new Truncator(settings, services.clusterService()); serviceComponents.set(new ServiceComponents(services.threadPool(), throttlerManager, settings, truncator)); @@ -264,46 +255,44 @@ public Collection createComponents(PluginServices services) { var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - if (isElasticInferenceServiceEnabled()) { - // Create a separate instance of HTTPClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`). 
- var elasticInferenceServiceHttpClientManager = HttpClientManager.create( - settings, - services.threadPool(), - services.clusterService(), - throttlerManager, - getSslService() - ); - - var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory( - serviceComponents.get(), - elasticInferenceServiceHttpClientManager, - services.clusterService() - ); - elasicInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory); - - ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); - String elasticInferenceUrl = this.getElasticInferenceServiceUrl(inferenceServiceSettings); - - var elasticInferenceServiceComponentsInstance = new ElasticInferenceServiceComponents(elasticInferenceUrl); - elasticInferenceServiceComponents.set(elasticInferenceServiceComponentsInstance); - - var authorizationHandler = new ElasticInferenceServiceAuthorizationHandler( - elasticInferenceServiceComponentsInstance.elasticInferenceServiceUrl(), - services.threadPool() - ); - - inferenceServices.add( - () -> List.of( - context -> new ElasticInferenceService( - elasicInferenceServiceFactory.get(), - serviceComponents.get(), - elasticInferenceServiceComponentsInstance, - modelRegistry, - authorizationHandler - ) + // Create a separate instance of HTTPClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`). + var elasticInferenceServiceHttpClientManager = HttpClientManager.create( + settings, + services.threadPool(), + services.clusterService(), + throttlerManager, + getSslService() + ); + + var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory( + serviceComponents.get(), + elasticInferenceServiceHttpClientManager, + services.clusterService() + ); + elasicInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory); + + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + String elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); + + var elasticInferenceServiceComponentsInstance = new ElasticInferenceServiceComponents(elasticInferenceUrl); + elasticInferenceServiceComponents.set(elasticInferenceServiceComponentsInstance); + + var authorizationHandler = new ElasticInferenceServiceAuthorizationHandler( + elasticInferenceServiceComponentsInstance.elasticInferenceServiceUrl(), + services.threadPool() + ); + + inferenceServices.add( + () -> List.of( + context -> new ElasticInferenceService( + elasicInferenceServiceFactory.get(), + serviceComponents.get(), + elasticInferenceServiceComponentsInstance, + modelRegistry, + authorizationHandler ) - ); - } + ) + ); var factoryContext = new InferenceServiceExtension.InferenceServiceFactoryContext( services.client(), @@ -314,20 +303,37 @@ public Collection createComponents(PluginServices services) { // This must be done after the HttpRequestSenderFactory is created so that the services can get the // reference correctly - var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); - registry.init(services.client()); - for (var service : registry.getServices().values()) { + var serviceRegistry = new InferenceServiceRegistry(inferenceServices, factoryContext); + serviceRegistry.init(services.client()); + for (var service : serviceRegistry.getServices().values()) { service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); } - inferenceServiceRegistry.set(registry); + inferenceServiceRegistry.set(serviceRegistry); - var actionFilter = new 
ShardBulkInferenceActionFilter(services.clusterService(), registry, modelRegistry); + var actionFilter = new ShardBulkInferenceActionFilter(services.clusterService(), serviceRegistry, modelRegistry); shardBulkInferenceActionFilter.set(actionFilter); var meterRegistry = services.telemetryProvider().getMeterRegistry(); - var stats = new PluginComponentBinding<>(InferenceStats.class, InferenceStats.create(meterRegistry)); + var inferenceStats = new PluginComponentBinding<>(InferenceStats.class, InferenceStats.create(meterRegistry)); + + components.add(serviceRegistry); + components.add(modelRegistry); + components.add(httpClientManager); + components.add(inferenceStats); + + // Only add InferenceServiceNodeLocalRateLimitCalculator (which is a ClusterStateListener) for cluster aware rate limiting, + // if the rate limiting feature flags are enabled, otherwise provide noop implementation + InferenceServiceRateLimitCalculator calculator; + if (INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG.isEnabled()) { + calculator = new InferenceServiceNodeLocalRateLimitCalculator(services.clusterService(), serviceRegistry); + } else { + calculator = new NoopNodeLocalRateLimitCalculator(); + } + + // Add binding for interface -> implementation + components.add(new PluginComponentBinding<>(InferenceServiceRateLimitCalculator.class, calculator)); - return List.of(modelRegistry, registry, httpClientManager, stats); + return components; } @Override @@ -429,11 +435,7 @@ public List> getSettings() { settings.addAll(Truncator.getSettingsDefinitions()); settings.addAll(RequestExecutorServiceSettings.getSettingsDefinitions()); settings.add(SKIP_VALIDATE_AND_START); - - // Register Elastic Inference Service settings definitions if the corresponding feature flag is enabled. - if (isElasticInferenceServiceEnabled()) { - settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions()); - } + settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions()); return settings; } @@ -505,30 +507,6 @@ public Map getHighlighters() { return Map.of(SemanticTextHighlighter.NAME, new SemanticTextHighlighter()); } - // Get Elastic Inference service URL based on feature flags to support transitioning - // to the new Elastic Inference Service URL. - private String getElasticInferenceServiceUrl(ElasticInferenceServiceSettings settings) { - String elasticInferenceUrl = null; - - if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - elasticInferenceUrl = settings.getElasticInferenceServiceUrl(); - } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - log.warn( - "Deprecated flag {} detected for enabling {}. 
Please use {}.", - ELASTIC_INFERENCE_SERVICE_IDENTIFIER, - DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, - ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG - ); - elasticInferenceUrl = settings.getEisGatewayUrl(); - } - - return elasticInferenceUrl; - } - - protected Boolean isElasticInferenceServiceEnabled() { - return (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()); - } - protected SSLService getSslService() { return XPackPlugin.getSharedSslService(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnifiedCompletionFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnifiedCompletionFeature.java deleted file mode 100644 index 3e13d0c1e39de..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnifiedCompletionFeature.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.util.FeatureFlag; - -/** - * Unified Completion feature flag. When the feature is complete, this flag will be removed. - * Enable feature via JVM option: `-Des.inference_unified_feature_flag_enabled=true`. - */ -public class UnifiedCompletionFeature { - public static final FeatureFlag UNIFIED_COMPLETION_FEATURE_FLAG = new FeatureFlag("inference_unified"); - - private UnifiedCompletionFeature() {} -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java index b6c7d26b36f9a..4afafc5adf0c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java @@ -13,6 +13,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.ChunkedToXContent; @@ -27,24 +31,43 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.inference.action.BaseInferenceActionRequest; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; import org.elasticsearch.xpack.inference.common.DelegatingProcessor; 
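A standalone sketch of the node-selection step that the rerouting logic below performs: draw one node uniformly at random from the responsible set, and handle the request locally when the draw lands on the local node. All names here are illustrative stand-ins, not part of this change:

import java.util.List;
import java.util.Random;

final class NodePickSketch {
    record Decision(boolean handleLocally, String targetNodeId) {}

    static Decision pick(List<String> responsibleNodeIds, String localNodeId, Random random) {
        // No assignment yet, or an empty grouping: fall back to handling the request locally.
        if (responsibleNodeIds == null || responsibleNodeIds.isEmpty()) {
            return new Decision(true, null);
        }
        // A uniform draw spreads load roughly evenly across the responsible nodes.
        String drawn = responsibleNodeIds.get(random.nextInt(responsibleNodeIds.size()));
        return drawn.equals(localNodeId) ? new Decision(true, null) : new Decision(false, drawn);
    }

    public static void main(String[] args) {
        var decision = pick(List.of("node-a", "node-b", "node-c"), "node-a", new Random(42));
        System.out.println(decision); // either handle locally or route to the drawn node
    }
}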
+import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; import org.elasticsearch.xpack.inference.telemetry.InferenceTimer; +import java.io.IOException; +import java.util.Random; +import java.util.concurrent.Executor; +import java.util.concurrent.Flow; import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.InferencePlugin.INFERENCE_API_FEATURE; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.modelAttributes; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.responseAttributes; +/** + * Base class for transport actions that handle inference requests. + * Works in conjunction with {@link InferenceServiceNodeLocalRateLimitCalculator} to + * route requests to specific nodes, iff they support "node-local" rate limiting, which is described in detail + * in {@link InferenceServiceNodeLocalRateLimitCalculator}. + * + * @param The specific type of inference request being handled + */ public abstract class BaseTransportInferenceAction extends HandledTransportAction< Request, InferenceAction.Response> { @@ -57,6 +80,11 @@ public abstract class BaseTransportInferenceAction requestReader + Writeable.Reader requestReader, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { super(inferenceActionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.licenseState = licenseState; @@ -75,8 +106,24 @@ public BaseTransportInferenceAction( this.serviceRegistry = serviceRegistry; this.inferenceStats = inferenceStats; this.streamingTaskManager = streamingTaskManager; + this.inferenceServiceRateLimitCalculator = inferenceServiceNodeLocalRateLimitCalculator; + this.nodeClient = nodeClient; + this.threadPool = threadPool; + this.transportService = transportService; + this.random = Randomness.get(); } + protected abstract boolean isInvalidTaskTypeForInferenceEndpoint(Request request, UnparsedModel unparsedModel); + + protected abstract ElasticsearchStatusException createInvalidTaskTypeException(Request request, UnparsedModel unparsedModel); + + protected abstract void doInference( + Model model, + Request request, + InferenceService service, + ActionListener listener + ); + @Override protected void doExecute(Task task, Request request, ActionListener listener) { if (INFERENCE_API_FEATURE.check(licenseState) == false) { @@ -87,31 +134,32 @@ protected void doExecute(Task task, Request request, ActionListener { - var service = serviceRegistry.getService(unparsedModel.service()); + var serviceName = unparsedModel.service(); + try { - validationHelper(service::isEmpty, () -> unknownServiceException(unparsedModel.service(), request.getInferenceEntityId())); - validationHelper( - () -> request.getTaskType().isAnyOrSame(unparsedModel.taskType()) == false, - () -> requestModelTaskTypeMismatchException(request.getTaskType(), unparsedModel.taskType()) - ); - validationHelper( - () -> isInvalidTaskTypeForInferenceEndpoint(request, unparsedModel), - () -> createInvalidTaskTypeException(request, unparsedModel) - ); + 
validateRequest(request, unparsedModel); } catch (Exception e) { recordMetrics(unparsedModel, timer, e); listener.onFailure(e); return; } - var model = service.get() - .parsePersistedConfigWithSecrets( + var service = serviceRegistry.getService(serviceName).get(); + var routingDecision = determineRouting(serviceName, request, unparsedModel); + + if (routingDecision.currentNodeShouldHandleRequest()) { + var model = service.parsePersistedConfigWithSecrets( unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings(), unparsedModel.secrets() ); - inferOnServiceWithMetrics(model, request, service.get(), timer, listener); + inferOnServiceWithMetrics(model, request, service, timer, listener); + } else { + // Reroute request + request.setHasBeenRerouted(true); + rerouteRequest(request, listener, routingDecision.targetNode); + } }, e -> { try { inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(e)); @@ -124,15 +172,91 @@ protected void doExecute(Task task, Request request, ActionListener unknownServiceException(serviceName, request.getInferenceEntityId())); + validationHelper( + () -> request.getTaskType().isAnyOrSame(unparsedModel.taskType()) == false, + () -> requestModelTaskTypeMismatchException(requestTaskType, unparsedModel.taskType()) + ); + validationHelper( + () -> isInvalidTaskTypeForInferenceEndpoint(request, unparsedModel), + () -> createInvalidTaskTypeException(request, unparsedModel) + ); + } + + private NodeRoutingDecision determineRouting(String serviceName, Request request, UnparsedModel unparsedModel) { + var modelTaskType = unparsedModel.taskType(); + + // Rerouting not supported or request was already rerouted + if (inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceName, modelTaskType) == false + || request.hasBeenRerouted()) { + return NodeRoutingDecision.handleLocally(); + } + + var rateLimitAssignment = inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceName, modelTaskType); + + // No assignment yet + if (rateLimitAssignment == null) { + return NodeRoutingDecision.handleLocally(); + } + + var responsibleNodes = rateLimitAssignment.responsibleNodes(); + + // Empty assignment + if (responsibleNodes == null || responsibleNodes.isEmpty()) { + return NodeRoutingDecision.handleLocally(); + } + + var nodeToHandleRequest = responsibleNodes.get(random.nextInt(responsibleNodes.size())); + String localNodeId = nodeClient.getLocalNodeId(); + + // The drawn node is the current node + if (nodeToHandleRequest.getId().equals(localNodeId)) { + return NodeRoutingDecision.handleLocally(); + } + + // Reroute request + return NodeRoutingDecision.routeTo(nodeToHandleRequest); + } + private static void validationHelper(Supplier validationFailure, Supplier exceptionCreator) { if (validationFailure.get()) { throw exceptionCreator.get(); } } - protected abstract boolean isInvalidTaskTypeForInferenceEndpoint(Request request, UnparsedModel unparsedModel); - - protected abstract ElasticsearchStatusException createInvalidTaskTypeException(Request request, UnparsedModel unparsedModel); + private void rerouteRequest(Request request, ActionListener listener, DiscoveryNode nodeToHandleRequest) { + transportService.sendRequest( + nodeToHandleRequest, + InferenceAction.NAME, + request, + new TransportResponseHandler() { + @Override + public Executor executor() { + return threadPool.executor(InferencePlugin.UTILITY_THREAD_POOL_NAME); + } + + @Override + public void handleResponse(InferenceAction.Response response) { + 
listener.onResponse(response); + } + + @Override + public void handleException(TransportException exp) { + listener.onFailure(exp); + } + + @Override + public InferenceAction.Response read(StreamInput in) throws IOException { + return new InferenceAction.Response(in); + } + } + ); + } private void recordMetrics(UnparsedModel model, InferenceTimer timer, @Nullable Throwable t) { try { @@ -158,7 +282,9 @@ private void inferOnServiceWithMetrics( var instrumentedStream = new PublisherWithMetrics(timer, model); taskProcessor.subscribe(instrumentedStream); - listener.onResponse(new InferenceAction.Response(inferenceResults, instrumentedStream)); + var streamErrorHandler = streamErrorHandler(instrumentedStream); + + listener.onResponse(new InferenceAction.Response(inferenceResults, streamErrorHandler)); } else { recordMetrics(model, timer, null); listener.onResponse(new InferenceAction.Response(inferenceResults)); @@ -169,29 +295,26 @@ private void inferOnServiceWithMetrics( })); } + protected Flow.Publisher streamErrorHandler(Flow.Processor upstream) { + return upstream; + } + private void recordMetrics(Model model, InferenceTimer timer, @Nullable Throwable t) { try { - inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(model, t)); + inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(model, unwrapCause(t))); } catch (Exception e) { log.atDebug().withThrowable(e).log("Failed to record metrics with a parsed model, dropping metrics"); } } private void inferOnService(Model model, Request request, InferenceService service, ActionListener listener) { - if (request.isStreaming() == false || service.canStream(request.getTaskType())) { + if (request.isStreaming() == false || service.canStream(model.getTaskType())) { doInference(model, request, service, listener); } else { listener.onFailure(unsupportedStreamingTaskException(request, service)); } } - protected abstract void doInference( - Model model, - Request request, - InferenceService service, - ActionListener listener - ); - private ElasticsearchStatusException unsupportedStreamingTaskException(Request request, InferenceService service) { var supportedTasks = service.supportedStreamingTasks(); if (supportedTasks.isEmpty()) { @@ -259,4 +382,14 @@ public void onComplete() { super.onComplete(); } } + + private record NodeRoutingDecision(boolean currentNodeShouldHandleRequest, DiscoveryNode targetNode) { + static NodeRoutingDecision handleLocally() { + return new NodeRoutingDecision(true, null); + } + + static NodeRoutingDecision routeTo(DiscoveryNode node) { + return new NodeRoutingDecision(false, node); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java index 242c2f79733eb..3b6901ae0c31d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java @@ -88,6 +88,17 @@ private void doExecuteForked( ClusterState state, ActionListener masterListener ) { + if (modelRegistry.containsDefaultConfigId(request.getInferenceEndpointId())) { + masterListener.onFailure( + new ElasticsearchStatusException( + "[{}] is a reserved inference endpoint. 
Cannot delete a reserved inference endpoint.", + RestStatus.BAD_REQUEST, + request.getInferenceEndpointId() + ) + ); + return; + } + SubscribableListener.newForked(modelConfigListener -> { // Get the model from the registry diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index 24ef0d7d610d0..e8f52e42f5708 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InferenceServiceResults; @@ -17,9 +18,11 @@ import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -33,7 +36,10 @@ public TransportInferenceAction( ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { super( InferenceAction.NAME, @@ -44,7 +50,10 @@ public TransportInferenceAction( serviceRegistry, inferenceStats, streamingTaskManager, - InferenceAction.Request::new + InferenceAction.Request::new, + inferenceServiceNodeLocalRateLimitCalculator, + nodeClient, + threadPool ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxy.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxy.java new file mode 100644 index 0000000000000..6d46f834d4873 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxy.java @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; +import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; + +import java.io.IOException; + +import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class TransportInferenceActionProxy extends HandledTransportAction { + private final ModelRegistry modelRegistry; + private final Client client; + + @Inject + public TransportInferenceActionProxy( + TransportService transportService, + ActionFilters actionFilters, + ModelRegistry modelRegistry, + Client client + ) { + super( + InferenceActionProxy.NAME, + transportService, + actionFilters, + InferenceActionProxy.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + this.modelRegistry = modelRegistry; + this.client = client; + } + + @Override + protected void doExecute(Task task, InferenceActionProxy.Request request, ActionListener listener) { + try { + ActionListener getModelListener = listener.delegateFailureAndWrap((l, unparsedModel) -> { + if (unparsedModel.taskType() == TaskType.CHAT_COMPLETION) { + sendUnifiedCompletionRequest(request, l); + } else { + sendInferenceActionRequest(request, l); + } + }); + + if (request.getTaskType() == TaskType.ANY) { + modelRegistry.getModelWithSecrets(request.getInferenceEntityId(), getModelListener); + } else if (request.getTaskType() == TaskType.CHAT_COMPLETION) { + sendUnifiedCompletionRequest(request, listener); + } else { + sendInferenceActionRequest(request, listener); + } + } catch (Exception e) { + listener.onFailure(e); + } + } + + private void sendUnifiedCompletionRequest(InferenceActionProxy.Request request, ActionListener listener) { + // format any validation exceptions from the rest -> transport path as UnifiedChatCompletionException + var unifiedErrorFormatListener = listener.delegateResponse((l, e) -> l.onFailure(UnifiedChatCompletionException.fromThrowable(e))); + + try { + if (request.isStreaming() == false) { + throw new ElasticsearchStatusException( + "The [chat_completion] task type only supports streaming, please try again with the _stream API", + RestStatus.BAD_REQUEST + ); + } + + UnifiedCompletionAction.Request unifiedRequest; + try ( + var parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, request.getContent(), request.getContentType()) + ) { + unifiedRequest = UnifiedCompletionAction.Request.parseRequest( + request.getInferenceEntityId(), + 
request.getTaskType(), + request.getTimeout(), + parser + ); + } + + executeAsyncWithOrigin(client, INFERENCE_ORIGIN, UnifiedCompletionAction.INSTANCE, unifiedRequest, unifiedErrorFormatListener); + } catch (Exception e) { + unifiedErrorFormatListener.onFailure(e); + } + } + + private void sendInferenceActionRequest(InferenceActionProxy.Request request, ActionListener listener) + throws IOException { + InferenceAction.Request.Builder inferenceActionRequestBuilder; + try (var parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, request.getContent(), request.getContentType())) { + inferenceActionRequestBuilder = InferenceAction.Request.parseRequest( + request.getInferenceEntityId(), + request.getTaskType(), + parser + ); + inferenceActionRequestBuilder.setInferenceTimeout(request.getTimeout()).setStream(request.isStreaming()); + } + + executeAsyncWithOrigin(client, INFERENCE_ORIGIN, InferenceAction.INSTANCE, inferenceActionRequestBuilder.build(), listener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java index 9354ac2a83182..1144a11d86cc9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InferenceServiceResults; @@ -19,12 +21,19 @@ import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; +import java.util.concurrent.Flow; + public class TransportUnifiedCompletionInferenceAction extends BaseTransportInferenceAction { @Inject @@ -35,7 +44,10 @@ public TransportUnifiedCompletionInferenceAction( ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { super( UnifiedCompletionAction.NAME, @@ -46,7 +58,10 @@ public TransportUnifiedCompletionInferenceAction( serviceRegistry, inferenceStats, 
streamingTaskManager, - UnifiedCompletionAction.Request::new + UnifiedCompletionAction.Request::new, + inferenceServiceNodeLocalRateLimitCalculator, + nodeClient, + threadPool ); } @@ -77,4 +92,40 @@ protected void doInference( ) { service.unifiedCompletionInfer(model, request.getUnifiedCompletionRequest(), null, listener); } + + @Override + protected void doExecute(Task task, UnifiedCompletionAction.Request request, ActionListener listener) { + super.doExecute(task, request, listener.delegateResponse((l, e) -> l.onFailure(UnifiedChatCompletionException.fromThrowable(e)))); + } + + /** + * If we get any errors, either in {@link #doExecute} via the listener.onFailure or while streaming, make sure that they are formatted + * as {@link UnifiedChatCompletionException}. + */ + @Override + protected Flow.Publisher streamErrorHandler(Flow.Processor upstream) { + return downstream -> { + upstream.subscribe(new Flow.Subscriber<>() { + @Override + public void onSubscribe(Flow.Subscription subscription) { + downstream.onSubscribe(subscription); + } + + @Override + public void onNext(ChunkedToXContent item) { + downstream.onNext(item); + } + + @Override + public void onError(Throwable throwable) { + downstream.onError(UnifiedChatCompletionException.fromThrowable(throwable)); + } + + @Override + public void onComplete() { + downstream.onComplete(); + } + }); + }; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java index b857ef3068835..ed005a86d66b5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.inference.InferenceService; @@ -50,6 +51,7 @@ import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings; @@ -255,14 +257,13 @@ private void updateInClusterEndpoint( ActionListener listener ) throws IOException { // The model we are trying to update must have a trained model associated with it if it is an in-cluster deployment - throwIfTrainedModelDoesntExist(request); + var deploymentId = getDeploymentIdForInClusterEndpoint(existingParsedModel); + throwIfTrainedModelDoesntExist(request.getInferenceEntityId(), deploymentId); Map serviceSettings = request.getContentAsSettings().serviceSettings(); if (serviceSettings != null && serviceSettings.get(NUM_ALLOCATIONS) instanceof Integer numAllocations) { - UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request( - 
request.getInferenceEntityId() - ); + UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request(deploymentId); updateRequest.setNumberOfAllocations(numAllocations); var delegate = listener.delegateFailure((l2, response) -> { @@ -270,7 +271,8 @@ private void updateInClusterEndpoint( }); logger.info( - "Updating trained model deployment for inference entity [{}] with [{}] num_allocations", + "Updating trained model deployment [{}] for inference entity [{}] with [{}] num_allocations", + deploymentId, request.getInferenceEntityId(), numAllocations ); @@ -293,12 +295,26 @@ private boolean isInClusterService(String name) { return List.of(ElasticsearchInternalService.NAME, ElasticsearchInternalService.OLD_ELSER_SERVICE_NAME).contains(name); } - private void throwIfTrainedModelDoesntExist(UpdateInferenceModelAction.Request request) throws ElasticsearchStatusException { - var assignments = TrainedModelAssignmentUtils.modelAssignments(request.getInferenceEntityId(), clusterService.state()); + private String getDeploymentIdForInClusterEndpoint(Model model) { + if (model instanceof ElasticsearchInternalModel esModel) { + return esModel.mlNodeDeploymentId(); + } else { + throw new IllegalStateException( + Strings.format( + "Cannot update inference endpoint [%s]. Class [%s] is not an Elasticsearch internal model", + model.getInferenceEntityId(), + model.getClass().getSimpleName() + ) + ); + } + } + + private void throwIfTrainedModelDoesntExist(String inferenceEntityId, String deploymentId) throws ElasticsearchStatusException { + var assignments = TrainedModelAssignmentUtils.modelAssignments(deploymentId, clusterService.state()); if ((assignments == null || assignments.isEmpty())) { throw ExceptionsHelper.entityNotFoundException( Messages.MODEL_ID_DOES_NOT_MATCH_EXISTING_MODEL_IDS_BUT_MUST_FOR_IN_CLUSTER_SERVICE, - request.getInferenceEntityId() + inferenceEntityId ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java index 9b0b1104df660..fb796c2afdfeb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; +import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; @@ -69,7 +70,7 @@ public static EmbeddingType fromDenseVectorElementType(DenseVectorFieldMapper.El private List chunkedOffsets; private List>> floatResults; - private List>> byteResults; + private List>> byteResults; private List>> sparseResults; private AtomicArray errors; private ActionListener> finalListener; @@ -389,9 +390,9 @@ private ChunkedInferenceEmbeddingFloat mergeFloatResultsWithInputs( private ChunkedInferenceEmbeddingByte mergeByteResultsWithInputs( ChunkOffsetsAndInput chunks, - 
AtomicArray<List<InferenceTextEmbeddingByteResults.InferenceByteEmbedding>> debatchedResults + AtomicArray<List<InferenceByteEmbedding>> debatchedResults ) { - var all = new ArrayList<InferenceTextEmbeddingByteResults.InferenceByteEmbedding>(); + var all = new ArrayList<InferenceByteEmbedding>(); for (int i = 0; i < debatchedResults.length(); i++) { var subBatch = debatchedResults.get(i); all.addAll(subBatch); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceAPIClusterAwareRateLimitingFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceAPIClusterAwareRateLimitingFeature.java new file mode 100644 index 0000000000000..22de92526ba89 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceAPIClusterAwareRateLimitingFeature.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.common.util.FeatureFlag; +import org.elasticsearch.xpack.inference.InferencePlugin; + +/** + * Cluster aware rate limiting feature flag. When the feature is complete and fully rolled out, this flag will be removed. + * Enable the feature via the JVM option `-Des.inference_cluster_aware_rate_limiting_feature_flag_enabled=true`. + * + * This controls whether {@link InferenceServiceNodeLocalRateLimitCalculator} gets instantiated and + * added as an injectable {@link InferencePlugin} component. + */ +public class InferenceAPIClusterAwareRateLimitingFeature { + + public static final FeatureFlag INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG = new FeatureFlag( + "inference_cluster_aware_rate_limiting" + ); + + private InferenceAPIClusterAwareRateLimitingFeature() {} + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java new file mode 100644 index 0000000000000..4778e4cc6d30c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java @@ -0,0 +1,197 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.xpack.core.inference.action.BaseInferenceActionRequest; +import org.elasticsearch.xpack.inference.action.BaseTransportInferenceAction; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.SenderService; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Note: {@link InferenceAPIClusterAwareRateLimitingFeature} needs to be enabled for this class to get + * instantiated inside {@link org.elasticsearch.xpack.inference.InferencePlugin} and be available via dependency injection. + * + * Calculates and manages node-local rate limits for inference services based on changes in the cluster topology. + * This calculator derives a "node-local" rate limit by dividing the rate limit configured for a service/task type + * by the number of nodes assigned to that service/task type pair. Without this calculator, the rate limit stored + * in the inference endpoint configuration would effectively be multiplied by the number of nodes in the cluster (assuming a roughly + * uniform distribution of requests to the nodes in the cluster). + * + * The calculator works in conjunction with several other components: + * - {@link BaseTransportInferenceAction} - Uses the calculator to determine whether to reroute a request + * - {@link BaseInferenceActionRequest} - Tracks whether the request (an instance of a subclass of {@link BaseInferenceActionRequest}) + * has already been rerouted at least once + * - {@link HttpRequestSender} - Provides the original rate limits, which this calculator divides by the number of nodes + * responsible for a service/task type + */ +public class InferenceServiceNodeLocalRateLimitCalculator implements InferenceServiceRateLimitCalculator { + + public static final Integer DEFAULT_MAX_NODES_PER_GROUPING = 3; + + /** + * Configuration mapping services to their task type rate limiting settings.
+     * Each service can have multiple configs defining:
+     * - Which task types support request re-routing and "node-local" rate limit calculation
+     * - How many nodes should handle requests for each task type, based on cluster size (dynamically calculated or statically provided)
+     **/
+    static final Map<String, List<NodeLocalRateLimitConfig>> SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS = Map.of(
+        ElasticInferenceService.NAME,
+        // TODO: should probably be a map/set
+        List.of(new NodeLocalRateLimitConfig(TaskType.SPARSE_EMBEDDING, (numNodesInCluster) -> DEFAULT_MAX_NODES_PER_GROUPING))
+    );
+
+    record NodeLocalRateLimitConfig(TaskType taskType, MaxNodesPerGroupingStrategy maxNodesPerGroupingStrategy) {}
+
+    @FunctionalInterface
+    private interface MaxNodesPerGroupingStrategy {
+
+        Integer calculate(Integer numberOfNodesInCluster);
+
+    }
+
+    private static final Logger logger = LogManager.getLogger(InferenceServiceNodeLocalRateLimitCalculator.class);
+
+    private final InferenceServiceRegistry serviceRegistry;
+
+    private final ConcurrentHashMap<String, Map<TaskType, RateLimitAssignment>> serviceAssignments;
+
+    @Inject
+    public InferenceServiceNodeLocalRateLimitCalculator(ClusterService clusterService, InferenceServiceRegistry serviceRegistry) {
+        clusterService.addListener(this);
+        this.serviceRegistry = serviceRegistry;
+        this.serviceAssignments = new ConcurrentHashMap<>();
+    }
+
+    @Override
+    public void clusterChanged(ClusterChangedEvent event) {
+        boolean clusterTopologyChanged = event.nodesChanged();
+
+        // TODO: feature flag per node? We should not reroute to nodes not having eis and/or the inference plugin enabled
+        // Every node should land on the same grouping by calculation, so no need to put anything into the cluster state
+        if (clusterTopologyChanged) {
+            updateAssignments(event);
+        }
+    }
+
+    public boolean isTaskTypeReroutingSupported(String serviceName, TaskType taskType) {
+        return SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.getOrDefault(serviceName, Collections.emptyList())
+            .stream()
+            .anyMatch(rateLimitConfig -> taskType.equals(rateLimitConfig.taskType));
+    }
+
+    public RateLimitAssignment getRateLimitAssignment(String service, TaskType taskType) {
+        var assignmentsPerTaskType = serviceAssignments.get(service);
+
+        if (assignmentsPerTaskType == null) {
+            return null;
+        }
+
+        return assignmentsPerTaskType.get(taskType);
+    }
+
+    /**
+     * Updates instances of {@link RateLimitAssignment} for each service and task type when the cluster topology changes.
+     * For each service and supported task type, calculates which nodes should handle requests
+     * and what their local rate limits should be per inference endpoint.
+     */
+    private void updateAssignments(ClusterChangedEvent event) {
+        var newClusterState = event.state();
+        var nodes = newClusterState.nodes().getAllNodes();
+
+        // Sort nodes by id (every node lands on the same result)
+        var sortedNodes = nodes.stream().sorted(Comparator.comparing(DiscoveryNode::getId)).toList();
+
+        // Sort inference services by name (every node lands on the same result)
+        var sortedServices = new ArrayList<>(serviceRegistry.getServices().values());
+        sortedServices.sort(Comparator.comparing(InferenceService::name));
+
+        for (String serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) {
+            Optional<InferenceService> service = serviceRegistry.getService(serviceName);
+
+            if (service.isPresent()) {
+                var inferenceService = service.get();
+
+                for (NodeLocalRateLimitConfig rateLimitConfig : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName)) {
+                    Map<TaskType, RateLimitAssignment> perTaskTypeAssignments = new HashMap<>();
+                    TaskType taskType = rateLimitConfig.taskType();
+
+                    // Calculate node assignments needed for re-routing
+                    var assignedNodes = calculateServiceAssignment(rateLimitConfig.maxNodesPerGroupingStrategy(), sortedNodes);
+
+                    // Update rate limits to be "node-local"
+                    var numAssignedNodes = assignedNodes.size();
+                    updateRateLimits(inferenceService, numAssignedNodes);
+
+                    perTaskTypeAssignments.put(taskType, new RateLimitAssignment(assignedNodes));
+                    serviceAssignments.put(serviceName, perTaskTypeAssignments);
+                }
+            } else {
+                logger.warn(
+                    "Service [{}] is configured for node-local rate limiting but was not found in the service registry",
+                    serviceName
+                );
+            }
+        }
+    }
+
+    private List<DiscoveryNode> calculateServiceAssignment(
+        MaxNodesPerGroupingStrategy maxNodesPerGroupingStrategy,
+        List<DiscoveryNode> sortedNodes
+    ) {
+        int numberOfNodes = sortedNodes.size();
+        int nodesPerGrouping = Math.min(numberOfNodes, maxNodesPerGroupingStrategy.calculate(numberOfNodes));
+
+        List<DiscoveryNode> assignedNodes = new ArrayList<>();
+
+        // TODO: here we can probably be smarter: if |num nodes in cluster| > |num nodes per task types|
+        // -> make sure a service provider is not assigned the same nodes for all task types; only relevant as soon as we support more task
+        // types
+        for (int j = 0; j < nodesPerGrouping; j++) {
+            var assignedNode = sortedNodes.get(j % numberOfNodes);
+            assignedNodes.add(assignedNode);
+        }
+
+        return assignedNodes;
+    }
+
+    private void updateRateLimits(InferenceService service, int responsibleNodes) {
+        if ((service instanceof SenderService) == false) {
+            return;
+        }
+
+        SenderService senderService = (SenderService) service;
+        Sender sender = senderService.getSender();
+        // TODO: this needs to take in service and task type as soon as multiple services/task types are supported
+        sender.updateRateLimitDivisor(responsibleNodes);
+    }
+
+    InferenceServiceRegistry serviceRegistry() {
+        return serviceRegistry;
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceRateLimitCalculator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceRateLimitCalculator.java
new file mode 100644
index 0000000000000..e05637f629ec6
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceRateLimitCalculator.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
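Since every node sorts the cluster's nodes by id and applies the same selection, each node derives an identical grouping without going through the cluster state. A standalone sketch of that selection, mirroring calculateServiceAssignment above (node ids are made up):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

class AssignmentSketch {
    // Pick the first min(clusterSize, maxNodesPerGrouping) entries from the id-sorted node list.
    static List<String> assign(List<String> nodeIds, int maxNodesPerGrouping) {
        var sorted = nodeIds.stream().sorted(Comparator.naturalOrder()).toList();
        int nodesPerGrouping = Math.min(sorted.size(), maxNodesPerGrouping);
        List<String> assigned = new ArrayList<>();
        for (int j = 0; j < nodesPerGrouping; j++) {
            assigned.add(sorted.get(j % sorted.size()));
        }
        return assigned;
    }

    public static void main(String[] args) {
        // Deterministic on every node: prints [node-a, node-b, node-c]
        System.out.println(assign(List.of("node-c", "node-a", "node-d", "node-b"), 3));
    }
}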
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.inference.TaskType; + +public interface InferenceServiceRateLimitCalculator extends ClusterStateListener { + + boolean isTaskTypeReroutingSupported(String serviceName, TaskType taskType); + + RateLimitAssignment getRateLimitAssignment(String service, TaskType taskType); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/NoopNodeLocalRateLimitCalculator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/NoopNodeLocalRateLimitCalculator.java new file mode 100644 index 0000000000000..a07217d9e9af7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/NoopNodeLocalRateLimitCalculator.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.inference.TaskType; + +public class NoopNodeLocalRateLimitCalculator implements InferenceServiceRateLimitCalculator { + + @Override + public void clusterChanged(ClusterChangedEvent event) { + // Do nothing + } + + public boolean isTaskTypeReroutingSupported(String serviceName, TaskType taskType) { + return false; + } + + public RateLimitAssignment getRateLimitAssignment(String service, TaskType taskType) { + return null; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimitAssignment.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimitAssignment.java new file mode 100644 index 0000000000000..de8d85c96271c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimitAssignment.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.cluster.node.DiscoveryNode; + +import java.util.List; + +/** + * Record for storing rate limit assignment information. 
+ *
+ * @param responsibleNodes - nodes responsible for a certain service and task type
+ */
+public record RateLimitAssignment(List<DiscoveryNode> responsibleNodes) {}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java
index b74e473155aec..c7509b44ac1a8 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java
@@ -63,7 +63,7 @@ public RateLimiter(double accumulatedTokensLimit, double tokensPerTimeUnit, Time
         setRate(accumulatedTokensLimit, tokensPerTimeUnit, unit);
     }
 
-    public final synchronized void setRate(double newAccumulatedTokensLimit, double newTokensPerTimeUnit, TimeUnit newUnit) {
+    public synchronized void setRate(double newAccumulatedTokensLimit, double newTokensPerTimeUnit, TimeUnit newUnit) {
         Objects.requireNonNull(newUnit);
 
         if (newAccumulatedTokensLimit < 0) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java
index 7cad7c42bdcf1..6b1097256e97f 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java
@@ -12,9 +12,11 @@
 import org.elasticsearch.xpack.inference.external.action.ExecutableAction;
 import org.elasticsearch.xpack.inference.external.action.SenderExecutableAction;
 import org.elasticsearch.xpack.inference.external.http.sender.IbmWatsonxEmbeddingsRequestManager;
+import org.elasticsearch.xpack.inference.external.http.sender.IbmWatsonxRerankRequestManager;
 import org.elasticsearch.xpack.inference.external.http.sender.Sender;
 import org.elasticsearch.xpack.inference.services.ServiceComponents;
 import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel;
+import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel;
 
 import java.util.Map;
 import java.util.Objects;
@@ -22,7 +24,6 @@
 import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage;
 
 public class IbmWatsonxActionCreator implements IbmWatsonxActionVisitor {
-
     private final Sender sender;
     private final ServiceComponents serviceComponents;
 
@@ -41,6 +42,17 @@ public ExecutableAction create(IbmWatsonxEmbeddingsModel model, Map<String, Object> taskSettings) {
+    @Override
+    public ExecutableAction create(IbmWatsonxRerankModel model, Map<String, Object> taskSettings) {
+        var overriddenModel = IbmWatsonxRerankModel.of(model, taskSettings);
+        var requestCreator = IbmWatsonxRerankRequestManager.of(overriddenModel, serviceComponents.threadPool());
+        var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage(
+            overriddenModel.getServiceSettings().uri(),
+            "Ibm Watsonx rerank"
+        );
+        return new SenderExecutableAction(sender, requestCreator, failedToSendRequestErrorMessage);
+    }
+
     protected IbmWatsonxEmbeddingsRequestManager getEmbeddingsRequestManager(
         IbmWatsonxEmbeddingsModel model,
         Truncator truncator,
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java
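setRate above is no longer final (so subclasses and test doubles can override it), and the endpoint handlers later in this diff re-rate existing limiters when the node-local divisor changes. A minimal usage sketch under the constructor and setRate signatures shown in this hunk; the limits and the token bucket size of 1 are purely illustrative:

import java.util.concurrent.TimeUnit;

import org.elasticsearch.xpack.inference.common.RateLimiter;

class ReRateSketch {
    public static void main(String[] args) {
        double configuredRequestsPerMinute = 3000; // illustrative endpoint limit
        var limiter = new RateLimiter(1, configuredRequestsPerMinute, TimeUnit.MINUTES);
        int divisor = 3;                           // three responsible nodes
        // Re-rate the existing limiter instead of rebuilding it: ~1000 requests/min per node.
        limiter.setRate(1, configuredRequestsPerMinute / divisor, TimeUnit.MINUTES);
    }
}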
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java index 0a13ec2fb4645..474533040e0c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java @@ -9,9 +9,12 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import java.util.Map; public interface IbmWatsonxActionVisitor { ExecutableAction create(IbmWatsonxEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(IbmWatsonxRerankModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java index ec4550b036d23..c8e544c26f293 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java @@ -88,6 +88,11 @@ protected AmazonBedrockRequestSender( ); } + @Override + public void updateRateLimitDivisor(int rateLimitDivisor) { + executorService.updateRateLimitDivisor(rateLimitDivisor); + } + @Override public void start() { if (started.compareAndSet(false, true)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicResponseHandler.java index d9a78a56af0d6..915b0bf412a03 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicResponseHandler.java @@ -44,16 +44,8 @@ public class AnthropicResponseHandler extends BaseResponseHandler { static final String SERVER_BUSY = "Received an Anthropic server is temporarily overloaded status code"; - private final boolean canHandleStreamingResponses; - public AnthropicResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponses) { - super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse); - this.canHandleStreamingResponses = canHandleStreamingResponses; - } - - @Override - public boolean canHandleStreamingResponses() { - return canHandleStreamingResponses; + super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse, canHandleStreamingResponses); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java index e3a74785caa4b..9227d55dc8938 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java @@ -34,16 +34,9 @@ public class CohereResponseHandler extends BaseResponseHandler { static final String TEXTS_ARRAY_TOO_LARGE_MESSAGE_MATCHER = "invalid request: total number of texts must be at most"; static final String TEXTS_ARRAY_ERROR_MESSAGE = "Received a texts array too large response"; - private final boolean canHandleStreamingResponse; public CohereResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponse) { - super(requestType, parseFunction, CohereErrorResponseEntity::fromResponse); - this.canHandleStreamingResponse = canHandleStreamingResponse; - } - - @Override - public boolean canHandleStreamingResponses() { - return canHandleStreamingResponse; + super(requestType, parseFunction, CohereErrorResponseEntity::fromResponse, canHandleStreamingResponse); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceResponseHandler.java index b11b4a743fb27..bd34e746cb2f8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceResponseHandler.java @@ -21,6 +21,10 @@ public ElasticInferenceServiceResponseHandler(String requestType, ResponseParser super(requestType, parseFunction, ElasticInferenceServiceErrorResponseEntity::fromResponse); } + public ElasticInferenceServiceResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponses) { + super(requestType, parseFunction, ElasticInferenceServiceErrorResponseEntity::fromResponse, canHandleStreamingResponses); + } + @Override protected void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException { if (result.isSuccessfulResponse()) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java index c0bccb9b2cd49..a240035468b8a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceUnifiedChatCompletionResponseHandler.java @@ -8,33 +8,79 @@ package org.elasticsearch.xpack.inference.external.elastic; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.openai.OpenAiUnifiedStreamingProcessor; import 
org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.response.elastic.ElasticInferenceServiceErrorResponseEntity; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; +import java.util.Locale; import java.util.concurrent.Flow; +import static org.elasticsearch.core.Strings.format; + public class ElasticInferenceServiceUnifiedChatCompletionResponseHandler extends ElasticInferenceServiceResponseHandler { public ElasticInferenceServiceUnifiedChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { - super(requestType, parseFunction); - } - - @Override - public boolean canHandleStreamingResponses() { - return true; + super(requestType, parseFunction, true); } @Override public InferenceServiceResults parseResult(Request request, Flow.Publisher flow) { var serverSentEventProcessor = new ServerSentEventProcessor(new ServerSentEventParser()); - var openAiProcessor = new OpenAiUnifiedStreamingProcessor(); // EIS uses the unified API spec + // EIS uses the unified API spec + var openAiProcessor = new OpenAiUnifiedStreamingProcessor((m, e) -> buildMidStreamError(request, m, e)); flow.subscribe(serverSentEventProcessor); serverSentEventProcessor.subscribe(openAiProcessor); return new StreamingUnifiedChatCompletionResults(openAiProcessor); } + + @Override + protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { + assert request.isStreaming() : "Only streaming requests support this format"; + var responseStatusCode = result.response().getStatusLine().getStatusCode(); + if (request.isStreaming()) { + var restStatus = toRestStatus(responseStatusCode); + return new UnifiedChatCompletionException( + restStatus, + errorMessage(message, request, result, errorResponse, responseStatusCode), + "error", + restStatus.name().toLowerCase(Locale.ROOT) + ); + } else { + return super.buildError(message, request, result, errorResponse); + } + } + + private static Exception buildMidStreamError(Request request, String message, Exception e) { + var errorResponse = ElasticInferenceServiceErrorResponseEntity.fromString(message); + if (errorResponse.errorStructureFound()) { + return new UnifiedChatCompletionException( + RestStatus.INTERNAL_SERVER_ERROR, + format( + "%s for request from inference entity id [%s]. 
Error message: [%s]", + SERVER_ERROR_OBJECT, + request.getInferenceEntityId(), + errorResponse.getErrorMessage() + ), + "error", + "stream_error" + ); + } else if (e != null) { + return UnifiedChatCompletionException.fromThrowable(e); + } else { + return new UnifiedChatCompletionException( + RestStatus.INTERNAL_SERVER_ERROR, + format("%s for request from inference entity id [%s]", SERVER_ERROR_OBJECT, request.getInferenceEntityId()), + "error", + "stream_error" + ); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java index d61e82cb83b45..a22be46bf7576 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java @@ -28,7 +28,6 @@ public class GoogleAiStudioResponseHandler extends BaseResponseHandler { static final String GOOGLE_AI_STUDIO_UNAVAILABLE = "The Google AI Studio service may be temporarily overloaded or down"; - private final boolean canHandleStreamingResponses; private final CheckedFunction content; public GoogleAiStudioResponseHandler(String requestType, ResponseParser parseFunction) { @@ -44,8 +43,7 @@ public GoogleAiStudioResponseHandler( boolean canHandleStreamingResponses, CheckedFunction content ) { - super(requestType, parseFunction, GoogleAiStudioErrorResponseEntity::fromResponse); - this.canHandleStreamingResponses = canHandleStreamingResponses; + super(requestType, parseFunction, GoogleAiStudioErrorResponseEntity::fromResponse, canHandleStreamingResponses); this.content = content; } @@ -88,11 +86,6 @@ private static String resourceNotFoundError(Request request) { return format("Resource not found at [%s]", request.getURI()); } - @Override - public boolean canHandleStreamingResponses() { - return canHandleStreamingResponses; - } - @Override public InferenceServiceResults parseResult(Request request, Flow.Publisher flow) { var serverSentEventProcessor = new ServerSentEventProcessor(new ServerSentEventParser()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java index 63c042ce8a623..6c7c6e0d114c7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java @@ -21,6 +21,8 @@ public interface RequestExecutor { void shutdown(); + void updateRateLimitDivisor(int newDivisor); + boolean isShutdown(); boolean isTerminated(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java index 1b0dd893ada6f..cb5ed53fc5587 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -38,11 +38,27 @@ public abstract 
class BaseResponseHandler implements ResponseHandler { protected final String requestType; private final ResponseParser parseFunction; private final Function errorParseFunction; + private final boolean canHandleStreamingResponses; public BaseResponseHandler(String requestType, ResponseParser parseFunction, Function errorParseFunction) { + this(requestType, parseFunction, errorParseFunction, false); + } + + public BaseResponseHandler( + String requestType, + ResponseParser parseFunction, + Function errorParseFunction, + boolean canHandleStreamingResponses + ) { this.requestType = Objects.requireNonNull(requestType); this.parseFunction = Objects.requireNonNull(parseFunction); this.errorParseFunction = Objects.requireNonNull(errorParseFunction); + this.canHandleStreamingResponses = canHandleStreamingResponses; + } + + @Override + public boolean canHandleStreamingResponses() { + return canHandleStreamingResponses; } @Override @@ -91,31 +107,24 @@ protected Exception buildError(String message, Request request, HttpResult resul protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { var responseStatusCode = result.response().getStatusLine().getStatusCode(); + return new ElasticsearchStatusException( + errorMessage(message, request, result, errorResponse, responseStatusCode), + toRestStatus(responseStatusCode) + ); + } - if (errorResponse == null + protected String errorMessage(String message, Request request, HttpResult result, ErrorResponse errorResponse, int statusCode) { + return (errorResponse == null || errorResponse.errorStructureFound() == false - || Strings.isNullOrEmpty(errorResponse.getErrorMessage())) { - return new ElasticsearchStatusException( - format( - "%s for request from inference entity id [%s] status [%s]", + || Strings.isNullOrEmpty(errorResponse.getErrorMessage())) + ? format("%s for request from inference entity id [%s] status [%s]", message, request.getInferenceEntityId(), statusCode) + : format( + "%s for request from inference entity id [%s] status [%s]. Error message: [%s]", message, request.getInferenceEntityId(), - responseStatusCode - ), - toRestStatus(responseStatusCode) - ); - } - - return new ElasticsearchStatusException( - format( - "%s for request from inference entity id [%s] status [%s]. Error message: [%s]", - message, - request.getInferenceEntityId(), - responseStatusCode, - errorResponse.getErrorMessage() - ), - toRestStatus(responseStatusCode) - ); + statusCode, + errorResponse.getErrorMessage() + ); } public static RestStatus toRestStatus(int statusCode) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java index 35fe241ffae4f..0452391a76023 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java @@ -52,11 +52,8 @@ public interface ResponseHandler { /** * Returns {@code true} if the response handler can handle streaming results, or {@code false} if can only parse the entire payload. - * Defaults to {@code false}. */ - default boolean canHandleStreamingResponses() { - return false; - } + boolean canHandleStreamingResponses(); /** * A method for parsing the streamed response from the server. 
Implementations must invoke the diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java index 42671b8166537..689c9e2ec8fc1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java @@ -111,6 +111,10 @@ public void start() { } } + public void updateRateLimitDivisor(int rateLimitDivisor) { + service.updateRateLimitDivisor(rateLimitDivisor); + } + private void waitForStartToComplete() { try { if (startCompleted.await(START_COMPLETED_WAIT_TIME.getSeconds(), TimeUnit.SECONDS) == false) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java new file mode 100644 index 0000000000000..f503771510e72 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.ibmwatsonx.IbmWatsonxResponseHandler; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequest; +import org.elasticsearch.xpack.inference.external.response.ibmwatsonx.IbmWatsonxRankedResponseEntity; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class IbmWatsonxRerankRequestManager extends IbmWatsonxRequestManager { + private static final Logger logger = LogManager.getLogger(IbmWatsonxRerankRequestManager.class); + private static final ResponseHandler HANDLER = createIbmWatsonxResponseHandler(); + + private static ResponseHandler createIbmWatsonxResponseHandler() { + return new IbmWatsonxResponseHandler( + "ibm watsonx rerank", + (request, response) -> IbmWatsonxRankedResponseEntity.fromResponse(response) + ); + } + + public static IbmWatsonxRerankRequestManager of(IbmWatsonxRerankModel model, ThreadPool threadPool) { + return new IbmWatsonxRerankRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool)); + } + + private final IbmWatsonxRerankModel model; + + public IbmWatsonxRerankRequestManager(IbmWatsonxRerankModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = model; + } + + @Override + public void execute( + InferenceInputs inferenceInputs, + 
RequestSender requestSender, + Supplier hasRequestCompletedFunction, + ActionListener listener + ) { + var rerankInput = QueryAndDocsInputs.of(inferenceInputs); + + execute( + new ExecutableInferenceRequest( + requestSender, + logger, + getRerankRequest(rerankInput.getQuery(), rerankInput.getChunks(), model), + HANDLER, + hasRequestCompletedFunction, + listener + ) + ); + } + + protected IbmWatsonxRerankRequest getRerankRequest(String query, List chunks, IbmWatsonxRerankModel model) { + return new IbmWatsonxRerankRequest(query, chunks, model); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java index ad1324d0a315f..5ec2acab70596 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java @@ -19,6 +19,7 @@ import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.AdjustableCapacityBlockingQueue; +import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; import org.elasticsearch.xpack.inference.common.RateLimiter; import org.elasticsearch.xpack.inference.external.http.RequestExecutor; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; @@ -36,6 +37,7 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; @@ -92,12 +94,22 @@ interface RateLimiterCreator { RateLimiter create(double accumulatedTokensLimit, double tokensPerTimeUnit, TimeUnit unit); } + // TODO: for later (after 8.18) + // TODO: pass in divisor to RateLimiterCreator + // TODO: another map for service/task-type-key -> set of RateLimitingEndpointHandler (used for updates; update divisor and then update + // all endpoint handlers) + // TODO: one map for service/task-type-key -> divisor (this gets also read when we create an inference endpoint) + // TODO: divisor value read/writes need to be synchronized in some way + // default for testing static final RateLimiterCreator DEFAULT_RATE_LIMIT_CREATOR = RateLimiter::new; private static final Logger logger = LogManager.getLogger(RequestExecutorService.class); private static final TimeValue RATE_LIMIT_GROUP_CLEANUP_INTERVAL = TimeValue.timeValueDays(1); private final ConcurrentMap rateLimitGroupings = new ConcurrentHashMap<>(); + // TODO: add one atomic integer (number of nodes); also explain the assumption and why this works + // TODO: document that this impacts chat completion (and increase the default rate limit) + private final AtomicInteger rateLimitDivisor = new AtomicInteger(1); private final ThreadPool threadPool; private final CountDownLatch startupLatch; private final CountDownLatch terminationLatch = new CountDownLatch(1); @@ -174,6 +186,19 @@ public int queueSize() { return rateLimitGroupings.values().stream().mapToInt(RateLimitingEndpointHandler::queueSize).sum(); } + @Override + public void updateRateLimitDivisor(int numResponsibleNodes) { + // in the unlikely case where we get an invalid value, we'll just 
ignore it + if (numResponsibleNodes <= 0) { + return; + } + + rateLimitDivisor.set(numResponsibleNodes); + for (var rateLimitingEndpointHandler : rateLimitGroupings.values()) { + rateLimitingEndpointHandler.updateTokensPerTimeUnit(rateLimitDivisor.get()); + } + } + /** * Begin servicing tasks. *

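Two properties of the divisor update are worth spelling out. First, the update fans out from the calculator to every per-endpoint rate limiter; second, the handler in the next hunk recomputes from the original configured rate, so repeated updates do not compound. A sketch of both points with illustrative values:

// Update path (all of these types appear in this diff):
//   InferenceServiceNodeLocalRateLimitCalculator.updateRateLimits(service, numAssignedNodes)
//     -> Sender.updateRateLimitDivisor(n)
//       -> RequestExecutorService.updateRateLimitDivisor(n)
//         -> RateLimitingEndpointHandler.updateTokensPerTimeUnit(n) -> RateLimiter.setRate(...)
class DivisorUpdateMath {
    public static void main(String[] args) {
        long originalRequestsPerTimeUnit = 900;                     // kept verbatim from RateLimitSettings
        System.out.println(originalRequestsPerTimeUnit / 3.0);      // first update: 300.0
        System.out.println(originalRequestsPerTimeUnit / 3.0);      // second update: still 300.0
        // Dividing the *current* rate instead of the original would drift: 900 -> 300 -> 100.
    }
}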
    @@ -299,9 +324,12 @@ public void execute( clock, requestManager.rateLimitSettings(), this::isShutdown, - rateLimiterCreator + rateLimiterCreator, + rateLimitDivisor.get() ); + // TODO: add or create/compute if absent set for new map (service/task-type-key -> rate limit endpoint handler) + endpointHandler.init(); return endpointHandler; }); @@ -314,7 +342,7 @@ public void execute( * This allows many requests to be serialized if they are being sent too fast. If the rate limit has not been met they will be sent * as soon as a thread is available. */ - private static class RateLimitingEndpointHandler { + static class RateLimitingEndpointHandler { private static final TimeValue NO_TASKS_AVAILABLE = TimeValue.MAX_VALUE; private static final TimeValue EXECUTED_A_TASK = TimeValue.ZERO; @@ -329,6 +357,8 @@ private static class RateLimitingEndpointHandler { private final Clock clock; private final RateLimiter rateLimiter; private final RequestExecutorServiceSettings requestExecutorServiceSettings; + private final RateLimitSettings rateLimitSettings; + private final Long originalRequestsPerTimeUnit; RateLimitingEndpointHandler( String id, @@ -338,7 +368,8 @@ private static class RateLimitingEndpointHandler { Clock clock, RateLimitSettings rateLimitSettings, Supplier isShutdownMethod, - RateLimiterCreator rateLimiterCreator + RateLimiterCreator rateLimiterCreator, + Integer rateLimitDivisor ) { this.requestExecutorServiceSettings = Objects.requireNonNull(settings); this.id = Objects.requireNonNull(id); @@ -346,6 +377,8 @@ private static class RateLimitingEndpointHandler { this.requestSender = Objects.requireNonNull(requestSender); this.clock = Objects.requireNonNull(clock); this.isShutdownMethod = Objects.requireNonNull(isShutdownMethod); + this.rateLimitSettings = Objects.requireNonNull(rateLimitSettings); + this.originalRequestsPerTimeUnit = rateLimitSettings.requestsPerTimeUnit(); Objects.requireNonNull(rateLimitSettings); Objects.requireNonNull(rateLimiterCreator); @@ -355,12 +388,29 @@ private static class RateLimitingEndpointHandler { rateLimitSettings.timeUnit() ); + this.updateTokensPerTimeUnit(rateLimitDivisor); } public void init() { requestExecutorServiceSettings.registerQueueCapacityCallback(id, this::onCapacityChange); } + /** + * This method is solely called by {@link InferenceServiceNodeLocalRateLimitCalculator} to update + * rate limits, so they're "node-local". + * The general idea is described in {@link InferenceServiceNodeLocalRateLimitCalculator} in more detail. 
+ * + * @param divisor - divisor to divide the initial requests per time unit by + */ + public synchronized void updateTokensPerTimeUnit(Integer divisor) { + double updatedTokensPerTimeUnit = (double) originalRequestsPerTimeUnit / divisor; + rateLimiter.setRate(ACCUMULATED_TOKENS_LIMIT, updatedTokensPerTimeUnit, rateLimitSettings.timeUnit()); + } + + public String id() { + return id; + } + private void onCapacityChange(int capacity) { logger.debug(() -> Strings.format("Executor service grouping [%s] setting queue capacity to [%s]", id, capacity)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java index 853d6fdcb2473..aa606e8c7cc5c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java @@ -30,4 +30,6 @@ void execute( // executePreparedRequest() which will execute all prepared requests aka sends the batch String inferenceEntityId(); + + // TODO: add service() and taskType() } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java index 3975a554586b7..fed92263f9999 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -27,6 +27,8 @@ void send( ActionListener listener ); + void updateRateLimitDivisor(int rateLimitDivisor); + void sendWithoutQueuing( Logger logger, Request request, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java index 6d1d3fb2a4f91..1f28a8cd61026 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java @@ -17,7 +17,6 @@ import static org.elasticsearch.core.Strings.format; public class IbmWatsonxResponseHandler extends BaseResponseHandler { - public IbmWatsonxResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, IbmWatsonxErrorResponseEntity::fromResponse); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java index 7607e5e4ed3a2..99f2a7c31e7dd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java @@ -8,15 +8,26 @@ package org.elasticsearch.xpack.inference.external.openai; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import 
org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.request.Request; +import java.util.function.Function; + public class OpenAiChatCompletionResponseHandler extends OpenAiResponseHandler { public OpenAiChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, true); } + protected OpenAiChatCompletionResponseHandler( + String requestType, + ResponseParser parseFunction, + Function errorParseFunction + ) { + super(requestType, parseFunction, errorParseFunction, true); + } + @Override protected RetryException buildExceptionHandling429(Request request, HttpResult result) { // We don't retry, if the chat completion input is too large diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index cf867fb1a0ab0..8698955868a70 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.request.Request; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; import java.util.concurrent.Flow; +import java.util.function.Function; import static org.elasticsearch.xpack.inference.external.http.retry.ResponseHandlerUtils.getFirstHeaderOrUnknown; @@ -41,11 +43,17 @@ public class OpenAiResponseHandler extends BaseResponseHandler { static final String OPENAI_SERVER_BUSY = "Received a server busy error status code"; - private final boolean canHandleStreamingResponses; - public OpenAiResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponses) { - super(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse); - this.canHandleStreamingResponses = canHandleStreamingResponses; + this(requestType, parseFunction, ErrorMessageResponseEntity::fromResponse, canHandleStreamingResponses); + } + + protected OpenAiResponseHandler( + String requestType, + ResponseParser parseFunction, + Function errorParseFunction, + boolean canHandleStreamingResponses + ) { + super(requestType, parseFunction, errorParseFunction, canHandleStreamingResponses); } /** @@ -121,11 +129,6 @@ static String buildRateLimitErrorMessage(HttpResult result) { return RATE_LIMIT + ". 
" + usageMessage; } - @Override - public boolean canHandleStreamingResponses() { - return canHandleStreamingResponses; - } - @Override public InferenceServiceResults parseResult(Request request, Flow.Publisher flow) { var serverSentEventProcessor = new ServerSentEventProcessor(new ServerSentEventParser()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java index fce2556efc5e0..b2096253bdeb7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandler.java @@ -7,28 +7,171 @@ package org.elasticsearch.xpack.inference.external.openai; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventParser; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventProcessor; +import java.util.Locale; +import java.util.Objects; +import java.util.Optional; import java.util.concurrent.Flow; +import static org.elasticsearch.core.Strings.format; + public class OpenAiUnifiedChatCompletionResponseHandler extends OpenAiChatCompletionResponseHandler { public OpenAiUnifiedChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { - super(requestType, parseFunction); + super(requestType, parseFunction, OpenAiErrorResponse::fromResponse); } @Override public InferenceServiceResults parseResult(Request request, Flow.Publisher flow) { var serverSentEventProcessor = new ServerSentEventProcessor(new ServerSentEventParser()); - var openAiProcessor = new OpenAiUnifiedStreamingProcessor(); + var openAiProcessor = new OpenAiUnifiedStreamingProcessor((m, e) -> buildMidStreamError(request, m, e)); flow.subscribe(serverSentEventProcessor); serverSentEventProcessor.subscribe(openAiProcessor); return new StreamingUnifiedChatCompletionResults(openAiProcessor); } + + @Override + protected Exception buildError(String message, Request request, HttpResult result, ErrorResponse errorResponse) { + assert request.isStreaming() : "Only streaming requests support this format"; + var responseStatusCode = result.response().getStatusLine().getStatusCode(); + if (request.isStreaming()) { + var errorMessage = errorMessage(message, request, result, errorResponse, responseStatusCode); + var 
restStatus = toRestStatus(responseStatusCode);
+            return errorResponse instanceof OpenAiErrorResponse oer
+                ? new UnifiedChatCompletionException(restStatus, errorMessage, oer.type(), oer.code(), oer.param())
+                : new UnifiedChatCompletionException(
+                    restStatus,
+                    errorMessage,
+                    errorResponse != null ? errorResponse.getClass().getSimpleName() : "unknown",
+                    restStatus.name().toLowerCase(Locale.ROOT)
+                );
+        } else {
+            return super.buildError(message, request, result, errorResponse);
+        }
+    }
+
+    private static Exception buildMidStreamError(Request request, String message, Exception e) {
+        var errorResponse = OpenAiErrorResponse.fromString(message);
+        if (errorResponse instanceof OpenAiErrorResponse oer) {
+            return new UnifiedChatCompletionException(
+                RestStatus.INTERNAL_SERVER_ERROR,
+                format(
+                    "%s for request from inference entity id [%s]. Error message: [%s]",
+                    SERVER_ERROR_OBJECT,
+                    request.getInferenceEntityId(),
+                    errorResponse.getErrorMessage()
+                ),
+                oer.type(),
+                oer.code(),
+                oer.param()
+            );
+        } else if (e != null) {
+            return UnifiedChatCompletionException.fromThrowable(e);
+        } else {
+            return new UnifiedChatCompletionException(
+                RestStatus.INTERNAL_SERVER_ERROR,
+                format("%s for request from inference entity id [%s]", SERVER_ERROR_OBJECT, request.getInferenceEntityId()),
+                errorResponse != null ? errorResponse.getClass().getSimpleName() : "unknown",
+                "stream_error"
+            );
+        }
+    }
+
+    private static class OpenAiErrorResponse extends ErrorResponse {
+        private static final ConstructingObjectParser<Optional<ErrorResponse>, Void> ERROR_PARSER = new ConstructingObjectParser<>(
+            "open_ai_error",
+            true,
+            args -> Optional.ofNullable((OpenAiErrorResponse) args[0])
+        );
+        private static final ConstructingObjectParser<OpenAiErrorResponse, Void> ERROR_BODY_PARSER = new ConstructingObjectParser<>(
+            "open_ai_error",
+            true,
+            args -> new OpenAiErrorResponse((String) args[0], (String) args[1], (String) args[2], (String) args[3])
+        );
+
+        static {
+            ERROR_BODY_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("message"));
+            ERROR_BODY_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("code"));
+            ERROR_BODY_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("param"));
+            ERROR_BODY_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("type"));
+
+            ERROR_PARSER.declareObjectOrNull(
+                ConstructingObjectParser.optionalConstructorArg(),
+                ERROR_BODY_PARSER,
+                null,
+                new ParseField("error")
+            );
+        }
+
+        private static ErrorResponse fromResponse(HttpResult response) {
+            try (
+                XContentParser parser = XContentFactory.xContent(XContentType.JSON)
+                    .createParser(XContentParserConfiguration.EMPTY, response.body())
+            ) {
+                return ERROR_PARSER.apply(parser, null).orElse(ErrorResponse.UNDEFINED_ERROR);
+            } catch (Exception e) {
+                // swallow the error
+            }
+
+            return ErrorResponse.UNDEFINED_ERROR;
+        }
+
+        private static ErrorResponse fromString(String response) {
+            try (
+                XContentParser parser = XContentFactory.xContent(XContentType.JSON)
+                    .createParser(XContentParserConfiguration.EMPTY, response)
+            ) {
+                return ERROR_PARSER.apply(parser, null).orElse(ErrorResponse.UNDEFINED_ERROR);
+            } catch (Exception e) {
+                // swallow the error
+            }
+
+            return ErrorResponse.UNDEFINED_ERROR;
+        }
+
+        @Nullable
+        private final String code;
+        @Nullable
+        private final String param;
+        private final String type;
+
+        OpenAiErrorResponse(String errorMessage, @Nullable String code, @Nullable String param, String type) {
super(errorMessage); + this.code = code; + this.param = param; + this.type = Objects.requireNonNull(type); + } + + @Nullable + public String code() { + return code; + } + + @Nullable + public String param() { + return param; + } + + public String type() { + return type; + } + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedStreamingProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedStreamingProcessor.java index 599d71df3dcfa..bfd4456279a8a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedStreamingProcessor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedStreamingProcessor.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.core.inference.results.StreamingUnifiedChatCompletionResults; import org.elasticsearch.xpack.inference.common.DelegatingProcessor; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; +import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEventField; import java.io.IOException; import java.util.ArrayDeque; @@ -28,6 +29,7 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.LinkedBlockingDeque; +import java.util.function.BiFunction; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; @@ -57,7 +59,13 @@ public class OpenAiUnifiedStreamingProcessor extends DelegatingProcessor errorParser; private final Deque buffer = new LinkedBlockingDeque<>(); + private volatile boolean previousEventWasError = false; + + public OpenAiUnifiedStreamingProcessor(BiFunction errorParser) { + this.errorParser = errorParser; + } @Override protected void upstreamRequest(long n) { @@ -71,7 +79,25 @@ protected void upstreamRequest(long n) { @Override protected void next(Deque item) throws Exception { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); - var results = parseEvent(item, OpenAiUnifiedStreamingProcessor::parse, parserConfig, logger); + + var results = new ArrayDeque(item.size()); + for (var event : item) { + if (ServerSentEventField.EVENT == event.name() && "error".equals(event.value())) { + previousEventWasError = true; + } else if (ServerSentEventField.DATA == event.name() && event.hasValue()) { + if (previousEventWasError) { + throw errorParser.apply(event.value(), null); + } + + try { + var delta = parse(parserConfig, event); + delta.forEachRemaining(results::offer); + } catch (Exception e) { + logger.warn("Failed to parse event from inference provider: {}", event); + throw errorParser.apply(event.value(), e); + } + } + } if (results.isEmpty()) { upstream().request(1); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java index 6acaf74a33338..18fc7d9f8c32d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java +++ 
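For orientation, the server-sent event shapes the streaming processor above distinguishes look roughly like this (payloads are illustrative, not captured traffic):

// An error announced mid-stream arrives as an `event: error` line followed by a `data:` payload:
//
//   event: error
//   data: {"error":{"message":"quota exceeded","type":"insufficient_quota","code":"quota_exceeded"}}
//
// Seeing the `event: error` line sets previousEventWasError, so the next data event is handed to
// errorParser instead of the chunk parser. Ordinary completion chunks arrive as plain data events:
//
//   data: {"id":"chatcmpl-1","object":"chat.completion.chunk","choices":[...]}
//
// and a data event that fails to parse is likewise wrapped via errorParser.apply(event.value(), e).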
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java @@ -55,7 +55,11 @@ public HttpRequest createHttpRequest() { var httpPost = new HttpPost(uri); var usageContext = inputTypeToUsageContext(inputType); var requestEntity = Strings.toString( - new ElasticInferenceServiceSparseEmbeddingsRequestEntity(truncationResult.input(), usageContext) + new ElasticInferenceServiceSparseEmbeddingsRequestEntity( + truncationResult.input(), + model.getServiceSettings().modelId(), + usageContext + ) ); ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java index deecd9186aca5..0ba6b46da05e5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java @@ -18,14 +18,17 @@ public record ElasticInferenceServiceSparseEmbeddingsRequestEntity( List inputs, + String modelId, @Nullable ElasticInferenceServiceUsageContext usageContext ) implements ToXContentObject { private static final String INPUT_FIELD = "input"; + private static final String MODEL_FIELD = "model"; private static final String USAGE_CONTEXT = "usage_context"; public ElasticInferenceServiceSparseEmbeddingsRequestEntity { Objects.requireNonNull(inputs); + Objects.requireNonNull(modelId); } @Override @@ -33,14 +36,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.startArray(INPUT_FIELD); - { - for (String input : inputs) { - builder.value(input); - } + for (String input : inputs) { + builder.value(input); } builder.endArray(); + builder.field(MODEL_FIELD, modelId); + // optional field if ((usageContext == ElasticInferenceServiceUsageContext.UNSPECIFIED) == false) { builder.field(USAGE_CONTEXT, usageContext); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java new file mode 100644 index 0000000000000..cfc1f367be45c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
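With the model field added above, a serialized sparse-embeddings request body now looks roughly like this (field values are illustrative; usage_context is still omitted when it is UNSPECIFIED):

{
  "input": ["some text to embed"],
  "model": "my-elser-model-id",
  "usage_context": "search"
}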
+ */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class IbmWatsonxRerankRequest implements IbmWatsonxRequest { + + private final String query; + private final List<String> input; + private final IbmWatsonxRerankTaskSettings taskSettings; + private final IbmWatsonxRerankModel model; + + public IbmWatsonxRerankRequest(String query, List<String> input, IbmWatsonxRerankModel model) { + Objects.requireNonNull(model); + + this.input = Objects.requireNonNull(input); + this.query = Objects.requireNonNull(query); + taskSettings = model.getTaskSettings(); + this.model = model; + } + + @Override + public HttpRequest createHttpRequest() { + URI uri; + + try { + uri = new URI(model.uri().toString()); + } catch (URISyntaxException ex) { + throw new IllegalArgumentException("cannot parse URI pattern"); + } + + HttpPost httpPost = new HttpPost(uri); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString( + new IbmWatsonxRerankRequestEntity( + query, + input, + taskSettings, + model.getServiceSettings().modelId(), + model.getServiceSettings().projectId() + ) + ).getBytes(StandardCharsets.UTF_8) + ); + + httpPost.setEntity(byteEntity); + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + + decorateWithAuth(httpPost); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + public void decorateWithAuth(HttpPost httpPost) { + IbmWatsonxRequest.decorateWithBearerToken(httpPost, model.getSecretSettings(), model.getInferenceEntityId()); + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public URI getURI() { + return model.uri(); + } + + @Override + public Request truncate() { + return this; + } + + public String getQuery() { + return query; + } + + public List<String> getInput() { + return input; + } + + public IbmWatsonxRerankModel getModel() { + return model; + } + + @Override + public boolean[] getTruncationInfo() { + return null; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java new file mode 100644 index 0000000000000..36e5951ebdc15 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
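
The entity implementation that follows serializes to a payload of roughly this shape. This is a hedged sketch assembled from the field names in the diff and the example values in the rerank response Javadoc further down; the model and project ids are placeholders, the parameter values are invented for illustration, and the text block is plain Java rather than Elasticsearch code.

class WatsonxRerankPayloadSketch {

    // Field names and nesting mirror IbmWatsonxRerankRequestEntity#toXContent:
    // model_id, query, inputs[].text, project_id, parameters.return_options.
    static final String EXAMPLE = """
        {
          "model_id": "<model-id>",
          "query": "database",
          "inputs": [ { "text": "greenland" }, { "text": "mysql" } ],
          "project_id": "<project-id>",
          "parameters": {
            "truncate_input_tokens": 512,
            "return_options": { "inputs": true, "top_n": 3 }
          }
        }""";

    public static void main(String[] args) {
        System.out.println(EXAMPLE);
    }
}

Note that return_options carries an inputs boolean, mapped from getDoesReturnDocuments() in the entity, which asks watsonx to echo the documents back; that echo is what lets the response parser populate the document text later in this change.
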
+ */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record IbmWatsonxRerankRequestEntity( + String query, + List inputs, + IbmWatsonxRerankTaskSettings taskSettings, + String modelId, + String projectId +) implements ToXContentObject { + + private static final String INPUTS_FIELD = "inputs"; + private static final String QUERY_FIELD = "query"; + private static final String MODEL_ID_FIELD = "model_id"; + private static final String PROJECT_ID_FIELD = "project_id"; + + public IbmWatsonxRerankRequestEntity { + Objects.requireNonNull(query); + Objects.requireNonNull(inputs); + Objects.requireNonNull(modelId); + Objects.requireNonNull(projectId); + Objects.requireNonNull(taskSettings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(MODEL_ID_FIELD, modelId); + builder.field(QUERY_FIELD, query); + builder.startArray(INPUTS_FIELD); + for (String input : inputs) { + builder.startObject(); + builder.field("text", input); + builder.endObject(); + } + builder.endArray(); + builder.field(PROJECT_ID_FIELD, projectId); + + builder.startObject("parameters"); + { + if (taskSettings.getTruncateInputTokens() != null) { + builder.field("truncate_input_tokens", taskSettings.getTruncateInputTokens()); + } + + builder.startObject("return_options"); + { + if (taskSettings.getDoesReturnDocuments() != null) { + builder.field("inputs", taskSettings.getDoesReturnDocuments()); + } + if (taskSettings.getTopNDocumentsOnly() != null) { + builder.field("top_n", taskSettings.getTopNDocumentsOnly()); + } + } + builder.endObject(); + } + builder.endObject(); + + builder.endObject(); + + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java index a506a33385dfb..91679288e5ae3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java @@ -13,6 +13,7 @@ public class IbmWatsonxUtils { public static final String V1 = "v1"; public static final String TEXT = "text"; public static final String EMBEDDINGS = "embeddings"; + public static final String RERANKS = "reranks"; private IbmWatsonxUtils() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseHandler.java index 9dc15ea667c1d..79bb4e6ddb35b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseHandler.java @@ -15,6 +15,12 @@ import 
org.elasticsearch.xpack.inference.logging.ThrottlerManager; public abstract class AmazonBedrockResponseHandler implements ResponseHandler { + + @Override + public boolean canHandleStreamingResponses() { + return false; + } + @Override public final void validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) throws RetryException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java index 3fa9635d38e8c..2e574d477b057 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java @@ -17,6 +17,8 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingBitResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; @@ -43,7 +45,9 @@ public class CohereEmbeddingsResponseEntity { toLowerCase(CohereEmbeddingType.FLOAT), CohereEmbeddingsResponseEntity::parseFloatEmbeddingsArray, toLowerCase(CohereEmbeddingType.INT8), - CohereEmbeddingsResponseEntity::parseByteEmbeddingsArray + CohereEmbeddingsResponseEntity::parseByteEmbeddingsArray, + toLowerCase(CohereEmbeddingType.BINARY), + CohereEmbeddingsResponseEntity::parseBitEmbeddingsArray ); private static final String VALID_EMBEDDING_TYPES_STRING = supportedEmbeddingTypes(); @@ -184,17 +188,24 @@ private static InferenceServiceResults parseEmbeddingsObject(XContentParser pars ); } + private static InferenceServiceResults parseBitEmbeddingsArray(XContentParser parser) throws IOException { + // Cohere returns array of binary embeddings encoded as bytes with int8 precision so we can reuse the byte parser + var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry); + + return new InferenceTextEmbeddingBitResults(embeddingList); + } + private static InferenceServiceResults parseByteEmbeddingsArray(XContentParser parser) throws IOException { var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry); return new InferenceTextEmbeddingByteResults(embeddingList); } - private static InferenceTextEmbeddingByteResults.InferenceByteEmbedding parseByteArrayEntry(XContentParser parser) throws IOException { + private static InferenceByteEmbedding parseByteArrayEntry(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); List embeddingValuesList = parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingInt8Entry); - return InferenceTextEmbeddingByteResults.InferenceByteEmbedding.of(embeddingValuesList); + return InferenceByteEmbedding.of(embeddingValuesList); } private static Byte parseEmbeddingInt8Entry(XContentParser parser) throws IOException { diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceErrorResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceErrorResponseEntity.java index 696be7b2acdd2..29b0903901694 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceErrorResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceErrorResponseEntity.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -16,6 +17,18 @@ import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.ErrorResponse; +import java.io.IOException; + +/** + * An example error response would look like + * + * + * { + * "error": "some error" + * } + * + * + */ public class ElasticInferenceServiceErrorResponseEntity extends ErrorResponse { private static final Logger logger = LogManager.getLogger(ElasticInferenceServiceErrorResponseEntity.class); @@ -24,24 +37,18 @@ private ElasticInferenceServiceErrorResponseEntity(String errorMessage) { super(errorMessage); } - /** - * An example error response would look like - * - * - * { - * "error": "some error" - * } - * - * - * @param response The error response - * @return An error entity if the response is JSON with the above structure - * or {@link ErrorResponse#UNDEFINED_ERROR} if the error field wasn't found - */ public static ErrorResponse fromResponse(HttpResult response) { - try ( - XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON) - .createParser(XContentParserConfiguration.EMPTY, response.body()) - ) { + return fromParser( + () -> XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, response.body()) + ); + } + + public static ErrorResponse fromString(String response) { + return fromParser(() -> XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, response)); + } + + private static ErrorResponse fromParser(CheckedSupplier jsonParserFactory) { + try (XContentParser jsonParser = jsonParserFactory.get()) { var responseMap = jsonParser.map(); var error = (String) responseMap.get("error"); if (error != null) { @@ -50,7 +57,6 @@ public static ErrorResponse fromResponse(HttpResult response) { } catch (Exception e) { logger.debug("Failed to parse error response", e); } - return ErrorResponse.UNDEFINED_ERROR; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java new file mode 100644 index 0000000000000..05f369bd8961e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.inference.external.response.ibmwatsonx; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class IbmWatsonxRankedResponseEntity { + + private static final Logger logger = LogManager.getLogger(IbmWatsonxRankedResponseEntity.class); + + /** + * Parses the Ibm Watsonx ranked response. + * + * For a request like: + * "model": "rerank-english-v2.0", + * "query": "database", + * "return_documents": true, + * "top_n": 3, + * "input": ["greenland", "google","john", "mysql","potter", "grammar"] + *
    + * The response will look like (without whitespace): + * { + * "rerank": [ + * { + * "index": 3, + * "relevance_score": 0.7989932 + * }, + * { + * "index": 5, + * "relevance_score": 0.61281824 + * }, + * { + * "index": 1, + * "relevance_score": 0.5762553 + * }, + * { + * "index": 4, + * "relevance_score": 0.47395563 + * }, + * { + * "index": 0, + * "relevance_score": 0.4338926 + * }, + * { + * "index": 2, + * "relevance_score": 0.42638257 + * } + * ], + * } + * + * @param response the http response from ibm watsonx + * @return the parsed response + * @throws IOException if there is an error parsing the response + */ + public static InferenceServiceResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE); // TODO error message + + token = jsonParser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + return new RankedDocsResults(parseList(jsonParser, IbmWatsonxRankedResponseEntity::parseRankedDocObject)); + } else { + throwUnknownToken(token, jsonParser); + } + + // This should never be reached. The above code should either return successfully or hit the throwUnknownToken + // or throw a parsing exception + throw new IllegalStateException("Reached an invalid state while parsing the Watsonx response"); + } + } + + private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + int index = -1; + float score = -1; + String documentText = null; + parser.nextToken(); + while (parser.currentToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + switch (parser.currentName()) { + case "index": + parser.nextToken(); // move to VALUE_NUMBER + index = parser.intValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "score": + parser.nextToken(); // move to VALUE_NUMBER + score = parser.floatValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "input": + parser.nextToken(); // move to START_OBJECT; document text is wrapped in an object + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + do { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME && parser.currentName().equals("text")) { + parser.nextToken(); // move to VALUE_STRING + documentText = parser.text(); + } + } while (parser.nextToken() != XContentParser.Token.END_OBJECT); + parser.nextToken();// move past END_OBJECT + // parser should now be at the next FIELD_NAME or END_OBJECT + break; + default: + throwUnknownField(parser.currentName(), parser); + } + } else { + parser.nextToken(); + } + } + + if (index == -1) { + logger.warn("Failed to find required field [index] in Watsonx rerank response"); + } + if (score == -1) { + logger.warn("Failed to find required field [relevance_score] in Watsonx rerank response"); + } + // documentText may or may not be present depending on the request parameter + + return new 
RankedDocsResults.RankedDoc(index, score, documentText); + } + + private IbmWatsonxRankedResponseEntity() {} + + static String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Watsonx rerank response"; +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 1acdff7a751ae..3bebd8086d792 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -73,6 +73,7 @@ import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilder; +import org.elasticsearch.xpack.inference.highlight.SemanticTextHighlighter; import java.io.IOException; import java.io.UncheckedIOException; @@ -116,6 +117,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final NodeFeature SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX = new NodeFeature( "semantic_text.always_emit_inference_id_fix" ); + public static final NodeFeature SEMANTIC_TEXT_SKIP_INFERENCE_FIELDS = new NodeFeature("semantic_text.skip_inference_fields"); public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; @@ -580,6 +582,11 @@ public String familyTypeName() { return TextFieldMapper.CONTENT_TYPE; } + @Override + public String getDefaultHighlighter() { + return SemanticTextHighlighter.NAME; + } + public String getInferenceId() { return inferenceId; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java index 57805d5277ffc..9e513a1ed9226 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java @@ -28,6 +28,7 @@ public class SemanticKnnVectorQueryRewriteInterceptor extends SemanticQueryRewri public static final NodeFeature SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED = new NodeFeature( "search.semantic_knn_vector_query_rewrite_interception_supported" ); + public static final NodeFeature SEMANTIC_KNN_FILTER_FIX = new NodeFeature("search.semantic_knn_filter_fix"); public SemanticKnnVectorQueryRewriteInterceptor() {} @@ -147,6 +148,7 @@ private KnnVectorQueryBuilder addIndexFilterToKnnVectorQuery(Collection ); } + copy.addFilterQueries(original.filterQueries()); copy.addFilterQuery(new TermsQueryBuilder(IndexFieldMapper.NAME, indices)); return copy; } @@ -165,8 +167,9 @@ private KnnVectorQueryBuilder buildNewKnnVectorQuery( KnnVectorQueryBuilder original, QueryVectorBuilder queryVectorBuilder ) { + KnnVectorQueryBuilder newQueryBuilder; if (original.queryVectorBuilder() != null) { - return new KnnVectorQueryBuilder( + newQueryBuilder = new KnnVectorQueryBuilder( fieldName, queryVectorBuilder, original.k(), @@ -174,7 +177,7 @@ private KnnVectorQueryBuilder 
buildNewKnnVectorQuery( original.getVectorSimilarity() ); } else { - return new KnnVectorQueryBuilder( + newQueryBuilder = new KnnVectorQueryBuilder( fieldName, original.queryVector(), original.k(), @@ -183,6 +186,9 @@ private KnnVectorQueryBuilder buildNewKnnVectorQuery( original.getVectorSimilarity() ); } + + newQueryBuilder.addFilterQueries(original.filterQueries()); + return newQueryBuilder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 285739fe0936f..eafdb6366afd4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -332,11 +332,12 @@ private static String getInferenceIdForForField(Collection indexM protected boolean doEquals(SemanticQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(query, other.query) - && Objects.equals(inferenceResults, other.inferenceResults); + && Objects.equals(inferenceResults, other.inferenceResults) + && Objects.equals(inferenceResultsSupplier, other.inferenceResultsSupplier); } @Override protected int doHashCode() { - return Objects.hash(fieldName, query, inferenceResults); + return Objects.hash(fieldName, query, inferenceResults, inferenceResultsSupplier); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 165c42fdb7d1f..fa6cc3db0ef9f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -29,6 +29,7 @@ import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.DEFAULT_RERANK_ID; /** * A {@code RetrieverBuilder} for parsing and constructing a text similarity reranker retriever. @@ -47,10 +48,11 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(TextSimilarityRankBuilder.NAME, args -> { RetrieverBuilder retrieverBuilder = (RetrieverBuilder) args[0]; - String inferenceId = (String) args[1]; + String inferenceId = args[1] == null ? DEFAULT_RERANK_ID : (String) args[1]; String inferenceText = (String) args[2]; String field = (String) args[3]; int rankWindowSize = args[4] == null ? 
DEFAULT_RANK_WINDOW_SIZE : (int) args[4]; + return new TextSimilarityRankRetrieverBuilder(retrieverBuilder, inferenceId, inferenceText, field, rankWindowSize); }); @@ -60,7 +62,7 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder c.trackRetrieverUsage(innerRetriever.getName()); return innerRetriever; }, RETRIEVER_FIELD); - PARSER.declareString(constructorArg(), INFERENCE_ID_FIELD); + PARSER.declareString(optionalConstructorArg(), INFERENCE_ID_FIELD); PARSER.declareString(constructorArg(), INFERENCE_TEXT_FIELD); PARSER.declareString(constructorArg(), FIELD_FIELD); PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); @@ -171,6 +173,10 @@ public String getName() { return TextSimilarityRankBuilder.NAME; } + public String inferenceId() { + return inferenceId; + } + public int rankWindowSize() { return rankWindowSize; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index a9642a685aec9..ca7595f78da06 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.engine.VersionConflictEngineException; @@ -61,6 +62,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -111,7 +113,7 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); - defaultConfigIds = new HashMap<>(); + defaultConfigIds = new ConcurrentHashMap<>(); } /** @@ -124,11 +126,20 @@ public boolean containsDefaultConfigId(String inferenceEntityId) { return defaultConfigIds.containsKey(inferenceEntityId); } + /** + * Adds the default configuration information if it does not already exist internally. 
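
Seen side by side, the two ModelRegistry registration paths in this hunk differ only in how they treat duplicates: addDefaultIds throws, while the new putDefaultIdIfAbsent is idempotent, so repeated authorization callbacks can safely re-register the same defaults. A JDK-only sketch of that contract, with illustrative names standing in for the real members, is:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class DefaultIdRegistrySketch {

    private final Map<String, String> defaultConfigIds = new ConcurrentHashMap<>();

    // Idempotent: safe to call on every authorization refresh.
    void putDefaultIdIfAbsent(String inferenceId, String config) {
        defaultConfigIds.putIfAbsent(inferenceId, config);
    }

    // Strict: registering the same id twice is treated as a programming error.
    void addDefaultIds(String inferenceId, String config) {
        if (defaultConfigIds.putIfAbsent(inferenceId, config) != null) {
            throw new IllegalStateException("Config with id [" + inferenceId + "] already exists");
        }
    }

    public static void main(String[] args) {
        var registry = new DefaultIdRegistrySketch();
        registry.putDefaultIdIfAbsent(".elser-v2-elastic", "sparse_embedding");
        registry.putDefaultIdIfAbsent(".elser-v2-elastic", "sparse_embedding"); // no-op
        try {
            registry.addDefaultIds(".elser-v2-elastic", "sparse_embedding");
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage()); // Config with id [.elser-v2-elastic] already exists
        }
    }
}

Switching the backing map to ConcurrentHashMap fits the same pattern: the map is now also mutated from authorization callbacks, not only from the synchronized write paths.
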
+ * @param defaultConfigId the default endpoint information + */ + public synchronized void putDefaultIdIfAbsent(InferenceService.DefaultConfigId defaultConfigId) { + defaultConfigIds.putIfAbsent(defaultConfigId.inferenceId(), defaultConfigId); + } + /** * Set the default inference ids provided by the services - * @param defaultConfigId The default + * @param defaultConfigId The default endpoint information + * @throws IllegalStateException if the {@link InferenceService.DefaultConfigId#inferenceId()} already exists internally */ - public synchronized void addDefaultIds(InferenceService.DefaultConfigId defaultConfigId) { + public synchronized void addDefaultIds(InferenceService.DefaultConfigId defaultConfigId) throws IllegalStateException { var config = defaultConfigIds.get(defaultConfigId.inferenceId()); if (config != null) { throw new IllegalStateException( @@ -644,11 +655,32 @@ private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkRes return null; } + public synchronized void removeDefaultConfigs(Set inferenceEntityIds, ActionListener listener) { + if (inferenceEntityIds.isEmpty()) { + listener.onResponse(true); + return; + } + + defaultConfigIds.keySet().removeAll(inferenceEntityIds); + deleteModels(inferenceEntityIds, listener); + } + public void deleteModel(String inferenceEntityId, ActionListener listener) { - if (preventDeletionLock.contains(inferenceEntityId)) { + deleteModels(Set.of(inferenceEntityId), listener); + } + + public void deleteModels(Set inferenceEntityIds, ActionListener listener) { + var lockedInferenceIds = new HashSet<>(inferenceEntityIds); + lockedInferenceIds.retainAll(preventDeletionLock); + + if (lockedInferenceIds.isEmpty() == false) { listener.onFailure( new ElasticsearchStatusException( - "Model is currently being updated, you may delete the model once the update completes", + Strings.format( + "The inference endpoint(s) %s are currently being updated, please wait until after they are " + + "finished updating to delete.", + lockedInferenceIds + ), RestStatus.CONFLICT ) ); @@ -657,7 +689,7 @@ public void deleteModel(String inferenceEntityId, ActionListener listen DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false); request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN); - request.setQuery(documentIdQuery(inferenceEntityId)); + request.setQuery(documentIdsQuery(inferenceEntityIds)); request.setRefresh(true); client.execute(DeleteByQueryAction.INSTANCE, request, listener.delegateFailureAndWrap((l, r) -> l.onResponse(Boolean.TRUE))); @@ -695,6 +727,11 @@ private QueryBuilder documentIdQuery(String inferenceEntityId) { return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId))); } + private QueryBuilder documentIdsQuery(Set inferenceEntityIds) { + var documentIdsArray = inferenceEntityIds.stream().map(Model::documentId).toArray(String[]::new); + return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(documentIdsArray)); + } + static Optional idMatchedDefault( String inferenceId, List defaultConfigIds diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/BaseInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/BaseInferenceAction.java index d911158e82296..06a0849b91d4e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/BaseInferenceAction.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/BaseInferenceAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import java.io.IOException; @@ -41,21 +42,22 @@ static TimeValue parseTimeout(RestRequest restRequest) { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { var params = parseParams(restRequest); + var content = restRequest.requiredContent(); + var inferTimeout = parseTimeout(restRequest); - InferenceAction.Request.Builder requestBuilder; - try (var parser = restRequest.contentParser()) { - requestBuilder = InferenceAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), parser); - } + var request = new InferenceActionProxy.Request( + params.taskType(), + params.inferenceEntityId(), + content, + restRequest.getXContentType(), + inferTimeout, + shouldStream() + ); - var inferTimeout = parseTimeout(restRequest); - requestBuilder.setInferenceTimeout(inferTimeout); - var request = prepareInferenceRequest(requestBuilder); - return channel -> client.execute(InferenceAction.INSTANCE, request, listener(channel)); + return channel -> client.execute(InferenceActionProxy.INSTANCE, request, ActionListener.withRef(listener(channel), content)); } - protected InferenceAction.Request prepareInferenceRequest(InferenceAction.Request.Builder builder) { - return builder.build(); - } + protected abstract boolean shouldStream(); protected abstract ActionListener listener(RestChannel channel); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java index 57c06df8d8dfe..b1edec79dfb72 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java @@ -24,21 +24,14 @@ public final class Paths { static final String INFERENCE_SERVICES_PATH = "_inference/_services"; static final String TASK_TYPE_INFERENCE_SERVICES_PATH = "_inference/_services/{" + TASK_TYPE + "}"; - static final String STREAM_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/_stream"; + public static final String STREAM_SUFFIX = "_stream"; + static final String STREAM_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/" + STREAM_SUFFIX; static final String STREAM_TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" - + TASK_TYPE_OR_INFERENCE_ID - + "}/{" - + INFERENCE_ID - + "}/_stream"; - - public static final String UNIFIED_SUFFIX = "_unified"; - static final String UNIFIED_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/" + UNIFIED_SUFFIX; - static final String UNIFIED_TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/{" + INFERENCE_ID + "}/" - + UNIFIED_SUFFIX; + + STREAM_SUFFIX; private Paths() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java index 0fbc2f8214cbb..55083dcd4c888 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java @@ -32,6 +32,11 @@ public List routes() { return List.of(new Route(POST, INFERENCE_ID_PATH), new Route(POST, TASK_TYPE_INFERENCE_ID_PATH)); } + @Override + protected boolean shouldStream() { + return false; + } + @Override protected ActionListener listener(RestChannel channel) { return new RestChunkedToXContentListener<>(channel); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java index 881af435b29b6..f37f4e9fb1f9b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java @@ -42,12 +42,12 @@ public List routes() { } @Override - protected InferenceAction.Request prepareInferenceRequest(InferenceAction.Request.Builder builder) { - return builder.setStream(true).build(); + protected ActionListener listener(RestChannel channel) { + return new ServerSentEventsRestActionListener(channel, threadPool); } @Override - protected ActionListener listener(RestChannel channel) { - return new ServerSentEventsRestActionListener(channel, threadPool); + protected boolean shouldStream() { + return true; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java deleted file mode 100644 index 51f1bc48c8306..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.rest; - -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.Scope; -import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.xpack.inference.rest.Paths.UNIFIED_INFERENCE_ID_PATH; -import static org.elasticsearch.xpack.inference.rest.Paths.UNIFIED_TASK_TYPE_INFERENCE_ID_PATH; - -@ServerlessScope(Scope.PUBLIC) -public class RestUnifiedCompletionInferenceAction extends BaseRestHandler { - private final SetOnce threadPool; - - public RestUnifiedCompletionInferenceAction(SetOnce threadPool) { - super(); - this.threadPool = Objects.requireNonNull(threadPool); - } - - @Override - public String getName() { - return "unified_inference_action"; - } - - @Override - public List routes() { - return List.of(new Route(POST, UNIFIED_INFERENCE_ID_PATH), new Route(POST, UNIFIED_TASK_TYPE_INFERENCE_ID_PATH)); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - var params = BaseInferenceAction.parseParams(restRequest); - - var inferTimeout = BaseInferenceAction.parseTimeout(restRequest); - - UnifiedCompletionAction.Request request; - try (var parser = restRequest.contentParser()) { - request = UnifiedCompletionAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), inferTimeout, parser); - } - - return channel -> client.execute( - UnifiedCompletionAction.INSTANCE, - request, - new ServerSentEventsRestActionListener(channel, threadPool) - ); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java index 120731a4f8e66..7b3c54c60cdcc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.inference.rest; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; @@ -48,7 +46,8 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient inferenceEntityId = restRequest.param(INFERENCE_ID); taskType = TaskType.fromStringOrStatusException(restRequest.param(TASK_TYPE_OR_INFERENCE_ID)); } else { - throw new ElasticsearchStatusException("Inference ID must be provided in the path", RestStatus.BAD_REQUEST); + inferenceEntityId = restRequest.param(TASK_TYPE_OR_INFERENCE_ID); + taskType = TaskType.ANY; } var content = restRequest.requiredContent(); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java index 6991e1325f3bc..cadf3e5f1806b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java @@ -35,15 +35,19 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.XContentFormattedException; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.Iterator; +import java.util.Map; import java.util.Objects; import java.util.concurrent.Flow; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.xpack.core.inference.results.XContentFormattedException.X_CONTENT_PARAM; + /** * A version of {@link org.elasticsearch.rest.action.RestChunkedToXContentListener} that reads from a {@link Flow.Publisher} and encodes * the response in Server-Sent Events. @@ -72,7 +76,7 @@ public ServerSentEventsRestActionListener(RestChannel channel, SetOnce threadPool) { this.channel = channel; - this.params = params; + this.params = new ToXContent.DelegatingMapParams(Map.of(X_CONTENT_PARAM, String.valueOf(channel.detailedErrorsEnabled())), params); this.threadPool = Objects.requireNonNull(threadPool); } @@ -150,6 +154,12 @@ public void onFailure(Exception e) { } private ChunkedToXContent errorChunk(Throwable t) { + // if we've already formatted it, just return that format + if (ExceptionsHelper.unwrapCause(t) instanceof XContentFormattedException xContentFormattedException) { + return xContentFormattedException; + } + + // else, try to parse the format and return something that the ES client knows how to interpret var status = ExceptionsHelper.status(t); Exception e; @@ -158,7 +168,8 @@ private ChunkedToXContent errorChunk(Throwable t) { } else { // if not exception, then error, and we should not let it escape. rethrow on another thread, and inform the user we're stopping. ExceptionsHelper.maybeDieOnAnotherThread(t); - e = new RuntimeException("Fatal error while streaming response", t); + e = new RuntimeException("Fatal error while streaming response. 
Please retry the request."); + logger.error(e.getMessage(), t); } return params -> Iterators.concat( ChunkedToXContentHelper.startObject(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index ac6e57d31b740..56bf6c1359a56 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -37,7 +37,7 @@ import java.util.Set; public abstract class SenderService implements InferenceService { - protected static final Set COMPLETION_ONLY = EnumSet.of(TaskType.COMPLETION, TaskType.ANY); + protected static final Set COMPLETION_ONLY = EnumSet.of(TaskType.COMPLETION); private final Sender sender; private final ServiceComponents serviceComponents; @@ -47,7 +47,7 @@ public SenderService(HttpRequestSender.Factory factory, ServiceComponents servic this.serviceComponents = Objects.requireNonNull(serviceComponents); } - protected Sender getSender() { + public Sender getSender() { return sender; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 1ddae3cc8df95..13d641101a1cf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -42,7 +42,7 @@ import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.ENABLED; import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.MAX_NUMBER_OF_ALLOCATIONS; import static org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings.MIN_NUMBER_OF_ALLOCATIONS; -import static org.elasticsearch.xpack.inference.rest.Paths.UNIFIED_SUFFIX; +import static org.elasticsearch.xpack.inference.rest.Paths.STREAM_SUFFIX; import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; public final class ServiceUtils { @@ -796,7 +796,7 @@ public static String useChatCompletionUrlMessage(Model model) { model.getTaskType(), model.getTaskType(), model.getInferenceEntityId(), - UNIFIED_SUFFIX + STREAM_SUFFIX ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java index 0fd0c281d8bc6..589ca1e033f06 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java @@ -409,7 +409,7 @@ public static InferenceServiceConfiguration get() { HTTP_SCHEMA_NAME, new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("") .setLabel("HTTP Schema") - .setRequired(true) + .setRequired(false) .setSensitive(false) .setUpdatable(false) .setType(SettingsConfigurationFieldType.STRING) diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index e13c668197a8f..493acd3c0cd1a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -54,6 +54,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -413,6 +414,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-embed-text.html." + ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(AmazonBedrockSecretSettings.Configuration.get()); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index 88d5b54398d06..34a5c2b4cc1e9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -53,6 +53,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -441,6 +442,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-embeddings." 
+ ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 5b622d68f2c25..9a77b63337978 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -50,6 +50,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -382,6 +383,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#request-body-1." + ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(AzureOpenAiSecretSettings.Configuration.get()); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 60326a8a34ca3..6c2d3bb96d74d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.SettingsConfiguration; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; @@ -51,6 +52,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -363,6 +365,19 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + configurationMap.put( 
+ MODEL_ID, + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." + ) + .setLabel("Model ID") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.STRING) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java index 11e405df3cde9..c3790bec9f193 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java @@ -36,18 +36,29 @@ public enum CohereEmbeddingType { /** * This is a synonym for INT8 */ - BYTE(DenseVectorFieldMapper.ElementType.BYTE, RequestConstants.INT8); + BYTE(DenseVectorFieldMapper.ElementType.BYTE, RequestConstants.INT8), + /** + * Use this when you want to get back binary embeddings. Valid only for v3 models. + */ + BIT(DenseVectorFieldMapper.ElementType.BIT, RequestConstants.BIT), + /** + * This is a synonym for BIT + */ + BINARY(DenseVectorFieldMapper.ElementType.BIT, RequestConstants.BIT); private static final class RequestConstants { private static final String FLOAT = "float"; private static final String INT8 = "int8"; + private static final String BIT = "binary"; } private static final Map ELEMENT_TYPE_TO_COHERE_EMBEDDING = Map.of( DenseVectorFieldMapper.ElementType.FLOAT, FLOAT, DenseVectorFieldMapper.ElementType.BYTE, - BYTE + BYTE, + DenseVectorFieldMapper.ElementType.BIT, + BIT ); static final EnumSet SUPPORTED_ELEMENT_TYPES = EnumSet.copyOf( ELEMENT_TYPE_TO_COHERE_EMBEDDING.keySet() @@ -116,6 +127,17 @@ public static CohereEmbeddingType translateToVersion(CohereEmbeddingType embeddi return INT8; } + if (embeddingType == BIT) { + if (version.onOrAfter(TransportVersions.COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED) + || version.isPatchFrom(TransportVersions.COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED_BACKPORT_8_X)) { + // BIT embedding type is supported in these versions + return embeddingType; + } else { + // BIT embedding type is not supported in these versions + return INT8; + } + } + return embeddingType; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 8b8723b54d683..fee66a9f84ac9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -57,6 +57,7 @@ import org.elasticsearch.xpack.inference.telemetry.TraceContext; import java.util.ArrayList; +import java.util.Comparator; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; @@ -65,11 +66,14 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.TreeSet; import java.util.concurrent.CountDownLatch; import 
java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.inference.results.ResultUtils.createInvalidChunkedResultException; +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; @@ -88,14 +92,24 @@ public class ElasticInferenceService extends SenderService { private static final Logger logger = LogManager.getLogger(ElasticInferenceService.class); private static final EnumSet IMPLEMENTED_TASK_TYPES = EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION); private static final String SERVICE_NAME = "Elastic"; + + // rainbow-sprinkles static final String DEFAULT_CHAT_COMPLETION_MODEL_ID_V1 = "rainbow-sprinkles"; - static final String DEFAULT_CHAT_COMPLETION_ENDPOINT_ID_V1 = Strings.format(".%s-elastic", DEFAULT_CHAT_COMPLETION_MODEL_ID_V1); + static final String DEFAULT_CHAT_COMPLETION_ENDPOINT_ID_V1 = defaultEndpointId(DEFAULT_CHAT_COMPLETION_MODEL_ID_V1); + + // elser-v2 + static final String DEFAULT_ELSER_MODEL_ID_V2 = "elser-v2"; + static final String DEFAULT_ELSER_ENDPOINT_ID_V2 = defaultEndpointId(DEFAULT_ELSER_MODEL_ID_V2); /** * The task types that the {@link InferenceAction.Request} can accept. */ private static final EnumSet SUPPORTED_INFERENCE_ACTION_TASK_TYPES = EnumSet.of(TaskType.SPARSE_EMBEDDING); + private static String defaultEndpointId(String modelId) { + return Strings.format(".%s-elastic", modelId); + } + private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; private Configuration configuration; private final AtomicReference authRef = new AtomicReference<>(AuthorizedContent.empty()); @@ -140,6 +154,19 @@ private static Map initDefaultEndpoints( elasticInferenceServiceComponents ), MinimalServiceSettings.chatCompletion() + ), + DEFAULT_ELSER_MODEL_ID_V2, + new DefaultModelConfig( + new ElasticInferenceServiceSparseEmbeddingsModel( + DEFAULT_ELSER_ENDPOINT_ID_V2, + TaskType.SPARSE_EMBEDDING, + NAME, + new ElasticInferenceServiceSparseEmbeddingsServiceSettings(DEFAULT_ELSER_MODEL_ID_V2, null, null), + EmptyTaskSettings.INSTANCE, + EmptySecretSettings.INSTANCE, + elasticInferenceServiceComponents + ), + MinimalServiceSettings.sparseEmbedding() ) ); } @@ -158,10 +185,7 @@ static AuthorizedContent empty() { private void getAuthorization() { try { - ActionListener listener = ActionListener.wrap(result -> { - setAuthorizedContent(result); - authorizationCompletedLatch.countDown(); - }, e -> { + ActionListener listener = ActionListener.wrap(this::setAuthorizedContent, e -> { // we don't need to do anything if there was a failure, everything is disabled by default authorizationCompletedLatch.countDown(); }); @@ -177,18 +201,30 @@ private synchronized void setAuthorizedContent(ElasticInferenceServiceAuthorizat var authorizedTaskTypesAndModels = auth.newLimitedToTaskTypes(EnumSet.copyOf(IMPLEMENTED_TASK_TYPES)); // recalculate which default config ids and models are authorized now - var authorizedDefaultConfigIds = getAuthorizedDefaultConfigIds(auth); - var authorizedDefaultModelObjects = getAuthorizedDefaultModelsObjects(auth); + var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); + + var 
authorizedDefaultConfigIds = getAuthorizedDefaultConfigIds(authorizedDefaultModelIds, auth); + var authorizedDefaultModelObjects = getAuthorizedDefaultModelsObjects(authorizedDefaultModelIds); authRef.set(new AuthorizedContent(authorizedTaskTypesAndModels, authorizedDefaultConfigIds, authorizedDefaultModelObjects)); configuration = new Configuration(authRef.get().taskTypesAndModels.getAuthorizedTaskTypes()); - defaultConfigIds().forEach(modelRegistry::addDefaultIds); + defaultConfigIds().forEach(modelRegistry::putDefaultIdIfAbsent); + handleRevokedDefaultConfigs(authorizedDefaultModelIds); } - private List getAuthorizedDefaultConfigIds(ElasticInferenceServiceAuthorization auth) { - var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); + private Set getAuthorizedDefaultModelIds(ElasticInferenceServiceAuthorization auth) { + var authorizedModels = auth.getAuthorizedModelIds(); + var authorizedDefaultModelIds = new TreeSet<>(defaultModelsConfigs.keySet()); + authorizedDefaultModelIds.retainAll(authorizedModels); + return authorizedDefaultModelIds; + } + + private List getAuthorizedDefaultConfigIds( + Set authorizedDefaultModelIds, + ElasticInferenceServiceAuthorization auth + ) { var authorizedConfigIds = new ArrayList(); for (var id : authorizedDefaultModelIds) { var modelConfig = defaultModelsConfigs.get(id); @@ -207,20 +243,11 @@ private List getAuthorizedDefaultConfigIds(ElasticInferenceServ } } + authorizedConfigIds.sort(Comparator.comparing(DefaultConfigId::inferenceId)); return authorizedConfigIds; } - private Set getAuthorizedDefaultModelIds(ElasticInferenceServiceAuthorization auth) { - var authorizedModels = auth.getAuthorizedModelIds(); - var authorizedDefaultModelIds = new HashSet<>(defaultModelsConfigs.keySet()); - authorizedDefaultModelIds.retainAll(authorizedModels); - - return authorizedDefaultModelIds; - } - - private List getAuthorizedDefaultModelsObjects(ElasticInferenceServiceAuthorization auth) { - var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); - + private List getAuthorizedDefaultModelsObjects(Set authorizedDefaultModelIds) { var authorizedModels = new ArrayList(); for (var id : authorizedDefaultModelIds) { var modelConfig = defaultModelsConfigs.get(id); @@ -229,11 +256,43 @@ private List getAuthorizedDefaultModelsObjects(ElasticInfere } } + authorizedModels.sort(Comparator.comparing(modelConfig -> modelConfig.model.getInferenceEntityId())); return authorizedModels; } - // Default for testing - void waitForAuthorizationToComplete(TimeValue waitTime) { + private void handleRevokedDefaultConfigs(Set authorizedDefaultModelIds) { + // if a model was initially returned in the authorization response but is absent, then we'll assume authorization was revoked + var unauthorizedDefaultModelIds = new HashSet<>(defaultModelsConfigs.keySet()); + unauthorizedDefaultModelIds.removeAll(authorizedDefaultModelIds); + + // get all the default inference endpoint ids for the unauthorized model ids + var unauthorizedDefaultInferenceEndpointIds = unauthorizedDefaultModelIds.stream() + .map(defaultModelsConfigs::get) // get all the model configs + .filter(Objects::nonNull) // limit to only non-null + .map(modelConfig -> modelConfig.model.getInferenceEntityId()) // get the inference ids + .collect(Collectors.toSet()); + + var deleteInferenceEndpointsListener = ActionListener.wrap(result -> { + logger.trace(Strings.format("Successfully revoked access to default inference endpoint IDs: %s", unauthorizedDefaultModelIds)); + 
authorizationCompletedLatch.countDown(); + }, e -> { + logger.warn( + Strings.format("Failed to revoke access to default inference endpoint IDs: %s, error: %s", unauthorizedDefaultModelIds, e) + ); + authorizationCompletedLatch.countDown(); + }); + + getServiceComponents().threadPool() + .executor(UTILITY_THREAD_POOL_NAME) + .execute(() -> modelRegistry.removeDefaultConfigs(unauthorizedDefaultInferenceEndpointIds, deleteInferenceEndpointsListener)); + } + + /** + * Waits the specified amount of time for the authorization call to complete. This is mainly to make testing easier. + * @param waitTime the max time to wait + * @throws IllegalStateException if the wait time is exceeded or the call receives an {@link InterruptedException} + */ + public void waitForAuthorizationToComplete(TimeValue waitTime) { try { if (authorizationCompletedLatch.await(waitTime.getSeconds(), TimeUnit.SECONDS) == false) { throw new IllegalStateException("The wait time has expired for authorization to complete."); @@ -248,10 +307,6 @@ public synchronized Set<TaskType> supportedStreamingTasks() { var authorizedStreamingTaskTypes = EnumSet.of(TaskType.CHAT_COMPLETION); authorizedStreamingTaskTypes.retainAll(authRef.get().taskTypesAndModels.getAuthorizedTaskTypes()); - if (authorizedStreamingTaskTypes.isEmpty() == false) { - authorizedStreamingTaskTypes.add(TaskType.ANY); - } - return authorizedStreamingTaskTypes; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java index 623c25222446c..4ec270eef3a62 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java @@ -10,14 +10,12 @@ import org.elasticsearch.common.util.FeatureFlag; /** - * Elastic Inference Service (EIS) feature flag. When the feature is complete, this flag will be removed. - * Enable feature via JVM option: `-Des.elastic_inference_service_feature_flag_enabled=true`. + * Elastic Inference Service feature flag. No longer used, but kept until the controller stops passing + * -Des.elastic_inference_service_feature_flag_enabled=true at startup. 
*/ public class ElasticInferenceServiceFeature { - // TODO when we remove this also look in InferenceGetServicesIT and remove references to the deprecated URL setting @Deprecated - public static final FeatureFlag DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); - public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("elastic_inference_service"); + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 3b6b1088cc9cf..98d55fd799598 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.elastic; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; @@ -20,18 +21,16 @@ */ public class ElasticInferenceServiceSettings { - // TODO when we remove this look at InferenceGetServicesIT and remove the setting there as well + public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; + @Deprecated static final Setting<String> EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); - public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; - static final Setting<String> ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( "xpack.inference.elastic.url", Setting.Property.NodeScope ); - // Adjust this variable to be volatile, if the setting can be updated at some point in time @Deprecated private final String eisGatewayUrl; @@ -63,13 +62,8 @@ public static List<Setting<?>> getSettingsDefinitions() { return settings; } - @Deprecated - public String getEisGatewayUrl() { - return eisGatewayUrl; - } - public String getElasticInferenceServiceUrl() { - return elasticInferenceServiceUrl; + return Strings.isEmpty(elasticInferenceServiceUrl) ? 
eisGatewayUrl : elasticInferenceServiceUrl; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index 4c1cac4d7a77b..ac6a389914a10 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -20,15 +20,11 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.elastic.ElasticInferenceServiceActionVisitor; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; -import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; import java.net.URI; import java.net.URISyntaxException; -import java.util.Locale; import java.util.Map; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; - public class ElasticInferenceServiceSparseEmbeddingsModel extends ElasticInferenceServiceExecutableActionModel { private final URI uri; @@ -95,36 +91,15 @@ public URI uri() { } private URI createUri() throws ElasticsearchStatusException { - String modelId = getServiceSettings().modelId(); - String modelIdUriPath; - - switch (modelId) { - case ElserModels.ELSER_V2_MODEL -> modelIdUriPath = "ELSERv2"; - default -> throw new ElasticsearchStatusException( - String.format( - Locale.ROOT, - "Unsupported model [%s] for service [%s] and task type [%s]", - modelId, - ELASTIC_INFERENCE_SERVICE_IDENTIFIER, - TaskType.SPARSE_EMBEDDING - ), - RestStatus.BAD_REQUEST - ); - } - try { // TODO, consider transforming the base URL into a URI for better error handling. 
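The change just below collapses the model-specific sparse embeddings route (previously "/api/v1/embed/text/sparse/ELSERv2") into a single model-agnostic path. A minimal standalone sketch of the resulting URI construction, with a hypothetical base URL standing in for elasticInferenceServiceComponents().elasticInferenceServiceUrl():

import java.net.URI;
import java.net.URISyntaxException;

class SparseEmbeddingsUriSketch {
    public static void main(String[] args) throws URISyntaxException {
        // hypothetical base URL; in the service it comes from the ElasticInferenceServiceComponents
        String baseUrl = "https://eis.example.com";
        // no per-model path segment anymore: every sparse embeddings request uses the same route
        URI uri = new URI(baseUrl + "/api/v1/embed/text/sparse");
        System.out.println(uri); // prints https://eis.example.com/api/v1/embed/text/sparse
    }
}
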
- return new URI( - elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/embed/text/sparse/" + modelIdUriPath - ); + return new URI(elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/embed/text/sparse"); } catch (URISyntaxException e) { throw new ElasticsearchStatusException( "Failed to create URI for service [" + this.getConfigurations().getService() + "] with taskType [" + this.getTaskType() - + "] with model [" - + this.getServiceSettings().modelId() + "]: " + e.getMessage(), RestStatus.BAD_REQUEST, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java index 3af404aeef36b..9ac42f66a0c4b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java @@ -17,7 +17,6 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; -import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -61,10 +60,6 @@ public static ElasticInferenceServiceSparseEmbeddingsServiceSettings fromMap( context ); - if (modelId != null && ElserModels.isValidEisModel(modelId) == false) { - validationException.addValidationError("unknown ELSER model id [" + modelId + "]"); - } - if (validationException.validationErrors().isEmpty() == false) { throw validationException; } @@ -80,7 +75,7 @@ public static ElasticInferenceServiceSparseEmbeddingsServiceSettings fromMap( public ElasticInferenceServiceSparseEmbeddingsServiceSettings( String modelId, @Nullable Integer maxInputTokens, - RateLimitSettings rateLimitSettings + @Nullable RateLimitSettings rateLimitSettings ) { this.modelId = Objects.requireNonNull(modelId); this.maxInputTokens = maxInputTokens; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java index 3c8182a7d41a4..293ca1bcb41c0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java @@ -36,7 +36,7 @@ public class ElasticInferenceServiceCompletionServiceSettings extends FilteredXC public static final String NAME = "elastic_inference_service_completion_service_settings"; // TODO what value do we put here? 
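The TODO above concerns the default rate limit, which the next hunk raises from 240 to 720 requests per unit of time (per minute, assuming RateLimitSettings' usual default unit). A toy sketch of the default-vs-override pattern these service settings follow, using plain Java stand-ins rather than the real classes, and a flat key name assumed for illustration:

import java.util.Map;

class RateLimitDefaultSketch {
    // toy stand-in for the plugin's RateLimitSettings
    record RateLimit(long requestsPerMinute) {}

    // use the service default unless the settings map supplies an override
    static RateLimit of(Map<String, Object> serviceSettings, RateLimit defaultLimit) {
        Object value = serviceSettings.get("requests_per_minute"); // key name assumed; the real parser reads a nested rate_limit object
        return value == null ? defaultLimit : new RateLimit(((Number) value).longValue());
    }

    public static void main(String[] args) {
        RateLimit defaultLimit = new RateLimit(720L);
        System.out.println(of(Map.of(), defaultLimit).requestsPerMinute()); // 720
        System.out.println(of(Map.<String, Object>of("requests_per_minute", 60), defaultLimit).requestsPerMinute()); // 60
    }
}
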
- private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(240L); + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(720L); public static ElasticInferenceServiceCompletionServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index 3cc7e0c6c2b53..4591418419ded 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import java.io.IOException; @@ -20,15 +19,6 @@ public CustomElandInternalServiceSettings(ElasticsearchInternalServiceSettings o super(other); } - public CustomElandInternalServiceSettings( - Integer numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); - } - public CustomElandInternalServiceSettings(StreamInput in) throws IOException { super(in); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java index 133be5e2b7623..d5f4143b65d36 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.SimilarityMeasure; @@ -105,33 +106,17 @@ private static CommonFields commonFieldsFromMap(Map map, Validat private final SimilarityMeasure similarityMeasure; private final DenseVectorFieldMapper.ElementType elementType; - public CustomElandInternalTextEmbeddingServiceSettings( - int numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - this( - numAllocations, - numThreads, - modelId, - adaptiveAllocationsSettings, - null, - SimilarityMeasure.COSINE, - DenseVectorFieldMapper.ElementType.FLOAT - ); - } - - public CustomElandInternalTextEmbeddingServiceSettings( + CustomElandInternalTextEmbeddingServiceSettings( int numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings, + 
@Nullable String deploymentId, Integer dimensions, SimilarityMeasure similarityMeasure, DenseVectorFieldMapper.ElementType elementType ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, deploymentId); this.dimensions = dimensions; this.similarityMeasure = Objects.requireNonNull(similarityMeasure); this.elementType = Objects.requireNonNull(elementType); @@ -159,7 +144,8 @@ private CustomElandInternalTextEmbeddingServiceSettings(CommonFields commonField commonFields.internalServiceSettings.getNumAllocations(), commonFields.internalServiceSettings.getNumThreads(), commonFields.internalServiceSettings.modelId(), - commonFields.internalServiceSettings.getAdaptiveAllocationsSettings() + commonFields.internalServiceSettings.getAdaptiveAllocationsSettings(), + commonFields.internalServiceSettings.getDeploymentId() ); this.dimensions = dimensions; similarityMeasure = commonFields.similarityMeasure; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java index 316dc092e03c7..2b7904e615682 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java @@ -14,21 +14,27 @@ import java.io.IOException; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.RERANKER_ID; + public class ElasticRerankerServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "elastic_reranker_service_settings"; + public static ElasticRerankerServiceSettings defaultEndpointSettings() { + return new ElasticRerankerServiceSettings(null, 1, RERANKER_ID, new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32)); + } + public ElasticRerankerServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public ElasticRerankerServiceSettings( + private ElasticRerankerServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null); } public ElasticRerankerServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 9dfa21a323c33..ddc5e3e1aa36c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.LazyInitializable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -33,13 +34,13 @@ import org.elasticsearch.inference.UnifiedCompletionRequest; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; @@ -110,8 +111,11 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi private static final Logger logger = LogManager.getLogger(ElasticsearchInternalService.class); private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(ElasticsearchInternalService.class); + private final Settings settings; + public ElasticsearchInternalService(InferenceServiceExtension.InferenceServiceFactoryContext context) { super(context); + this.settings = context.settings(); } // for testing @@ -120,6 +124,7 @@ public ElasticsearchInternalService(InferenceServiceExtension.InferenceServiceFa Consumer> platformArch ) { super(context, platformArch); + this.settings = context.settings(); } @Override @@ -562,6 +567,7 @@ private static CustomElandEmbeddingModel updateModelWithEmbeddingDetails(CustomE model.getServiceSettings().getNumThreads(), model.getServiceSettings().modelId(), model.getServiceSettings().getAdaptiveAllocationsSettings(), + model.getServiceSettings().getDeploymentId(), embeddingSize, model.getServiceSettings().similarity(), model.getServiceSettings().elementType() @@ -837,18 +843,26 @@ public List<DefaultConfigId> defaultConfigIds() { @Override public void updateModelsWithDynamicFields(List<Model> models, ActionListener<List<Model>> listener) { - if (models.isEmpty()) { listener.onResponse(models); return; } - var modelsByDeploymentIds = new HashMap<String, ElasticsearchInternalModel>(); + // if ML is disabled, do not update Deployment Stats (there won't be changes) + if (XPackSettings.MACHINE_LEARNING_ENABLED.get(settings) == false) { + listener.onResponse(models); + return; + } + + var modelsByDeploymentIds = new HashMap<String, List<ElasticsearchInternalModel>>(); for (var model : models) { assert model instanceof ElasticsearchInternalModel; if (model instanceof ElasticsearchInternalModel esModel) { - modelsByDeploymentIds.put(esModel.mlNodeDeploymentId(), esModel); + modelsByDeploymentIds.merge(esModel.mlNodeDeploymentId(), new ArrayList<>(List.of(esModel)), (a, b) -> { + a.addAll(b); + return a; + }); } else { listener.onFailure( new ElasticsearchStatusException( @@ -867,10 +881,13 @@ public void updateModelsWithDynamicFields(List<Model> models, ActionListener { for (var deploymentStats : stats.getStats().results()) { - var model = modelsByDeploymentIds.get(deploymentStats.getDeploymentId()); - model.updateNumAllocations(deploymentStats.getNumberOfAllocations()); + var modelsForDeploymentId = 
modelsByDeploymentIds.get(deploymentStats.getDeploymentId()); + modelsForDeploymentId.forEach(model -> model.updateNumAllocations(deploymentStats.getNumberOfAllocations())); } - listener.onResponse(new ArrayList<>(modelsByDeploymentIds.values())); + var updatedModels = new ArrayList(); + modelsByDeploymentIds.values().forEach(updatedModels::addAll); + + listener.onResponse(updatedModels); }, e -> { logger.warn("Get deployment stats failed, cannot update the endpoint's number of allocations", e); // continue with the original response @@ -903,12 +920,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, NAME, - new ElserInternalServiceSettings( - null, - 1, - useLinuxOptimizedModel ? ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, - new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) - ), + ElserInternalServiceSettings.defaultEndpointSettings(useLinuxOptimizedModel), ElserMlNodeTaskSettings.DEFAULT, ChunkingSettingsBuilder.DEFAULT_SETTINGS ); @@ -916,19 +928,14 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { DEFAULT_E5_ID, TaskType.TEXT_EMBEDDING, NAME, - new MultilingualE5SmallInternalServiceSettings( - null, - 1, - useLinuxOptimizedModel ? MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, - new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) - ), + MultilingualE5SmallInternalServiceSettings.defaultEndpointSettings(useLinuxOptimizedModel), ChunkingSettingsBuilder.DEFAULT_SETTINGS ); var defaultRerank = new ElasticRerankerModel( DEFAULT_RERANK_ID, TaskType.RERANK, NAME, - new ElasticRerankerServiceSettings(null, 1, RERANKER_ID, new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32)), + ElasticRerankerServiceSettings.defaultEndpointSettings(), RerankTaskSettings.DEFAULT_SETTINGS ); return List.of(defaultElser, defaultE5, defaultRerank); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index 244108edc3dd4..98730f33d10f9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.ToXContentObject; @@ -108,25 +109,12 @@ protected static ElasticsearchInternalServiceSettings.Builder fromMap( .setDeploymentId(deploymentId); } - public ElasticsearchInternalServiceSettings( - Integer numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - this.numAllocations = numAllocations; - this.numThreads = numThreads; - this.modelId = Objects.requireNonNull(modelId); - this.adaptiveAllocationsSettings = adaptiveAllocationsSettings; - this.deploymentId = null; - } - public ElasticsearchInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings, - String 
deploymentId + @Nullable String deploymentId ) { this.numAllocations = numAllocations; this.numThreads = numThreads; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java index da9164bf3f288..b94b9feb8a049 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java @@ -9,14 +9,14 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import java.io.IOException; -import java.util.Arrays; -import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL_LINUX_X86; public class ElserInternalServiceSettings extends ElasticsearchInternalServiceSettings { @@ -26,37 +26,26 @@ public static MinimalServiceSettings minimalServiceSettings() { return MinimalServiceSettings.sparseEmbedding(); } - public static Builder fromRequestMap(Map map) { - ValidationException validationException = new ValidationException(); - var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException); - - String modelId = baseSettings.getModelId(); - if (modelId != null && ElserModels.isValidModel(modelId) == false) { - var ve = new ValidationException(); - ve.addValidationError( - "Unknown ELSER model ID [" + modelId + "]. Valid models are " + Arrays.toString(ElserModels.VALID_ELSER_MODEL_IDS.toArray()) - ); - throw ve; - } - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - return baseSettings; + public static ElserInternalServiceSettings defaultEndpointSettings(boolean useLinuxOptimizedModel) { + return new ElserInternalServiceSettings( + null, + 1, + useLinuxOptimizedModel ? 
ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) + ); } public ElserInternalServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public ElserInternalServiceSettings( + private ElserInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - this(new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings)); + this(new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null)); } public ElserInternalServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 317cc48172fca..45d52d3c8deaa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -18,6 +18,9 @@ import java.util.Arrays; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86; + public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "multilingual_e5_small_service_settings"; @@ -29,17 +32,26 @@ public static MinimalServiceSettings minimalServiceSettings() { return MinimalServiceSettings.textEmbedding(DIMENSIONS, SIMILARITY, DenseVectorFieldMapper.ElementType.FLOAT); } + public static MultilingualE5SmallInternalServiceSettings defaultEndpointSettings(boolean useLinuxOptimizedModel) { + return new MultilingualE5SmallInternalServiceSettings( + null, + 1, + useLinuxOptimizedModel ? 
MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) + ); + } + public MultilingualE5SmallInternalServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public MultilingualE5SmallInternalServiceSettings( + MultilingualE5SmallInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null); } public MultilingualE5SmallInternalServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index 55397b2398d39..3e921f669e864 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -327,6 +327,8 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + // TODO whether the model ID is required or not depends on the task type + // For rerank it is optional, for text_embedding it is required configurationMap.put( MODEL_ID, new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("ID of the LLM you're using.") diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java index 4f0b425cdaa51..09706f70e3684 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java @@ -12,6 +12,7 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionVisitor; @@ -38,6 +39,12 @@ public IbmWatsonxModel(IbmWatsonxModel model, ServiceSettings serviceSettings) { rateLimitServiceSettings = model.rateLimitServiceSettings(); } + public IbmWatsonxModel(IbmWatsonxModel model, TaskSettings taskSettings) { + super(model, taskSettings); + + rateLimitServiceSettings = model.rateLimitServiceSettings(); + } + public abstract ExecutableAction accept(IbmWatsonxActionVisitor creator, Map taskSettings, InputType inputType); public IbmWatsonxRateLimitServiceSettings rateLimitServiceSettings() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index 477225f00d22b..3fa423c2dae19 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder; import java.util.EnumSet; @@ -138,6 +139,15 @@ private static IbmWatsonxModel createModel( secretSettings, context ); + case RERANK -> new IbmWatsonxRerankModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java new file mode 100644 index 0000000000000..cb4c509d88c2b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.apache.http.client.utils.URIBuilder; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxModel; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.ML; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.RERANKS; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.TEXT; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.V1; + +public class IbmWatsonxRerankModel extends IbmWatsonxModel { + public static IbmWatsonxRerankModel of(IbmWatsonxRerankModel model, Map taskSettings) { + var requestTaskSettings = IbmWatsonxRerankTaskSettings.fromMap(taskSettings); + return new IbmWatsonxRerankModel(model, IbmWatsonxRerankTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public IbmWatsonxRerankModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets, + ConfigurationParseContext context + ) { + this( + modelId, + taskType, + service, 
+ IbmWatsonxRerankServiceSettings.fromMap(serviceSettings, context), + IbmWatsonxRerankTaskSettings.fromMap(taskSettings), + DefaultSecretSettings.fromMap(secrets) + ); + } + + // should only be used for testing + IbmWatsonxRerankModel( + String modelId, + TaskType taskType, + String service, + IbmWatsonxRerankServiceSettings serviceSettings, + IbmWatsonxRerankTaskSettings taskSettings, + @Nullable DefaultSecretSettings secretSettings + ) { + super( + new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secretSettings), + serviceSettings + ); + } + + private IbmWatsonxRerankModel(IbmWatsonxRerankModel model, IbmWatsonxRerankTaskSettings taskSettings) { + super(model, taskSettings); + } + + @Override + public IbmWatsonxRerankServiceSettings getServiceSettings() { + return (IbmWatsonxRerankServiceSettings) super.getServiceSettings(); + } + + @Override + public IbmWatsonxRerankTaskSettings getTaskSettings() { + return (IbmWatsonxRerankTaskSettings) super.getTaskSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + } + + public URI uri() { + URI uri; + try { + uri = buildUri(this.getServiceSettings().uri().toString(), this.getServiceSettings().apiVersion()); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + + return uri; + } + + /** + * Accepts a visitor to create an executable action. The returned action will not return documents in the response. + * @param visitor _ + * @param taskSettings _ + * @param inputType ignored for rerank task + * @return the rerank action + */ + @Override + public ExecutableAction accept(IbmWatsonxActionVisitor visitor, Map taskSettings, InputType inputType) { + return visitor.create(this, taskSettings); + } + + public static URI buildUri(String uri, String apiVersion) throws URISyntaxException { + return new URIBuilder().setScheme("https") + .setHost(uri) + .setPathSegments(ML, V1, TEXT, RERANKS) + .setParameter("version", apiVersion) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java new file mode 100644 index 0000000000000..969622f9ba54f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java @@ -0,0 +1,190 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
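For reference, the buildUri helper above composes the Watson rerank endpoint with Apache's URIBuilder. A runnable sketch with hypothetical host and version values, assuming the IbmWatsonxUtils constants ML, V1, TEXT, and RERANKS resolve to "ml", "v1", "text", and "reranks":

import org.apache.http.client.utils.URIBuilder;
import java.net.URI;
import java.net.URISyntaxException;

class WatsonxRerankUriSketch {
    public static void main(String[] args) throws URISyntaxException {
        // host and api version are hypothetical; in the model they come from the service settings
        URI uri = new URIBuilder().setScheme("https")
            .setHost("us-south.ml.cloud.ibm.com")
            .setPathSegments("ml", "v1", "text", "reranks")
            .setParameter("version", "2024-05-31")
            .build();
        System.out.println(uri); // https://us-south.ml.cloud.ibm.com/ml/v1/text/reranks?version=2024-05-31
    }
}

setPathSegments escapes each segment individually, which is why the model builds the URI this way instead of concatenating strings.
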
+ */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxService; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.net.URI; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxServiceFields.API_VERSION; +import static org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxServiceFields.PROJECT_ID; + +public class IbmWatsonxRerankServiceSettings extends FilteredXContentObject implements ServiceSettings, IbmWatsonxRateLimitServiceSettings { + public static final String NAME = "ibm_watsonx_rerank_service_settings"; + + /** + * Rate limits are defined in the + * Watson Machine Learning plans. + * With the Lite plan, you have 120 requests per minute (on average, two requests per second). 
+ */ + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120); + + public static IbmWatsonxRerankServiceSettings fromMap(Map map, ConfigurationParseContext context) { + ValidationException validationException = new ValidationException(); + + String url = extractRequiredString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException); + + String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String projectId = extractRequiredString(map, PROJECT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + IbmWatsonxService.NAME, + context + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new IbmWatsonxRerankServiceSettings(uri, apiVersion, modelId, projectId, rateLimitSettings); + } + + private final URI uri; + + private final String apiVersion; + + private final String modelId; + + private final String projectId; + + private final RateLimitSettings rateLimitSettings; + + public IbmWatsonxRerankServiceSettings( + URI uri, + String apiVersion, + String modelId, + String projectId, + @Nullable RateLimitSettings rateLimitSettings + ) { + this.uri = uri; + this.apiVersion = apiVersion; + this.projectId = projectId; + this.modelId = modelId; + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + public IbmWatsonxRerankServiceSettings(StreamInput in) throws IOException { + this.uri = createUri(in.readString()); + this.apiVersion = in.readString(); + this.modelId = in.readString(); + this.projectId = in.readString(); + this.rateLimitSettings = new RateLimitSettings(in); + + } + + public URI uri() { + return uri; + } + + public String apiVersion() { + return apiVersion; + } + + @Override + public String modelId() { + return modelId; + } + + public String projectId() { + return projectId; + } + + @Override + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(URL, uri.toString()); + + builder.field(API_VERSION, apiVersion); + + builder.field(MODEL_ID, modelId); + + builder.field(PROJECT_ID, projectId); + + rateLimitSettings.toXContent(builder, params); + + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_IBM_WATSONX_RERANK_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uri.toString()); + out.writeString(apiVersion); + + out.writeString(modelId); + out.writeString(projectId); + + rateLimitSettings.writeTo(out); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if 
(object == null || getClass() != object.getClass()) return false; + IbmWatsonxRerankServiceSettings that = (IbmWatsonxRerankServiceSettings) object; + return Objects.equals(uri, that.uri) + && Objects.equals(apiVersion, that.apiVersion) + && Objects.equals(modelId, that.modelId) + && Objects.equals(projectId, that.projectId) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(uri, apiVersion, modelId, projectId, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java new file mode 100644 index 0000000000000..12f4b8f6fa33e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; + +public class IbmWatsonxRerankTaskSettings implements TaskSettings { + + public static final String NAME = "ibm_watsonx_rerank_task_settings"; + public static final String RETURN_DOCUMENTS = "return_documents"; + public static final String TOP_N_DOCS_ONLY = "top_n"; + public static final String TRUNCATE_INPUT_TOKENS = "truncate_input_tokens"; + + static final IbmWatsonxRerankTaskSettings EMPTY_SETTINGS = new IbmWatsonxRerankTaskSettings(null, null, null); + + public static IbmWatsonxRerankTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + if (map == null || map.isEmpty()) { + return EMPTY_SETTINGS; + } + + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, validationException); + Integer topNDocumentsOnly = extractOptionalPositiveInteger( + map, + TOP_N_DOCS_ONLY, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + Integer truncateInputTokens = extractOptionalPositiveInteger( + map, + TRUNCATE_INPUT_TOKENS, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return of(topNDocumentsOnly, returnDocuments, truncateInputTokens); + } + + /** + * Creates a new {@link IbmWatsonxRerankTaskSettings} + * by preferring 
non-null fields from the request settings over the original settings. + * + * @param originalSettings the settings stored as part of the inference entity configuration + * @param requestTaskSettings the settings passed in within the task_settings field of the request + * @return a constructed {@link IbmWatsonxRerankTaskSettings} + */ + public static IbmWatsonxRerankTaskSettings of( + IbmWatsonxRerankTaskSettings originalSettings, + IbmWatsonxRerankTaskSettings requestTaskSettings + ) { + return new IbmWatsonxRerankTaskSettings( + requestTaskSettings.getTopNDocumentsOnly() != null + ? requestTaskSettings.getTopNDocumentsOnly() + : originalSettings.getTopNDocumentsOnly(), + requestTaskSettings.getReturnDocuments() != null + ? requestTaskSettings.getReturnDocuments() + : originalSettings.getReturnDocuments(), + requestTaskSettings.getTruncateInputTokens() != null + ? requestTaskSettings.getTruncateInputTokens() + : originalSettings.getTruncateInputTokens() + ); + } + + public static IbmWatsonxRerankTaskSettings of(Integer topNDocumentsOnly, Boolean returnDocuments, Integer truncateInputTokens) { + return new IbmWatsonxRerankTaskSettings(topNDocumentsOnly, returnDocuments, truncateInputTokens); + } + + private final Integer topNDocumentsOnly; + private final Boolean returnDocuments; + private final Integer truncateInputTokens; + + public IbmWatsonxRerankTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalInt(), in.readOptionalBoolean(), in.readOptionalInt()); + } + + public IbmWatsonxRerankTaskSettings( + @Nullable Integer topNDocumentsOnly, + @Nullable Boolean doReturnDocuments, + @Nullable Integer truncateInputTokens + ) { + this.topNDocumentsOnly = topNDocumentsOnly; + this.returnDocuments = doReturnDocuments; + this.truncateInputTokens = truncateInputTokens; + } + + @Override + public boolean isEmpty() { + return topNDocumentsOnly == null && returnDocuments == null && truncateInputTokens == null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (topNDocumentsOnly != null) { + builder.field(TOP_N_DOCS_ONLY, topNDocumentsOnly); + } + if (returnDocuments != null) { + builder.field(RETURN_DOCUMENTS, returnDocuments); + } + if (truncateInputTokens != null) { + builder.field(TRUNCATE_INPUT_TOKENS, truncateInputTokens); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_IBM_WATSONX_RERANK_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalInt(topNDocumentsOnly); + out.writeOptionalBoolean(returnDocuments); + out.writeOptionalInt(truncateInputTokens); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IbmWatsonxRerankTaskSettings that = (IbmWatsonxRerankTaskSettings) o; + return Objects.equals(returnDocuments, that.returnDocuments) + && Objects.equals(topNDocumentsOnly, that.topNDocumentsOnly) + && Objects.equals(truncateInputTokens, that.truncateInputTokens); + } + + @Override + public int hashCode() { + return Objects.hash(returnDocuments, topNDocumentsOnly, truncateInputTokens); + } + + public static String invalidInputTypeMessage(InputType inputType) { + return Strings.format("received invalid input type value [%s]", inputType.toString()); + } + + public Boolean 
getDoesReturnDocuments() { + return returnDocuments; + } + + public Integer getTopNDocumentsOnly() { + return topNDocumentsOnly; + } + + public Boolean getReturnDocuments() { + return returnDocuments; + } + + public Integer getTruncateInputTokens() { + return truncateInputTokens; + } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + IbmWatsonxRerankTaskSettings updatedSettings = IbmWatsonxRerankTaskSettings.fromMap(new HashMap<>(newSettings)); + return IbmWatsonxRerankTaskSettings.of(this, updatedSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java index 7ad70fc88054d..37add1e264704 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.SettingsConfiguration; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; @@ -49,6 +50,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -339,6 +341,33 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + configurationMap.put( + JinaAIServiceSettings.MODEL_ID, + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." + ) + .setLabel("Model ID") + .setRequired(true) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.STRING) + .build() + ); + + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://api.jina.ai/redoc#tag/embeddings/operation/create_embedding_v1_embeddings_post." 
+ ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 0ce5bc801b59f..94312a39882fd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -56,8 +56,8 @@ import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; import static org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator.COMPLETION_ERROR_PREFIX; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; -import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -376,7 +376,7 @@ public TransportVersion getMinimalSupportedVersion() { @Override public Set supportedStreamingTasks() { - return EnumSet.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION, TaskType.ANY); + return EnumSet.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION); } /** @@ -440,19 +440,16 @@ public static InferenceServiceConfiguration get() { ); configurationMap.put( - URL, - new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES_FOR_SERVICES_API).setDefaultValue( - "https://api.openai.com/v1/chat/completions" + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://platform.openai.com/docs/api-reference/embeddings/create#embeddings-create-dimensions." ) - .setDescription( - "The OpenAI API endpoint URL. For more information on the URL, refer to the " - + "https://platform.openai.com/docs/api-reference." 
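// The optional "dimensions" field that replaces the URL entry above follows the same
// SettingsConfiguration.Builder pattern used throughout this patch; a condensed sketch
// (names from the surrounding hunks, defaults assumed, not a quote of production code):
//
//   var dimensions = new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING))
//       .setDescription("The number of dimensions the resulting embeddings should have.")
//       .setLabel("Dimensions")
//       .setRequired(false)   // optional: the model's default dimensionality applies if unset
//       .setSensitive(false)
//       .setUpdatable(false)
//       .setType(SettingsConfigurationFieldType.INTEGER)
//       .build();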
- ) - .setLabel("URL") - .setRequired(true) + .setLabel("Dimensions") + .setRequired(false) .setSensitive(false) .setUpdatable(false) - .setType(SettingsConfigurationFieldType.STRING) + .setType(SettingsConfigurationFieldType.INTEGER) .build() ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java index e4de3d6beb800..1c4306c4edd46 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java @@ -29,7 +29,7 @@ public static ModelValidator buildModelValidator(TaskType taskType) { case SPARSE_EMBEDDING, RERANK, ANY -> { return new SimpleModelValidator(new SimpleServiceIntegrationValidator()); } - default -> throw new IllegalArgumentException(Strings.format("Can't validate inference model of for task type %s ", taskType)); + default -> throw new IllegalArgumentException(Strings.format("Can't validate inference model for task type %s", taskType)); } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java deleted file mode 100644 index d1db5b8b12cc6..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; -import org.junit.After; -import org.junit.Before; - -import static org.hamcrest.Matchers.is; - -public class InferencePluginTests extends ESTestCase { - private InferencePlugin inferencePlugin; - - private Boolean elasticInferenceServiceEnabled = true; - - private void setElasticInferenceServiceEnabled(Boolean elasticInferenceServiceEnabled) { - this.elasticInferenceServiceEnabled = elasticInferenceServiceEnabled; - } - - @Before - public void setUp() throws Exception { - super.setUp(); - - Settings settings = Settings.builder().build(); - inferencePlugin = new InferencePlugin(settings) { - @Override - protected Boolean isElasticInferenceServiceEnabled() { - return elasticInferenceServiceEnabled; - } - }; - } - - @After - public void tearDown() throws Exception { - super.tearDown(); - } - - public void testElasticInferenceServiceSettingsPresent() throws Exception { - setElasticInferenceServiceEnabled(true); // enable elastic inference service - boolean anyMatch = inferencePlugin.getSettings() - .stream() - .map(Setting::getKey) - .anyMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); - - assertThat("xpack.inference.elastic settings are present", anyMatch, is(true)); - } - - public void testElasticInferenceServiceSettingsNotPresent() throws Exception { - setElasticInferenceServiceEnabled(false); // disable elastic inference service - boolean noneMatch = inferencePlugin.getSettings() - .stream() - .map(Setting::getKey) - .noneMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); - - assertThat("xpack.inference.elastic settings are not present", noneMatch, is(true)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java index c0fc818e421d0..56966ca40c478 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java @@ -7,9 +7,10 @@ package org.elasticsearch.xpack.inference.action; -import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; @@ -21,11 +22,13 @@ import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.BaseInferenceActionRequest; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.InferencePlugin; import 
org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; import org.junit.Before; @@ -44,9 +47,9 @@ import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.assertArg; -import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -61,6 +64,9 @@ public abstract class BaseTransportInferenceActionTestCase createAction( ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ); protected abstract Request createRequest(); @@ -111,8 +127,7 @@ public void testMetricsAfterModelRegistryError() { return null; }).when(modelRegistry).getModelWithSecrets(any(), any()); - var listener = doExecute(taskType); - verify(listener).onFailure(same(expectedException)); + doExecute(taskType); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), nullValue()); @@ -132,7 +147,13 @@ protected ActionListener doExecute(TaskType taskType, when(request.getInferenceEntityId()).thenReturn(inferenceId); when(request.getTaskType()).thenReturn(taskType); when(request.isStreaming()).thenReturn(stream); - ActionListener listener = mock(); + ActionListener listener = spy(new ActionListener<>() { + @Override + public void onResponse(InferenceAction.Response o) {} + + @Override + public void onFailure(Exception e) {} + }); action.doExecute(mock(), request, listener); return listener; } @@ -145,9 +166,9 @@ public void testMetricsAfterMissingService() { var listener = doExecute(taskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(ElasticsearchException.class)); assertThat(e.getMessage(), is("Unknown service [" + serviceId + "] for model [" + inferenceId + "]. 
")); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((ElasticsearchException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); @@ -176,7 +197,7 @@ public void testMetricsAfterUnknownTaskType() { var listener = doExecute(requestTaskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(ElasticsearchException.class)); assertThat( e.getMessage(), is( @@ -187,7 +208,7 @@ public void testMetricsAfterUnknownTaskType() { + "]" ) ); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((ElasticsearchException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); @@ -205,7 +226,6 @@ public void testMetricsAfterInferError() { var listener = doExecute(taskType); - verify(listener).onFailure(same(expectedException)); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); assertThat(attributes.get("task_type"), is(taskType.toString())); @@ -223,8 +243,8 @@ public void testMetricsAfterStreamUnsupported() { var listener = doExecute(taskType, true); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); - var ese = (ElasticsearchStatusException) e; + assertThat(e, isA(ElasticsearchException.class)); + var ese = (ElasticsearchException) e; assertThat(ese.getMessage(), is("Streaming is not allowed for service [" + serviceId + "].")); assertThat(ese.status(), is(expectedStatus)); })); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointActionTests.java new file mode 100644 index 0000000000000..a640e64c2022d --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointActionTests.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.MinimalServiceSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.junit.After; +import org.junit.Before; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class TransportDeleteInferenceEndpointActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30); + + private TransportDeleteInferenceEndpointAction action; + private ThreadPool threadPool; + private ModelRegistry modelRegistry; + + @Before + public void setUp() throws Exception { + super.setUp(); + modelRegistry = new ModelRegistry(mock(Client.class)); + threadPool = createThreadPool(inferenceUtilityPool()); + action = new TransportDeleteInferenceEndpointAction( + mock(TransportService.class), + mock(ClusterService.class), + threadPool, + mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), + modelRegistry, + mock(InferenceServiceRegistry.class) + ); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + terminate(threadPool); + } + + public void testFailsToDelete_ADefaultEndpoint() { + modelRegistry.addDefaultIds( + new InferenceService.DefaultConfigId("model-id", MinimalServiceSettings.chatCompletion(), mock(InferenceService.class)) + ); + + var listener = new PlainActionFuture(); + + action.masterOperation( + mock(Task.class), + new DeleteInferenceEndpointAction.Request("model-id", TaskType.CHAT_COMPLETION, true, false), + mock(ClusterState.class), + listener + ); + + var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + is("[model-id] is a reserved inference endpoint. " + "Cannot delete a reserved inference endpoint.") + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxyTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxyTests.java new file mode 100644 index 0000000000000..a9e6ec55a6224 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionProxyTests.java @@ -0,0 +1,191 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; +import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.junit.After; +import org.junit.Before; + +import java.util.Collections; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class TransportInferenceActionProxyTests extends ESTestCase { + private Client client; + private ThreadPool threadPool; + private TransportInferenceActionProxy action; + private ModelRegistry modelRegistry; + + @Before + public void setUp() throws Exception { + super.setUp(); + client = mock(Client.class); + threadPool = new TestThreadPool("test"); + when(client.threadPool()).thenReturn(threadPool); + modelRegistry = mock(ModelRegistry.class); + + action = new TransportInferenceActionProxy(mock(TransportService.class), mock(ActionFilters.class), modelRegistry, client); + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + terminate(threadPool); + } + + public void testExecutesAUnifiedCompletionRequest_WhenTaskTypeIsChatCompletion_InRequest() { + String requestJson = """ + { + "model": "gpt-4o", + "messages": [ + { + "role": "user", + "content": [ + { + "text": "some text", + "type": "string" + } + ] + } + ] + } + """; + + @SuppressWarnings("unchecked") + ActionListener<InferenceAction.Response> listener = (ActionListener<InferenceAction.Response>) mock(ActionListener.class); + var request = new InferenceActionProxy.Request( + TaskType.CHAT_COMPLETION, + "id", + new BytesArray(requestJson), + XContentType.JSON, + TimeValue.ONE_MINUTE, + true + ); + + action.doExecute(mock(Task.class), request, listener); + + verify(client, times(1)).execute(eq(UnifiedCompletionAction.INSTANCE), any(), any()); + } +
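// The "FromStorage" variants below exercise the other dispatch path: the request carries
// TaskType.ANY, so the proxy first resolves the stored endpoint through the model registry
// and only then picks the transport action. A sketch of the decision these tests imply
// (the production control flow is assumed, not quoted):
//
//   var effectiveTaskType = request.getTaskType() != TaskType.ANY
//       ? request.getTaskType()
//       : resolvedModel.taskType();
//   var transportAction = effectiveTaskType == TaskType.CHAT_COMPLETION
//       ? UnifiedCompletionAction.INSTANCE
//       : InferenceAction.INSTANCE;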
+ public void testExecutesAUnifiedCompletionRequest_WhenTaskTypeIsChatCompletion_FromStorage() { + String requestJson = """ + { + "model": "gpt-4o", + "messages": [ + { + "role": "user", + "content": [ + { + "text": "some text", + "type": "string" + } + ] + } + ] + } + """; + + doAnswer(invocation -> { + ActionListener<UnparsedModel> listener = invocation.getArgument(1); + listener.onResponse( + new UnparsedModel("id", TaskType.CHAT_COMPLETION, "service", Collections.emptyMap(), Collections.emptyMap()) + ); + + return Void.TYPE; + }).when(modelRegistry).getModelWithSecrets(any(), any()); + + var listener = new PlainActionFuture<InferenceAction.Response>(); + var request = new InferenceActionProxy.Request( + TaskType.ANY, + "id", + new BytesArray(requestJson), + XContentType.JSON, + TimeValue.ONE_MINUTE, + true + ); + + action.doExecute(mock(Task.class), request, listener); + + verify(client, times(1)).execute(eq(UnifiedCompletionAction.INSTANCE), any(), any()); + } + + public void testExecutesAnInferenceAction_WhenTaskTypeIsCompletion_InRequest() { + String requestJson = """ + { + "input": ["some text"] + } + """; + + @SuppressWarnings("unchecked") + ActionListener<InferenceAction.Response> listener = (ActionListener<InferenceAction.Response>) mock(ActionListener.class); + var request = new InferenceActionProxy.Request( + TaskType.COMPLETION, + "id", + new BytesArray(requestJson), + XContentType.JSON, + TimeValue.ONE_MINUTE, + true + ); + + action.doExecute(mock(Task.class), request, listener); + + verify(client, times(1)).execute(eq(InferenceAction.INSTANCE), any(), any()); + } + + public void testExecutesAnInferenceAction_WhenTaskTypeIsCompletion_FromStorage() { + String requestJson = """ + { + "input": ["some text"] + } + """; + + doAnswer(invocation -> { + ActionListener<UnparsedModel> listener = invocation.getArgument(1); + listener.onResponse(new UnparsedModel("id", TaskType.COMPLETION, "service", Collections.emptyMap(), Collections.emptyMap())); + + return Void.TYPE; + }).when(modelRegistry).getModelWithSecrets(any(), any()); + + var listener = new PlainActionFuture<InferenceAction.Response>(); + var request = new InferenceActionProxy.Request( + TaskType.ANY, + "id", + new BytesArray(requestJson), + XContentType.JSON, + TimeValue.ONE_MINUTE, + true + ); + + action.doExecute(mock(Task.class), request, listener); + + verify(client, times(1)).execute(eq(InferenceAction.INSTANCE), any(), any()); + } +}
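The tests that follow pin down when TransportInferenceAction handles a request locally and when it reroutes to another node. A condensed sketch of the decision they imply (identifiers taken from the tests themselves; the exact production control flow is assumed, not quoted):

    // Reroute only when a rate-limit assignment points at a different node.
    boolean handleLocally = true;
    if (calculator.isTaskTypeReroutingSupported(serviceId, taskType)) {
        RateLimitAssignment assignment = calculator.getRateLimitAssignment(serviceId, taskType);
        if (assignment != null && assignment.responsibleNodes().isEmpty() == false) {
            DiscoveryNode node = pickNode(assignment.responsibleNodes()); // selection strategy assumed
            if (node.getId().equals(nodeClient.getLocalNodeId()) == false) {
                handleLocally = false;
                // transport failures surface through the response handler, as in
                // testRerouting_HandlesTransportException_FromOtherNode below
                transportService.sendRequest(node, InferenceAction.NAME, request, responseHandler);
            }
        }
    }
    if (handleLocally) {
        executeLocally(request, listener);
    }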
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java index c303e029cb415..3129f0865a249 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java @@ -8,16 +8,32 @@ package org.elasticsearch.xpack.inference.action; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.license.MockLicenseState; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.RateLimitAssignment; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; +import java.util.List; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportInferenceActionTests extends BaseTransportInferenceActionTestCase<InferenceAction.Request> { @@ -33,7 +49,10 @@ protected BaseTransportInferenceAction<InferenceAction.Request> createAction( ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { return new TransportInferenceAction( transportService, @@ -42,7 +61,10 @@ protected BaseTransportInferenceAction<InferenceAction.Request> createAction( modelRegistry, serviceRegistry, inferenceStats, - streamingTaskManager + streamingTaskManager, + inferenceServiceNodeLocalRateLimitCalculator, + nodeClient, + threadPool ); } @@ -50,4 +72,112 @@ protected BaseTransportInferenceAction<InferenceAction.Request> createAction( protected InferenceAction.Request createRequest() { return mock(); } + + public void testNoRerouting_WhenTaskTypeNotSupported() { + TaskType unsupportedTaskType = TaskType.COMPLETION; + mockService(listener -> listener.onResponse(mock())); + + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, unsupportedTaskType)).thenReturn(false); + + var listener = doExecute(unsupportedTaskType); + + verify(listener).onResponse(any()); + // Verify request was handled locally (not rerouted using TransportService) + verify(transportService, never()).sendRequest(any(), any(), any(), any()); + } + + public void testNoRerouting_WhenNoGroupingCalculatedYet() { + mockService(listener -> listener.onResponse(mock())); + + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(null); + + var listener = doExecute(taskType); + + verify(listener).onResponse(any()); + // Verify request was handled locally (not rerouted using TransportService) + verify(transportService, never()).sendRequest(any(), any(), any(), any()); + } + + public void testNoRerouting_WhenEmptyNodeList() { + mockService(listener -> listener.onResponse(mock())); + + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn( + new RateLimitAssignment(List.of()) + ); + + var listener = doExecute(taskType); + + verify(listener).onResponse(any()); + // Verify request was handled locally (not rerouted using TransportService) + verify(transportService, never()).sendRequest(any(), any(), any(), any()); + } + + public void testRerouting_ToOtherNode() { + DiscoveryNode otherNode = mock(DiscoveryNode.class); + when(otherNode.getId()).thenReturn("other-node"); + + // The local node is different to the "other-node" responsible for serviceId + when(nodeClient.getLocalNodeId()).thenReturn("local-node"); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + // Requests for serviceId are always routed to "other-node" + var assignment = new RateLimitAssignment(List.of(otherNode)); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + + mockService(listener -> listener.onResponse(mock())); + var listener = doExecute(taskType); + + // Verify request was rerouted + verify(transportService).sendRequest(same(otherNode), eq(InferenceAction.NAME),
any(), any()); + // Verify local execution didn't happen + verify(listener, never()).onResponse(any()); + } + + public void testRerouting_ToLocalNode_WithoutGoingThroughTransportLayerAgain() { + DiscoveryNode localNode = mock(DiscoveryNode.class); + String localNodeId = "local-node"; + when(localNode.getId()).thenReturn(localNodeId); + + // The local node is the only one responsible for serviceId + when(nodeClient.getLocalNodeId()).thenReturn(localNodeId); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + var assignment = new RateLimitAssignment(List.of(localNode)); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + + mockService(listener -> listener.onResponse(mock())); + var listener = doExecute(taskType); + + verify(listener).onResponse(any()); + // Verify request was handled locally (not rerouted using TransportService) + verify(transportService, never()).sendRequest(any(), any(), any(), any()); + } + + public void testRerouting_HandlesTransportException_FromOtherNode() { + DiscoveryNode otherNode = mock(DiscoveryNode.class); + when(otherNode.getId()).thenReturn("other-node"); + + when(nodeClient.getLocalNodeId()).thenReturn("local-node"); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + var assignment = new RateLimitAssignment(List.of(otherNode)); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + + mockService(listener -> listener.onResponse(mock())); + + TransportException expectedException = new TransportException("Failed to route"); + doAnswer(invocation -> { + TransportResponseHandler handler = invocation.getArgument(3); + handler.handleException(expectedException); + return null; + }).when(transportService).sendRequest(any(), any(), any(), any()); + + var listener = doExecute(taskType); + + // Verify request was rerouted + verify(transportService).sendRequest(same(otherNode), eq(InferenceAction.NAME), any(), any()); + // Verify local execution didn't happen + verify(listener, never()).onResponse(any()); + // Verify exception was propagated from "other-node" to "local-node" + verify(listener).onFailure(same(expectedException)); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java index e8e7d9ac21bed..7dac6a1015aae 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java @@ -7,15 +7,18 @@ package org.elasticsearch.xpack.inference.action; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import 
org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -45,7 +48,10 @@ protected BaseTransportInferenceAction createAc ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceRateLimitCalculator inferenceServiceRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { return new TransportUnifiedCompletionInferenceAction( transportService, @@ -54,7 +60,10 @@ protected BaseTransportInferenceAction createAc modelRegistry, serviceRegistry, inferenceStats, - streamingTaskManager + streamingTaskManager, + inferenceServiceRateLimitCalculator, + nodeClient, + threadPool ); } @@ -72,12 +81,12 @@ public void testThrows_IncompatibleTaskTypeException_WhenUsingATextEmbeddingInfe var listener = doExecute(requestTaskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(UnifiedChatCompletionException.class)); assertThat( e.getMessage(), is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]") ); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((UnifiedChatCompletionException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); @@ -97,12 +106,12 @@ public void testThrows_IncompatibleTaskTypeException_WhenUsingRequestIsAny_Model var listener = doExecute(requestTaskType); verify(listener).onFailure(assertArg(e -> { - assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e, isA(UnifiedChatCompletionException.class)); assertThat( e.getMessage(), is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]") ); - assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + assertThat(((UnifiedChatCompletionException) e).status(), is(RestStatus.BAD_REQUEST)); })); verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { assertThat(attributes.get("service"), is(serviceId)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionRequestTests.java new file mode 100644 index 0000000000000..ab1e1d9c4cb23 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionRequestTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; + +import java.io.IOException; + +public class UpdateInferenceModelActionRequestTests extends AbstractWireSerializingTestCase<UpdateInferenceModelAction.Request> { + + @Override + protected Writeable.Reader<UpdateInferenceModelAction.Request> instanceReader() { + return UpdateInferenceModelAction.Request::new; + } + + @Override + protected UpdateInferenceModelAction.Request createTestInstance() { + return new UpdateInferenceModelAction.Request( + randomAlphaOfLength(5), + randomBytesReference(50), + randomFrom(XContentType.values()), + randomFrom(TaskType.values()), + randomTimeValue() + ); + } + + @Override + protected UpdateInferenceModelAction.Request mutateInstance(UpdateInferenceModelAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables()); + } +}
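The companion response test below relies on the same AbstractWireSerializingTestCase harness. A minimal sketch of the round trip it performs (simplified; stream classes from org.elasticsearch.common.io.stream, registry wiring as in getNamedWriteableRegistry above):

    BytesStreamOutput output = new BytesStreamOutput();
    original.writeTo(output);                          // serialize via Writeable
    StreamInput input = new NamedWriteableAwareStreamInput(
        output.bytes().streamInput(),
        getNamedWriteableRegistry()                    // resolves named writeables on read
    );
    var copy = instanceReader().read(input);           // deserialize via instanceReader()
    assertEquals(original, copy);                      // relies on symmetric equals/hashCode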
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionResponseTests.java new file mode 100644 index 0000000000000..02208511d1484 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/UpdateInferenceModelActionResponseTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; +import org.elasticsearch.xpack.inference.ModelConfigurationsTests; + +import java.io.IOException; + +public class UpdateInferenceModelActionResponseTests extends AbstractWireSerializingTestCase<UpdateInferenceModelAction.Response> { + @Override + protected Writeable.Reader<UpdateInferenceModelAction.Response> instanceReader() { + return UpdateInferenceModelAction.Response::new; + } + + @Override + protected UpdateInferenceModelAction.Response createTestInstance() { + return new UpdateInferenceModelAction.Response(ModelConfigurationsTests.createRandomInstance()); + } + + @Override + protected UpdateInferenceModelAction.Response mutateInstance(UpdateInferenceModelAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables()); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java index 03249163c7f82..f0b82f49d4e98 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; +import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; @@ -368,16 +369,16 @@ public void testMergingListener_Byte() { // 4 inputs in 2 batches { - var embeddings = new ArrayList<InferenceTextEmbeddingByteResults.InferenceByteEmbedding>(); + var embeddings = new ArrayList<InferenceByteEmbedding>(); for (int i = 0; i < batchSize; i++) { - embeddings.add(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { randomByte() })); + embeddings.add(new InferenceByteEmbedding(new byte[] { randomByte() })); } batches.get(0).listener().onResponse(new InferenceTextEmbeddingByteResults(embeddings)); } { - var embeddings = new ArrayList<InferenceTextEmbeddingByteResults.InferenceByteEmbedding>(); + var embeddings = new ArrayList<InferenceByteEmbedding>(); for (int i = 0; i < 4; i++) { // 4 requests in the 2nd batch - embeddings.add(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { randomByte() })); + embeddings.add(new InferenceByteEmbedding(new byte[] { randomByte() })); } batches.get(1).listener().onResponse(new InferenceTextEmbeddingByteResults(embeddings)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java new file mode 100644 index 0000000000000..5c7eeacd6d1d5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java @@ -0,0 +1,246 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.services.SenderService; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.DEFAULT_MAX_NODES_PER_GROUPING; +import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0) +public class InferenceServiceNodeLocalRateLimitCalculatorTests extends ESIntegTestCase { + + private static final Integer RATE_LIMIT_ASSIGNMENT_MAX_WAIT_TIME_IN_SECONDS = 15; + + public void setUp() throws Exception { + super.setUp(); + assumeTrue( + "If inference_cluster_aware_rate_limiting_feature_flag_enabled=false we'll fallback to " + + "NoopNodeLocalRateLimitCalculator, which shouldn't be tested by this class.", + InferenceAPIClusterAwareRateLimitingFeature.INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG.isEnabled() + ); + } + + public void testInitialClusterGrouping_Correct() throws Exception { + // Start with 2-5 nodes + var numNodes = randomIntBetween(2, 5); + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var firstCalculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); + waitForRateLimitingAssignments(firstCalculator); + + RateLimitAssignment firstAssignment = firstCalculator.getRateLimitAssignment( + ElasticInferenceService.NAME, + TaskType.SPARSE_EMBEDDING + ); + + // Verify that all other nodes land on the same assignment + for (String nodeName : nodeNames.subList(1, nodeNames.size())) { + var calculator = getCalculatorInstance(internalCluster(), nodeName); + waitForRateLimitingAssignments(calculator); + var currentAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); + assertEquals(firstAssignment, currentAssignment); + } + } + + public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws Exception { + // Start with 3-5 nodes + var numNodes = randomIntBetween(3, 5); + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var nodeLeftInCluster = nodeNames.getFirst(); + var 
currentNumberOfNodes = numNodes; + + // Stop all nodes except one + for (String nodeName : nodeNames) { + if (nodeName.equals(nodeLeftInCluster)) { + continue; + } + internalCluster().stopNode(nodeName); + currentNumberOfNodes--; + ensureStableCluster(currentNumberOfNodes); + } + + var calculator = getCalculatorInstance(internalCluster(), nodeLeftInCluster); + waitForRateLimitingAssignments(calculator); + + Set<String> supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); + + // Check assignments for each supported service + for (var service : supportedServices) { + var assignment = calculator.getRateLimitAssignment(service, TaskType.SPARSE_EMBEDDING); + + assertNotNull(assignment); + // Should have exactly one responsible node + assertEquals(1, assignment.responsibleNodes().size()); + // That node should be our remaining node + assertEquals(nodeLeftInCluster, assignment.responsibleNodes().get(0).getName()); + } + } + + public void testGrouping_RespectsMaxNodesPerGroupingLimit() throws Exception { + // Start with more nodes possible per grouping + var numNodes = DEFAULT_MAX_NODES_PER_GROUPING + randomIntBetween(1, 3); + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var calculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); + waitForRateLimitingAssignments(calculator); + + Set<String> supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); + + for (var service : supportedServices) { + var assignment = calculator.getRateLimitAssignment(service, TaskType.SPARSE_EMBEDDING); + + assertNotNull(assignment); + assertThat(DEFAULT_MAX_NODES_PER_GROUPING, equalTo(assignment.responsibleNodes().size())); + } + } + + public void testInitialRateLimitsCalculation_Correct() throws Exception { + // Start with max nodes per grouping (=3) + int numNodes = DEFAULT_MAX_NODES_PER_GROUPING; + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var calculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); + waitForRateLimitingAssignments(calculator); + + Set<String> supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); + + for (var serviceName : supportedServices) { + try (var serviceRegistry = calculator.serviceRegistry()) { + var serviceOptional = serviceRegistry.getService(serviceName); + assertTrue(serviceOptional.isPresent()); + var service = serviceOptional.get(); + + if ((service instanceof SenderService senderService)) { + var sender = senderService.getSender(); + if (sender instanceof HttpRequestSender) { + var assignment = calculator.getRateLimitAssignment(service.name(), TaskType.SPARSE_EMBEDDING); + + assertNotNull(assignment); + assertThat(DEFAULT_MAX_NODES_PER_GROUPING, equalTo(assignment.responsibleNodes().size())); + } + } + } + + } + } + + public void testRateLimits_Decrease_OnNodeJoin() throws Exception { + // Start with 2 nodes + var initialNodes = 2; + var nodeNames = internalCluster().startNodes(initialNodes); + ensureStableCluster(initialNodes); + + var calculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); + waitForRateLimitingAssignments(calculator); + + for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { + var configs = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName); + for (var config : configs) { + // Get initial assignments and rate limits + var initialAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); + assertEquals(2, initialAssignment.responsibleNodes().size());
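// (Illustrative invariant, not part of the patch: these join/leave tests assume the
// calculator sizes a grouping as min(clusterSize, DEFAULT_MAX_NODES_PER_GROUPING) and
// divides the service's overall rate limit across the responsible nodes, so each node's
// share decreases as nodes join and increases as they leave.)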
+ // Add a new node + internalCluster().startNode(); + ensureStableCluster(initialNodes + 1); + waitForRateLimitingAssignments(calculator); + + // Get updated assignments + var updatedAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); + + // Verify number of responsible nodes increased + assertEquals(3, updatedAssignment.responsibleNodes().size()); + } + } + } + + public void testRateLimits_Increase_OnNodeLeave() throws Exception { + // Start with max nodes per grouping (=3) + int numNodes = DEFAULT_MAX_NODES_PER_GROUPING; + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var calculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); + waitForRateLimitingAssignments(calculator); + + for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { + var configs = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName); + for (var config : configs) { + // Get initial assignments and rate limits + var initialAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); + assertThat(DEFAULT_MAX_NODES_PER_GROUPING, equalTo(initialAssignment.responsibleNodes().size())); + + // Remove a node + var nodeToRemove = nodeNames.get(numNodes - 1); + internalCluster().stopNode(nodeToRemove); + ensureStableCluster(numNodes - 1); + waitForRateLimitingAssignments(calculator); + + // Get updated assignments + var updatedAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); + + // Verify number of responsible nodes decreased + assertThat(2, equalTo(updatedAssignment.responsibleNodes().size())); + } + } + } + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateInferencePlugin.class); + } + + private InferenceServiceNodeLocalRateLimitCalculator getCalculatorInstance(InternalTestCluster internalTestCluster, String nodeName) { + InferenceServiceRateLimitCalculator calculatorInstance = internalTestCluster.getInstance( + InferenceServiceRateLimitCalculator.class, + nodeName + ); + assertThat( + "[" + + InferenceServiceNodeLocalRateLimitCalculatorTests.class.getName() + + "] should use [" + + InferenceServiceNodeLocalRateLimitCalculator.class.getName() + + "] as implementation for [" + + InferenceServiceRateLimitCalculator.class.getName() + + "]. 
Provided implementation was [" + + calculatorInstance.getClass().getName() + + "].", + calculatorInstance, + instanceOf(InferenceServiceNodeLocalRateLimitCalculator.class) + ); + return (InferenceServiceNodeLocalRateLimitCalculator) calculatorInstance; + } + + private void waitForRateLimitingAssignments(InferenceServiceNodeLocalRateLimitCalculator calculator) throws Exception { + assertBusy(() -> { + var assignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); + assertNotNull(assignment); + assertFalse(assignment.responsibleNodes().isEmpty()); + }, RATE_LIMIT_ASSIGNMENT_MAX_WAIT_TIME_IN_SECONDS, TimeUnit.SECONDS); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java index b142371ae1b4b..28e182aa2d435 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java @@ -90,7 +90,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOExce webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); var actionCreator = new ElasticInferenceServiceActionCreator( sender, createWithEmptySettings(threadPool), @@ -120,10 +120,11 @@ public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOExce assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap.size(), is(1)); + assertThat(requestMap.size(), is(2)); assertThat(requestMap.get("input"), instanceOf(List.class)); var inputList = (List) requestMap.get("input"); assertThat(inputList, contains("hello world")); + assertThat(requestMap.get("model"), is("my-model-id")); } } @@ -151,7 +152,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); var actionCreator = new ElasticInferenceServiceActionCreator( sender, createWithEmptySettings(threadPool), @@ -174,10 +175,11 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap.size(), is(1)); + assertThat(requestMap.size(), is(2)); assertThat(requestMap.get("input"), instanceOf(List.class)); var inputList = (List) requestMap.get("input"); assertThat(inputList, contains("hello world")); + assertThat(requestMap.get("model"), is("my-model-id")); } } @@ -208,7 +210,7 @@ public void 
testExecute_ReturnsSuccessfulResponse_AfterTruncating() throws IOExc webServer.enqueue(new MockResponse().setResponseCode(413).setBody(responseJsonContentTooLarge)); webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); var actionCreator = new ElasticInferenceServiceActionCreator( sender, createWithEmptySettings(threadPool), @@ -273,7 +275,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); // truncated to 1 token = 3 characters - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), 1); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id", 1); var actionCreator = new ElasticInferenceServiceActionCreator( sender, createWithEmptySettings(threadPool), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java index ed5aa5ba7bea9..57b9b03b9781b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java @@ -63,6 +63,11 @@ public void start() { // do nothing } + @Override + public void updateRateLimitDivisor(int rateLimitDivisor) { + // do nothing + } + @Override public void send( RequestManager requestCreator, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java similarity index 96% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java index 0349e858d9b22..92d13019bc944 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java @@ -58,4 +58,8 @@ public InferenceServiceResults parseResult(Request request, HttpResult result) t } } + @Override + public boolean canHandleStreamingResponses() { + return false; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java index 0100c2812cdc0..7e5b8e6808366 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java @@ -654,6 +654,11 @@ public 
InferenceServiceResults parseResult(Request request, HttpResult result) t public String getRequestType() { return "foo"; } + + @Override + public boolean canHandleStreamingResponses() { + return false; + } }; } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java new file mode 100644 index 0000000000000..4853aa8d2c563 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiUnifiedChatCompletionResponseHandlerTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.elasticsearch.ExceptionsHelper.unwrapCause; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isA; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OpenAiUnifiedChatCompletionResponseHandlerTests extends ESTestCase { + private final OpenAiUnifiedChatCompletionResponseHandler responseHandler = new OpenAiUnifiedChatCompletionResponseHandler( + "chat completions", + (a, b) -> mock() + ); + + public void testFailValidationWithAllFields() throws IOException { + var responseJson = """ + { + "error": { + "type": "not_found_error", + "message": "a message", + "code": "ahh", + "param": "model" + } + } + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"code":"ahh","message":"Received a server error status code for request from inference entity id [abc] status [500]. \ + Error message: [a message]","param":"model","type":"not_found_error"}}""")); + } + + public void testFailValidationWithoutOptionalFields() throws IOException { + var responseJson = """ + { + "error": { + "type": "not_found_error", + "message": "a message" + } + } + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"message":"Received a server error status code for request from inference entity id [abc] status [500]. \ + Error message: [a message]","type":"not_found_error"}}""")); + } + + public void testFailValidationWithInvalidJson() throws IOException { + var responseJson = """ + what? 
this isn't a json + """; + + var errorJson = invalidResponseJson(responseJson); + + assertThat(errorJson, is(""" + {"error":{"code":"bad_request","message":"Received a server error status code for request from inference entity id [abc] status\ + [500]","type":"ErrorResponse"}}""")); + } + + private String invalidResponseJson(String responseJson) throws IOException { + var exception = invalidResponse(responseJson); + assertThat(exception, isA(RetryException.class)); + assertThat(unwrapCause(exception), isA(UnifiedChatCompletionException.class)); + return toJson((UnifiedChatCompletionException) unwrapCause(exception)); + } + + private Exception invalidResponse(String responseJson) { + return expectThrows( + RetryException.class, + () -> responseHandler.validateResponse( + mock(), + mock(), + mockRequest(), + new HttpResult(mock500Response(), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + } + + private static Request mockRequest() { + var request = mock(Request.class); + when(request.getInferenceEntityId()).thenReturn("abc"); + when(request.isStreaming()).thenReturn(true); + return request; + } + + private static HttpResponse mock500Response() { + int statusCode = 500; + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(statusCode); + + var response = mock(HttpResponse.class); + when(response.getStatusLine()).thenReturn(statusLine); + + return response; + } + + private String toJson(UnifiedChatCompletionException e) throws IOException { + try (var builder = XContentFactory.jsonBuilder()) { + e.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + return XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java index 8c8aeba4a0a07..8ca5a91e83429 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java @@ -72,6 +72,38 @@ public void testXContent_InputTypeSearch_EmbeddingTypesByte_TruncateNone() throw {"texts":["abc"],"model":"model","input_type":"search_query","embedding_types":["int8"],"truncate":"none"}""")); } + public void testXContent_InputTypeSearch_EmbeddingTypesBinary_TruncateNone() throws IOException { + var entity = new CohereEmbeddingsRequestEntity( + List.of("abc"), + new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE), + "model", + CohereEmbeddingType.BINARY + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + MatcherAssert.assertThat(xContentResult, is(""" + {"texts":["abc"],"model":"model","input_type":"search_query","embedding_types":["binary"],"truncate":"none"}""")); + } + + public void testXContent_InputTypeSearch_EmbeddingTypesBit_TruncateNone() throws IOException { + var entity = new CohereEmbeddingsRequestEntity( + List.of("abc"), + new CohereEmbeddingsTaskSettings(InputType.SEARCH, 
CohereTruncation.NONE), + "model", + CohereEmbeddingType.BIT + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + MatcherAssert.assertThat(xContentResult, is(""" + {"texts":["abc"],"model":"model","input_type":"search_query","embedding_types":["binary"],"truncate":"none"}""")); + } + public void testXContent_WritesNoOptionalFields_WhenTheyAreNotDefined() throws IOException { var entity = new CohereEmbeddingsRequestEntity(List.of("abc"), CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java index d30b809603eef..f3664b1e23a03 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java @@ -145,6 +145,53 @@ public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() th ); } + public void testCreateRequest_InputTypeSearch_EmbeddingTypeBit_TruncateEnd() throws IOException { + var request = createRequest( + List.of("abc"), + CohereEmbeddingsModelTests.createModel( + "url", + "secret", + new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.END), + null, + null, + "model", + CohereEmbeddingType.BIT + ) + ); + + var httpRequest = request.createHttpRequest(); + MatcherAssert.assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + MatcherAssert.assertThat(httpPost.getURI().toString(), is("url")); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + MatcherAssert.assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + MatcherAssert.assertThat( + httpPost.getLastHeader(CohereUtils.REQUEST_SOURCE_HEADER).getValue(), + is(CohereUtils.ELASTIC_REQUEST_SOURCE) + ); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + MatcherAssert.assertThat( + requestMap, + is( + Map.of( + "texts", + List.of("abc"), + "model", + "model", + "input_type", + "search_query", + "embedding_types", + List.of("binary"), + "truncate", + "end" + ) + ) + ); + } + public void testCreateRequest_TruncateNone() throws IOException { var request = createRequest( List.of("abc"), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java index 5920e70cfdd18..f81f6e58964f0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java @@ -24,18 +24,21 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestEntityTests extends E public void 
testToXContent_SingleInput_UnspecifiedUsageContext() throws IOException { var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( List.of("abc"), + "my-model-id", ElasticInferenceServiceUsageContext.UNSPECIFIED ); String xContentString = xContentEntityToString(entity); assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { - "input": ["abc"] + "input": ["abc"], + "model": "my-model-id" }""")); } public void testToXContent_MultipleInputs_UnspecifiedUsageContext() throws IOException { var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( List.of("abc", "def"), + "my-model-id", ElasticInferenceServiceUsageContext.UNSPECIFIED ); String xContentString = xContentEntityToString(entity); @@ -44,28 +47,39 @@ public void testToXContent_MultipleInputs_UnspecifiedUsageContext() throws IOExc "input": [ "abc", "def" - ] + ], + "model": "my-model-id" } """)); } public void testToXContent_MultipleInputs_SearchUsageContext() throws IOException { - var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity(List.of("abc"), ElasticInferenceServiceUsageContext.SEARCH); + var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( + List.of("abc"), + "my-model-id", + ElasticInferenceServiceUsageContext.SEARCH + ); String xContentString = xContentEntityToString(entity); assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { "input": ["abc"], + "model": "my-model-id", "usage_context": "search" } """)); } public void testToXContent_MultipleInputs_IngestUsageContext() throws IOException { - var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity(List.of("abc"), ElasticInferenceServiceUsageContext.INGEST); + var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( + List.of("abc"), + "my-model-id", + ElasticInferenceServiceUsageContext.INGEST + ); String xContentString = xContentEntityToString(entity); assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { "input": ["abc"], + "model": "my-model-id", "usage_context": "ingest" } """)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java index cb867f15b6d4f..9211b55236b10 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java @@ -34,8 +34,9 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestTests extends ESTestC public void testCreateHttpRequest_UsageContextSearch() throws IOException { var url = "http://eis-gateway.com"; var input = "input"; + var modelId = "my-model-id"; - var request = createRequest(url, input, InputType.SEARCH); + var request = createRequest(url, modelId, input, InputType.SEARCH); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -43,16 +44,18 @@ public void testCreateHttpRequest_UsageContextSearch() throws IOException { assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - 
assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.size(), equalTo(3)); assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("model"), is(modelId)); assertThat(requestMap.get("usage_context"), equalTo("search")); } public void testTraceContextPropagatedThroughHTTPHeaders() { var url = "http://eis-gateway.com"; var input = "input"; + var modelId = "my-model-id"; - var request = createRequest(url, input, InputType.UNSPECIFIED); + var request = createRequest(url, modelId, input, InputType.UNSPECIFIED); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -68,8 +71,9 @@ public void testTraceContextPropagatedThroughHTTPHeaders() { public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { var url = "http://eis-gateway.com"; var input = "abcd"; + var modelId = "my-model-id"; - var request = createRequest(url, input, InputType.UNSPECIFIED); + var request = createRequest(url, modelId, input, InputType.UNSPECIFIED); var truncatedRequest = request.truncate(); var httpRequest = truncatedRequest.createHttpRequest(); @@ -77,15 +81,17 @@ public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { var httpPost = (HttpPost) httpRequest.httpRequestBase(); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(1)); + assertThat(requestMap, aMapWithSize(2)); assertThat(requestMap.get("input"), is(List.of("ab"))); + assertThat(requestMap.get("model"), is(modelId)); } public void testIsTruncated_ReturnsTrue() { var url = "http://eis-gateway.com"; var input = "abcd"; + var modelId = "my-model-id"; - var request = createRequest(url, input, InputType.UNSPECIFIED); + var request = createRequest(url, modelId, input, InputType.UNSPECIFIED); assertFalse(request.getTruncationInfo()[0]); var truncatedRequest = request.truncate(); @@ -109,8 +115,8 @@ public void testInputTypeToUsageContext_Unknown_DefaultToUnspecified() { assertThat(inputTypeToUsageContext(InputType.CLUSTERING), equalTo(ElasticInferenceServiceUsageContext.UNSPECIFIED)); } - public ElasticInferenceServiceSparseEmbeddingsRequest createRequest(String url, String input, InputType inputType) { - var embeddingsModel = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(url); + public ElasticInferenceServiceSparseEmbeddingsRequest createRequest(String url, String modelId, String input, InputType inputType) { + var embeddingsModel = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(url, modelId); return new ElasticInferenceServiceSparseEmbeddingsRequest( TruncatorTests.createTruncator(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java new file mode 100644 index 0000000000000..8278b76a1cee4 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx.rerank; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequestEntity; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.MatchersUtils.equalToIgnoringWhitespaceInJsonString; + +public class IbmWatsonxRerankRequestEntityTests extends ESTestCase { + public void testXContent_Request() throws IOException { + IbmWatsonxRerankTaskSettings taskSettings = new IbmWatsonxRerankTaskSettings(5, true, 100); + var entity = new IbmWatsonxRerankRequestEntity( + "database", + List.of("greenland", "google", "john", "mysql", "potter", "grammar"), + taskSettings, + "model", + "project_id" + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + {"model_id":"model", + "query":"database", + "inputs":[ + {"text":"greenland"}, + {"text":"google"}, + {"text":"john"}, + {"text":"mysql"}, + {"text":"potter"}, + {"text":"grammar"} + ], + "project_id":"project_id", + "parameters":{ + "truncate_input_tokens":100, + "return_options":{ + "inputs":true, + "top_n":5 + } + } + } + """)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java new file mode 100644 index 0000000000000..8c95a01bc3230 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx.rerank; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequest; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModelTests; + +import java.io.IOException; +import java.net.URI; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class IbmWatsonxRerankRequestTests extends ESTestCase { + private static final String AUTH_HEADER_VALUE = "foo"; + + public void testCreateRequest() throws IOException { + var model = "model"; + var projectId = "project_id"; + URI uri = null; + try { + uri = new URI("http://abc.com"); + } catch (Exception ignored) {} + var apiVersion = "2023-05-04"; + var apiKey = "api_key"; + var query = "database"; + List<String> input = List.of("greenland", "google", "john", "mysql", "potter", "grammar"); + + var request = createRequest(model, projectId, uri, apiVersion, apiKey, query, input); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), endsWith(Strings.format("%s=%s", "version", apiVersion))); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(5)); + assertThat( + requestMap, + is( + Map.of( + "project_id", + "project_id", + "model_id", + "model", + "inputs", + List.of( + Map.of("text", "greenland"), + Map.of("text", "google"), + Map.of("text", "john"), + Map.of("text", "mysql"), + Map.of("text", "potter"), + Map.of("text", "grammar") + ), + "query", + "database", + "parameters", + Map.of("return_options", Map.of("top_n", 2, "inputs", true), "truncate_input_tokens", 100) + ) + ) + ); + } + + public static IbmWatsonxRerankRequest createRequest( + String model, + String projectId, + URI uri, + String apiVersion, + String apiKey, + String query, + List<String> input + ) { + var embeddingsModel = IbmWatsonxRerankModelTests.createModel(model, projectId, uri, apiVersion, apiKey); + + return new IbmWatsonxRerankWithoutAuthRequest(query, input, embeddingsModel); + } + + private static class IbmWatsonxRerankWithoutAuthRequest extends IbmWatsonxRerankRequest { + IbmWatsonxRerankWithoutAuthRequest(String query, List<String> input, IbmWatsonxRerankModel model) { + super(query, input, model); + } + + @Override + public void decorateWithAuth(HttpPost httpPost) { + httpPost.setHeader(HttpHeaders.AUTHORIZATION, AUTH_HEADER_VALUE); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java index 
691064b947e23..42dab0a9021bf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java @@ -10,6 +10,8 @@ import org.apache.http.HttpResponse; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingBitResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; @@ -182,10 +184,7 @@ public void testFromResponse_UsesTheFirstValidEmbeddingsEntryInt8_WithInvalidFir new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - MatcherAssert.assertThat( - parsedResults.embeddings(), - is(List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) -1, (byte) 0 }))) - ); + MatcherAssert.assertThat(parsedResults.embeddings(), is(List.of(new InferenceByteEmbedding(new byte[] { (byte) -1, (byte) 0 })))); } public void testFromResponse_ParsesBytes() throws IOException { @@ -220,9 +219,47 @@ public void testFromResponse_ParsesBytes() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); + MatcherAssert.assertThat(parsedResults.embeddings(), is(List.of(new InferenceByteEmbedding(new byte[] { (byte) -1, (byte) 0 })))); + } + + public void testFromResponse_ParsesBytes_FromBinaryEmbeddingsEntry() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "binary": [ + [ + -55, + 74, + 101, + 67, + 83 + ] + ] + }, + "meta": { + "api_version": { + "version": "2" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + + InferenceTextEmbeddingBitResults parsedResults = (InferenceTextEmbeddingBitResults) CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) -1, (byte) 0 }))) + is(List.of(new InferenceByteEmbedding(new byte[] { (byte) -55, (byte) 74, (byte) 101, (byte) 67, (byte) 83 }))) ); } @@ -318,6 +355,59 @@ public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throw ); } + public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat_Binary() throws IOException { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello", + "goodbye" + ], + "embeddings": { + "binary": [ + [ + -55, + 74, + 101, + 67 + ], + [ + 34, + -64, + 97, + 65, + -42 + ] + ] + }, + "meta": { + "api_version": { + "version": "2" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + + InferenceTextEmbeddingBitResults parsedResults = (InferenceTextEmbeddingBitResults) CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), 
responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat( + parsedResults.embeddings(), + is( + List.of( + new InferenceByteEmbedding(new byte[] { (byte) -55, (byte) 74, (byte) 101, (byte) 67 }), + new InferenceByteEmbedding(new byte[] { (byte) 34, (byte) -64, (byte) 97, (byte) 65, (byte) -42 }) + ) + ) + ); + } + public void testFromResponse_FailsWhenEmbeddingsFieldIsNotPresent() { String responseJson = """ { @@ -433,6 +523,82 @@ public void testFromResponse_FailsWhenEmbeddingsByteValue_IsOutsideByteRange_Pos MatcherAssert.assertThat(thrownException.getMessage(), is("Value [128] is out of range for a byte")); } + public void testFromResponse_FailsWhenEmbeddingsBinaryValue_IsOutsideByteRange_Negative() { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "binary": [ + [ + -129, + 127 + ] + ] + }, + "meta": { + "api_version": { + "version": "2" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + + var thrownException = expectThrows( + IllegalArgumentException.class, + () -> CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + MatcherAssert.assertThat(thrownException.getMessage(), is("Value [-129] is out of range for a byte")); + } + + public void testFromResponse_FailsWhenEmbeddingsBinaryValue_IsOutsideByteRange_Positive() { + String responseJson = """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": { + "binary": [ + [ + -128, + 128 + ] + ] + }, + "meta": { + "api_version": { + "version": "2" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + + var thrownException = expectThrows( + IllegalArgumentException.class, + () -> CohereEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + MatcherAssert.assertThat(thrownException.getMessage(), is("Value [128] is out of range for a byte")); + } + public void testFromResponse_FailsToFindAValidEmbeddingType() { String responseJson = """ { @@ -470,7 +636,7 @@ public void testFromResponse_FailsToFindAValidEmbeddingType() { MatcherAssert.assertThat( thrownException.getMessage(), - is("Failed to find a supported embedding type in the Cohere embeddings response. Supported types are [float, int8]") + is("Failed to find a supported embedding type in the Cohere embeddings response. Supported types are [binary, float, int8]") ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java new file mode 100644 index 0000000000000..6b59f25896a48 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java @@ -0,0 +1,166 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.inference.external.response.ibmwatsonx; + +import org.apache.http.HttpResponse; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class IbmWatsonxRankedResponseEntityTests extends ESTestCase { + + public void testResponseLiteral() throws IOException { + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteral.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + List<RankedDocsResults.RankedDoc> expected = responseLiteralDocs(); + for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) { + assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index()); + } + } + + public void testGeneratedResponse() throws IOException { + int numDocs = randomIntBetween(1, 10); + + List<RankedDocsResults.RankedDoc> expected = new ArrayList<>(numDocs); + StringBuilder responseBuilder = new StringBuilder(); + + responseBuilder.append("{"); + responseBuilder.append("\"results\": ["); + List<Integer> indices = linear(numDocs); + List<Float> scores = linearFloats(numDocs); + for (int i = 0; i < numDocs; i++) { + // pull a random remaining index so the generated ranking order is shuffled + int index = indices.remove(randomInt(indices.size() - 1)); + + responseBuilder.append("{"); + responseBuilder.append("\"index\":").append(index).append(","); + responseBuilder.append("\"score\":").append(scores.get(i).toString()).append("}"); + expected.add(new RankedDocsResults.RankedDoc(index, scores.get(i), null)); + if (i < numDocs - 1) { + responseBuilder.append(","); + } + } + responseBuilder.append("],"); + // "input_token_count" is an assumed field name (watsonx rerank responses report a token count); closing with it keeps the generated payload valid JSON + responseBuilder.append("\"input_token_count\":").append(randomIntBetween(1, 10)).append("}"); + + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseBuilder.toString().getBytes(StandardCharsets.UTF_8)) + ); + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) { + assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index()); + } + } + + private ArrayList<RankedDocsResults.RankedDoc> responseLiteralDocs() { + var list = new ArrayList<RankedDocsResults.RankedDoc>(); + + list.add(new RankedDocsResults.RankedDoc(2, 0.98005307F, null)); + list.add(new RankedDocsResults.RankedDoc(3, 0.27904198F, null)); + list.add(new RankedDocsResults.RankedDoc(0, 0.10194652F, null)); + return list; + } + + private final String responseLiteral = """ + { + "results": [ + { + "index": 2, + "score": 0.98005307 + }, + { + "index": 3, + "score": 0.27904198 + }, + { + "index": 0, + "score": 0.10194652 + } + ] + } + """; + + public void testResponseLiteralWithDocuments() throws IOException { + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteralWithDocuments.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class));
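+ // when the service echoes the documents back, each ranked doc should also carry its original input text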
+ MatcherAssert.assertThat(((RankedDocsResults) parsedResults).getRankedDocs(), is(responseLiteralDocsWithText)); + } + + private final String responseLiteralWithDocuments = """ + { + "results": [ + { + "input": { + "text": "Washington, D.C.." + }, + "index": 2, + "score": 0.98005307 + }, + { + "input": { + "text": "Capital punishment has existed in the United States since before the United States was a country. " + }, + "index": 3, + "score": 0.27904198 + }, + { + "input": { + "text": "Carson City is the capital city of the American state of Nevada." + }, + "index": 0, + "score": 0.10194652 + } + ] + } + """; + + private final List<RankedDocsResults.RankedDoc> responseLiteralDocsWithText = List.of( + new RankedDocsResults.RankedDoc(2, 0.98005307F, "Washington, D.C.."), + new RankedDocsResults.RankedDoc( + 3, + 0.27904198F, + "Capital punishment has existed in the United States since before the United States was a country. " + ), + new RankedDocsResults.RankedDoc(0, 0.10194652F, "Carson City is the capital city of the American state of Nevada.") + ); + + private ArrayList<Integer> linear(int n) { + ArrayList<Integer> list = new ArrayList<>(); + for (int i = 0; i <= n; i++) { + list.add(i); + } + return list; + } + + // creates a list of floats of monotonically decreasing magnitude + private ArrayList<Float> linearFloats(int n) { + ArrayList<Float> list = new ArrayList<>(); + float startValue = 1.0f; + float decrement = startValue / (n + 1); + for (int i = 0; i <= n; i++) { + list.add(startValue - (i * decrement)); + } + return list; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java index 57d71a48a4aeb..f877b0182d888 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapperTests.java @@ -55,6 +55,24 @@ public void testIsEnabled() { assertTrue(InferenceMetadataFieldsMapper.isEnabled(settings)); } + public void testIsEnabledByDefault() { + var settings = Settings.builder() + .put( + IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), + IndexVersionUtils.getPreviousVersion(InferenceMetadataFieldsMapper.USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT) + ) + .build(); + assertFalse(InferenceMetadataFieldsMapper.isEnabled(settings)); + + settings = Settings.builder() + .put( + IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), + InferenceMetadataFieldsMapper.USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT + ) + .build(); + assertTrue(InferenceMetadataFieldsMapper.isEnabled(settings)); + } + @Override public void testFieldHasValue() { assertTrue( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index e837e1b0db989..5d1c058c89da0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -136,11 +136,6 @@ protected void minimalMapping(XContentBuilder b) throws IOException { b.field("type", "semantic_text"); } - @Override - protected String 
minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { - return "cannot have nested fields when index is in [index.mode=time_series]"; - } - @Override protected void metaMapping(XContentBuilder b) throws IOException { super.metaMapping(b); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java index b6d455dd233ba..93c3ffe5d14fb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java @@ -31,6 +31,7 @@ import java.util.List; import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.DEFAULT_RERANK_ID; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -108,7 +109,6 @@ public void testParserDefaults() throws IOException { } }, "field": "my-field", - "inference_id": "my-inference-id", "inference_text": "my-inference-text" }"""; @@ -118,6 +118,7 @@ public void testParserDefaults() throws IOException { new RetrieverParserContext(new SearchUsage(), nf -> true) ); assertEquals(DEFAULT_RANK_WINDOW_SIZE, parsed.rankWindowSize()); + assertEquals(DEFAULT_RERANK_ID, parsed.inferenceId()); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 162bcc8f09713..65e4d049ef58b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchResponseUtils; @@ -41,6 +42,7 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; @@ -52,6 +54,8 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class ModelRegistryTests extends ESTestCase { @@ -295,6 +299,37 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { ); } + public void testRemoveDefaultConfigs_DoesNotCallClient_WhenPassedAnEmptySet() { + var client = mock(Client.class); + + var registry = new ModelRegistry(client); + var listener = new PlainActionFuture<Boolean>(); + + registry.removeDefaultConfigs(Set.of(), listener); + + assertTrue(listener.actionGet(TIMEOUT)); + verify(client, times(0)).execute(any(), any(), any()); + } +
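+ // a delete that overlaps an in-flight update should surface a CONFLICT so callers can retry once the update finishes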
+ public void testDeleteModels_Returns_ConflictException_WhenModelIsBeingAdded() { + var client = mockClient(); + + var registry = new ModelRegistry(client); + var model = TestModel.createRandomInstance(); + var newModel = TestModel.createRandomInstance(); + registry.updateModelTransaction(newModel, model, new PlainActionFuture<>()); + + var listener = new PlainActionFuture<Boolean>(); + + registry.deleteModels(Set.of(newModel.getInferenceEntityId()), listener); + var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + containsString("are currently being updated, please wait until after they are finished updating to delete.") + ); + assertThat(exception.status(), is(RestStatus.CONFLICT)); + } + public void testIdMatchedDefault() { var defaultConfigIds = new ArrayList<InferenceService.DefaultConfigId>(); defaultConfigIds.add( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/BaseInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/BaseInferenceActionTests.java index 5528c80066b0a..61d28fae402ba 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/BaseInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/BaseInferenceActionTests.java @@ -21,6 +21,8 @@ import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; +import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.junit.Before; @@ -42,6 +44,11 @@ public class BaseInferenceActionTests extends RestActionTestCase { @Before public void setUpAction() { controller().registerHandler(new BaseInferenceAction() { + @Override + protected boolean shouldStream() { + return false; + } + @Override protected ActionListener<InferenceAction.Response> listener(RestChannel channel) { return new RestChunkedToXContentListener<>(channel); @@ -102,10 +109,10 @@ public void testParseTimeout_ReturnsDefaultTimeout() { public void testUsesDefaultTimeout() { SetOnce<Boolean> executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(InferenceAction.Request.class)); + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); - var request = (InferenceAction.Request) actionRequest; - assertThat(request.getInferenceTimeout(), is(InferenceAction.Request.DEFAULT_TIMEOUT)); + var request = (InferenceActionProxy.Request) actionRequest; + assertThat(request.getTimeout(), is(InferenceAction.Request.DEFAULT_TIMEOUT)); executeCalled.set(true); return createResponse(); @@ -122,10 +129,10 @@ public void testUsesDefaultTimeout() { public void testUses3SecondTimeoutFromParams() { SetOnce<Boolean> executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(InferenceAction.Request.class)); + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); - var request = (InferenceAction.Request) actionRequest; - assertThat(request.getInferenceTimeout(), is(TimeValue.timeValueSeconds(3))); + var request = (InferenceActionProxy.Request) actionRequest; + assertThat(request.getTimeout(), is(TimeValue.timeValueSeconds(3)));
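+ // record that the verifier ran so the test can assert the proxy request was actually dispatched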
executeCalled.set(true); return createResponse(); @@ -142,9 +149,7 @@ public void testUses3SecondTimeoutFromParams() { static InferenceAction.Response createResponse() { return new InferenceAction.Response( - new InferenceTextEmbeddingByteResults( - List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) -1 })) - ) + new InferenceTextEmbeddingByteResults(List.of(new InferenceByteEmbedding(new byte[] { (byte) -1 }))) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionProxyTests.java similarity index 90% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionProxyTests.java index 1b0df1b4a20da..433e33fe15210 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionProxyTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import org.junit.Before; import static org.elasticsearch.xpack.inference.rest.BaseInferenceActionTests.createResponse; @@ -21,7 +21,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -public class RestInferenceActionTests extends RestActionTestCase { +public class RestInferenceActionProxyTests extends RestActionTestCase { @Before public void setUpAction() { @@ -31,9 +31,9 @@ public void setUpAction() { public void testStreamIsFalse() { SetOnce<Boolean> executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(InferenceAction.Request.class)); + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); - var request = (InferenceAction.Request) actionRequest; + var request = (InferenceActionProxy.Request) actionRequest; assertThat(request.isStreaming(), is(false)); executeCalled.set(true); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java index f67680ef6b625..e69dd3fda6240 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java @@ -9,13 +9,18 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.rest.AbstractRestChannel; +import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; import 
org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceActionProxy; import org.junit.After; import org.junit.Before; @@ -42,9 +47,9 @@ public void tearDownAction() { public void testStreamIsTrue() { SetOnce<Boolean> executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(InferenceAction.Request.class)); + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); - var request = (InferenceAction.Request) actionRequest; + var request = (InferenceActionProxy.Request) actionRequest; assertThat(request.isStreaming(), is(true)); executeCalled.set(true); @@ -58,4 +63,50 @@ public void testStreamIsTrue() { dispatchRequest(inferenceRequest); assertThat(executeCalled.get(), equalTo(true)); } + + public void testStreamIsTrue_ChatCompletion() { + SetOnce<Boolean> executeCalled = new SetOnce<>(); + verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { + assertThat(actionRequest, instanceOf(InferenceActionProxy.Request.class)); + + var request = (InferenceActionProxy.Request) actionRequest; + assertThat(request.isStreaming(), is(true)); + + executeCalled.set(true); + return createResponse(); + })); + + var requestBody = """ + { + "messages": [ + { + "content": "abc", + "role": "user" + } + ] + } + """; + + RestRequest inferenceRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) + .withPath("_inference/chat_completion/test/_stream") + .withContent(new BytesArray(requestBody), XContentType.JSON) + .build(); + + final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); + dispatchRequest(inferenceRequest, new AbstractRestChannel(inferenceRequest, true) { + @Override + public void sendResponse(RestResponse response) { + responseSetOnce.set(response); + } + }); + + // the response content will be null when there is no error + assertNull(responseSetOnce.get().content()); + assertThat(executeCalled.get(), equalTo(true)); + } + + private void dispatchRequest(final RestRequest request, final RestChannel channel) { + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + controller().dispatchRequest(request, channel, threadContext); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java deleted file mode 100644 index 9dc23c890c14d..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.rest; - -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.rest.AbstractRestChannel; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; -import org.junit.After; -import org.junit.Before; - -import static org.elasticsearch.xpack.inference.rest.BaseInferenceActionTests.createResponse; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; - -public class RestUnifiedCompletionInferenceActionTests extends RestActionTestCase { - private final SetOnce<ThreadPool> threadPool = new SetOnce<>(); - - @Before - public void setUpAction() { - threadPool.set(new TestThreadPool(getTestName())); - controller().registerHandler(new RestUnifiedCompletionInferenceAction(threadPool)); - } - - @After - public void tearDownAction() { - terminate(threadPool.get()); - } - - public void testStreamIsTrue() { - SetOnce<Boolean> executeCalled = new SetOnce<>(); - verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(UnifiedCompletionAction.Request.class)); - - var request = (UnifiedCompletionAction.Request) actionRequest; - assertThat(request.isStreaming(), is(true)); - - executeCalled.set(true); - return createResponse(); - })); - - var requestBody = """ - { - "messages": [ - { - "content": "abc", - "role": "user" - } - ] - } - """; - - RestRequest inferenceRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withPath("_inference/completion/test/_unified") - .withContent(new BytesArray(requestBody), XContentType.JSON) - .build(); - - final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - dispatchRequest(inferenceRequest, new AbstractRestChannel(inferenceRequest, true) { - @Override - public void sendResponse(RestResponse response) { - responseSetOnce.set(response); - } - }); - - // the response content will be null when there is no error - assertNull(responseSetOnce.get().content()); - assertThat(executeCalled.get(), equalTo(true)); - } - - private void dispatchRequest(final RestRequest request, final RestChannel channel) { - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller().dispatchRequest(request, channel, threadContext); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingBitResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingBitResultsTests.java new file mode 100644 index 0000000000000..45b9627371575 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingBitResultsTests.java @@ -0,0 +1,135 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingBitResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class InferenceTextEmbeddingBitResultsTests extends AbstractWireSerializingTestCase<InferenceTextEmbeddingBitResults> { + public static InferenceTextEmbeddingBitResults createRandomResults() { + int embeddings = randomIntBetween(1, 10); + List<InferenceByteEmbedding> embeddingResults = new ArrayList<>(embeddings); + + for (int i = 0; i < embeddings; i++) { + embeddingResults.add(createRandomEmbedding()); + } + + return new InferenceTextEmbeddingBitResults(embeddingResults); + } + + private static InferenceByteEmbedding createRandomEmbedding() { + int columns = randomIntBetween(1, 10); + byte[] bytes = new byte[columns]; + + for (int i = 0; i < columns; i++) { + bytes[i] = randomByte(); + } + + return new InferenceByteEmbedding(bytes); + } + + public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOException { + var entity = new InferenceTextEmbeddingBitResults(List.of(new InferenceByteEmbedding(new byte[] { (byte) 23 }))); + + String xContentResult = Strings.toString(entity, true, true); + assertThat(xContentResult, is(""" + { + "text_embedding_bits" : [ + { + "embedding" : [ + 23 + ] + } + ] + }""")); + } + + public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws IOException { + var entity = new InferenceTextEmbeddingBitResults( + List.of(new InferenceByteEmbedding(new byte[] { (byte) 23 }), new InferenceByteEmbedding(new byte[] { (byte) 24 })) + ); + + String xContentResult = Strings.toString(entity, true, true); + assertThat(xContentResult, is(""" + { + "text_embedding_bits" : [ + { + "embedding" : [ + 23 + ] + }, + { + "embedding" : [ + 24 + ] + } + ] + }""")); + } + + public void testTransformToCoordinationFormat() { + var results = new InferenceTextEmbeddingBitResults( + List.of( + new InferenceByteEmbedding(new byte[] { (byte) 23, (byte) 24 }), + new InferenceByteEmbedding(new byte[] { (byte) 25, (byte) 26 }) + ) + ).transformToCoordinationFormat(); + + assertThat( + results, + is( + List.of( + new MlTextEmbeddingResults(InferenceTextEmbeddingBitResults.TEXT_EMBEDDING_BITS, new double[] { 23F, 24F }, false), + new MlTextEmbeddingResults(InferenceTextEmbeddingBitResults.TEXT_EMBEDDING_BITS, new double[] { 25F, 26F }, false) + ) + ) + ); + } + + @Override + protected Writeable.Reader<InferenceTextEmbeddingBitResults> instanceReader() { + return InferenceTextEmbeddingBitResults::new; + } + + @Override + protected InferenceTextEmbeddingBitResults createTestInstance() { + return createRandomResults(); + } + + @Override + protected InferenceTextEmbeddingBitResults mutateInstance(InferenceTextEmbeddingBitResults instance) throws IOException { + // if true we reduce the embeddings list by a random amount, if false we add an embedding to the list + if (randomBoolean()) { + // -1 to remove at least one item from the list + int end = randomInt(instance.embeddings().size() - 1); + return new 
InferenceTextEmbeddingBitResults(instance.embeddings().subList(0, end)); + } else { + List<InferenceByteEmbedding> embeddings = new ArrayList<>(instance.embeddings()); + embeddings.add(createRandomEmbedding()); + return new InferenceTextEmbeddingBitResults(embeddings); + } + } + + public static Map<String, Object> buildExpectationByte(List<List<Byte>> embeddings) { + return Map.of( + InferenceTextEmbeddingBitResults.TEXT_EMBEDDING_BITS, + embeddings.stream().map(embedding -> Map.of(InferenceByteEmbedding.EMBEDDING, embedding)).toList() + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java index c6749e9822cf4..d932f36fb25a7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; @@ -23,7 +24,7 @@ public class InferenceTextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase { public static InferenceTextEmbeddingByteResults createRandomResults() { int embeddings = randomIntBetween(1, 10); - List<InferenceTextEmbeddingByteResults.InferenceByteEmbedding> embeddingResults = new ArrayList<>(embeddings); + List<InferenceByteEmbedding> embeddingResults = new ArrayList<>(embeddings); for (int i = 0; i < embeddings; i++) { embeddingResults.add(createRandomEmbedding()); @@ -32,7 +33,7 @@ public static InferenceTextEmbeddingByteResults createRandomResults() { return new InferenceTextEmbeddingByteResults(embeddingResults); } - private static InferenceTextEmbeddingByteResults.InferenceByteEmbedding createRandomEmbedding() { + private static InferenceByteEmbedding createRandomEmbedding() { int columns = randomIntBetween(1, 10); byte[] bytes = new byte[columns]; @@ -40,13 +41,11 @@ private static InferenceTextEmbeddingByteResults.InferenceByteEmbedding createRa bytes[i] = randomByte(); } - return new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(bytes); + return new InferenceByteEmbedding(bytes); } public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOE - var entity = new InferenceTextEmbeddingByteResults( - List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23 })) - ); + var entity = new InferenceTextEmbeddingByteResults(List.of(new InferenceByteEmbedding(new byte[] { (byte) 23 }))); String xContentResult = Strings.toString(entity, true, true); assertThat(xContentResult, is(""" @@ -63,10 +62,7 @@ public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOE public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws IOException { var entity = new InferenceTextEmbeddingByteResults( - List.of( - new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23 }), - new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 24 }) - ) + List.of(new InferenceByteEmbedding(new byte[] { (byte) 23 }), new 
InferenceByteEmbedding(new byte[] { (byte) 24 })) ); String xContentResult = Strings.toString(entity, true, true); @@ -90,8 +86,8 @@ public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws I public void testTransformToCoordinationFormat() { var results = new InferenceTextEmbeddingByteResults( List.of( - new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23, (byte) 24 }), - new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 25, (byte) 26 }) + new InferenceByteEmbedding(new byte[] { (byte) 23, (byte) 24 }), + new InferenceByteEmbedding(new byte[] { (byte) 25, (byte) 26 }) ) ).transformToCoordinationFormat(); @@ -124,7 +120,7 @@ protected InferenceTextEmbeddingByteResults mutateInstance(InferenceTextEmbeddin int end = randomInt(instance.embeddings().size() - 1); return new InferenceTextEmbeddingByteResults(instance.embeddings().subList(0, end)); } else { - List<InferenceTextEmbeddingByteResults.InferenceByteEmbedding> embeddings = new ArrayList<>(instance.embeddings()); + List<InferenceByteEmbedding> embeddings = new ArrayList<>(instance.embeddings()); embeddings.add(createRandomEmbedding()); return new InferenceTextEmbeddingByteResults(embeddings); } @@ -133,9 +129,7 @@ protected InferenceTextEmbeddingByteResults mutateInstance(InferenceTextEmbeddin public static Map<String, Object> buildExpectationByte(List<List<Byte>> embeddings) { return Map.of( InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - embeddings.stream() - .map(embedding -> Map.of(InferenceTextEmbeddingByteResults.InferenceByteEmbedding.EMBEDDING, embedding)) - .toList() + embeddings.stream().map(embedding -> Map.of(InferenceByteEmbedding.EMBEDDING, embedding)).toList() ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java index 2c405aaeaba3f..56bd690a9cdbf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.InferenceByteEmbedding; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; @@ -141,7 +142,7 @@ public static Map buildExpectationFloat(List embeddings public static Map<String, Object> buildExpectationByte(List<byte[]> embeddings) { return Map.of( InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - embeddings.stream().map(InferenceTextEmbeddingByteResults.InferenceByteEmbedding::new).toList() + embeddings.stream().map(InferenceByteEmbedding::new).toList() ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java index 7cfd231be39f3..637ae726572a4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/InferenceEventsAssertion.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.XContentFactory; import org.hamcrest.MatcherAssert; @@ -85,6 +86,16 @@ public InferenceEventsAssertion hasErrorContaining(String message) { return this; } + public InferenceEventsAssertion hasErrorMatching(CheckedConsumer<Throwable, Exception> matcher) { + hasError(); + try { + matcher.accept(error); + } catch (Exception e) { + fail(e); + } + return this; + } + public InferenceEventsAssertion hasEvents(String... events) { Arrays.stream(events).forEach(this::hasEvent); return this; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java index 92544d5535acb..1ca50d1887ee1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java @@ -490,7 +490,7 @@ public void testGetConfiguration() throws Exception { "http_schema": { "description": "", "label": "HTTP Schema", - "required": true, + "required": false, "sensitive": false, "updatable": false, "type": "str", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index c11d4b4c7923d..6505c280c295a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -57,6 +57,7 @@ import org.junit.Before; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -154,69 +155,80 @@ public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOExcepti @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createAmazonBedrockService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "amazonbedrock", - "name": "Amazon Bedrock", - "task_types": ["text_embedding", "completion"], - "configurations": { - "secret_key": { - "description": "A valid AWS secret key that is paired with the access_key.", - "label": "Secret Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "provider": { - "description": "The model provider for your deployment.", - "label": "Provider", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "access_key": { - "description": "A valid AWS access key that has permissions to use Amazon Bedrock.", - 
"label": "Access Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "model": { - "description": "The base model ID or an ARN to a custom model based on a foundational model.", - "label": "Model", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "rate_limit.requests_per_minute": { - "description": "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.", - "label": "Rate Limit", - "required": false, - "sensitive": false, - "updatable": false, - "type": "int", - "supported_task_types": ["text_embedding", "completion"] - }, - "region": { - "description": "The region that your model or ARN is deployed in.", - "label": "Region", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] + String content = XContentHelper.stripWhitespace( + """ + { + "service": "amazonbedrock", + "name": "Amazon Bedrock", + "task_types": ["text_embedding", "completion"], + "configurations": { + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-embed-text.html.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "secret_key": { + "description": "A valid AWS secret key that is paired with the access_key.", + "label": "Secret Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "provider": { + "description": "The model provider for your deployment.", + "label": "Provider", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "access_key": { + "description": "A valid AWS access key that has permissions to use Amazon Bedrock.", + "label": "Access Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "model": { + "description": "The base model ID or an ARN to a custom model based on a foundational model.", + "label": "Model", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "rate_limit.requests_per_minute": { + "description": "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "completion"] + }, + "region": { + "description": "The region that your model or ARN is deployed in.", + "label": "Region", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + } } } - } - """); + """ + ); InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON @@ -1370,8 +1382,8 @@ public void testInfer_UnauthorizedResponse() throws IOException { public void testSupportsStreaming() throws IOException { try (var service = new 
AmazonBedrockService(mock(), mock(), createWithEmptySettings(mock()))) { - assertTrue(service.canStream(TaskType.COMPLETION)); - assertTrue(service.canStream(TaskType.ANY)); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java index 33101a3e02661..f48cf3b9f4852 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java @@ -47,6 +47,7 @@ import org.junit.Before; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -655,8 +656,8 @@ public void testGetConfiguration() throws Exception { public void testSupportsStreaming() throws IOException { try (var service = new AnthropicService(mock(), createWithEmptySettings(mock()))) { - assertTrue(service.canStream(TaskType.COMPLETION)); - assertTrue(service.canStream(TaskType.ANY)); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index d2e4652b96488..cebea7901b956 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -61,6 +61,7 @@ import java.io.IOException; import java.net.URISyntaxException; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -1389,60 +1390,71 @@ public void testInfer_StreamRequest_ErrorResponse() throws Exception { @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "azureaistudio", - "name": "Azure AI Studio", - "task_types": ["text_embedding", "completion"], - "configurations": { - "endpoint_type": { - "description": "Specifies the type of endpoint that is used in your model deployment.", - "label": "Endpoint Type", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "provider": { - "description": "The model provider for your deployment.", - "label": "Provider", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "api_key": { - "description": "API Key for the provider you're connecting to.", - "label": "API Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "rate_limit.requests_per_minute": { - "description": "Minimize the number of rate limit errors.", - "label": "Rate Limit", - 
"required": false, - "sensitive": false, - "updatable": false, - "type": "int", - "supported_task_types": ["text_embedding", "completion"] - }, - "target": { - "description": "The target URL of your Azure AI Studio model deployment.", - "label": "Target", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] + String content = XContentHelper.stripWhitespace( + """ + { + "service": "azureaistudio", + "name": "Azure AI Studio", + "task_types": ["text_embedding", "completion"], + "configurations": { + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-embeddings.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "endpoint_type": { + "description": "Specifies the type of endpoint that is used in your model deployment.", + "label": "Endpoint Type", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "provider": { + "description": "The model provider for your deployment.", + "label": "Provider", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "api_key": { + "description": "API Key for the provider you're connecting to.", + "label": "API Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "rate_limit.requests_per_minute": { + "description": "Minimize the number of rate limit errors.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "completion"] + }, + "target": { + "description": "The target URL of your Azure AI Studio model deployment.", + "label": "Target", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + } } } - } - """); + """ + ); InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON @@ -1460,8 +1472,8 @@ public void testGetConfiguration() throws Exception { public void testSupportsStreaming() throws IOException { try (var service = new AzureAiStudioService(mock(), createWithEmptySettings(mock()))) { - assertTrue(service.canStream(TaskType.COMPLETION)); - assertTrue(service.canStream(TaskType.ANY)); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 52527d74aad19..e67a5dac0e7c2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -54,6 +54,7 @@ import java.io.IOException; import java.net.URI; import 
java.net.URISyntaxException; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -1473,6 +1474,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "completion"] }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#request-body-1.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, "entra_id": { "description": "You must provide either an API key or an Entra ID.", "label": "Entra ID", @@ -1539,8 +1549,8 @@ public void testGetConfiguration() throws Exception { public void testSupportsStreaming() throws IOException { try (var service = new AzureOpenAiService(mock(), createWithEmptySettings(mock()))) { - assertTrue(service.canStream(TaskType.COMPLETION)); - assertTrue(service.canStream(TaskType.ANY)); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 86b3edc4130da..90e5dc6890c45 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -58,6 +58,7 @@ import org.junit.Before; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -1648,6 +1649,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "rerank", "completion"] }, + "model_id": { + "description": "The name of the model to use for the inference task.", + "label": "Model ID", + "required": false, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "rerank", "completion"] + }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", "label": "Rate Limit", @@ -1677,8 +1687,8 @@ public void testGetConfiguration() throws Exception { public void testSupportsStreaming() throws IOException { try (var service = new CohereService(mock(), createWithEmptySettings(mock()))) { - assertTrue(service.canStream(TaskType.COMPLETION)); - assertTrue(service.canStream(TaskType.ANY)); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java index 3aa423d5bbafd..13e6cbdefe62a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java @@ -50,6 +50,44 @@ public void 
testTranslateToVersion_ReturnsFloat_WhenVersionOnByteEnumAddition_Wh ); } + public void testTranslateToVersion_ReturnsInt8_WhenVersionIsBeforeBitEnumAdditionPatch_WhenSpecifyingBit() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.BIT, new TransportVersion(8_840_0_00)), + is(CohereEmbeddingType.INT8) + ); + } + + public void testTranslateToVersion_ReturnsInt8_WhenVersionIsBeforeBitEnumAddition_WhenSpecifyingBit() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.BIT, new TransportVersion(9_000_0_00)), + is(CohereEmbeddingType.INT8) + ); + } + + public void testTranslateToVersion_ReturnsBit_WhenVersionOnBitEnumAddition_WhenSpecifyingBit() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.BIT, TransportVersions.COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED), + is(CohereEmbeddingType.BIT) + ); + } + + public void testTranslateToVersion_ReturnsBit_WhenVersionOnBitEnumAdditionPatch_WhenSpecifyingBit() { + assertThat( + CohereEmbeddingType.translateToVersion( + CohereEmbeddingType.BIT, + TransportVersions.COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED_BACKPORT_8_X + ), + is(CohereEmbeddingType.BIT) + ); + } + + public void testTranslateToVersion_ReturnsFloat_WhenVersionOnBitEnumAddition_WhenSpecifyingFloat() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.FLOAT, TransportVersions.COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED), + is(CohereEmbeddingType.FLOAT) + ); + } + public void testFromElementType_CovertsFloatToCohereEmbeddingTypeFloat() { assertThat(CohereEmbeddingType.fromElementType(DenseVectorFieldMapper.ElementType.FLOAT), is(CohereEmbeddingType.FLOAT)); } @@ -57,4 +95,8 @@ public void testFromElementType_CovertsFloatToCohereEmbeddingTypeFloat() { public void testFromElementType_CovertsByteToCohereEmbeddingTypeByte() { assertThat(CohereEmbeddingType.fromElementType(DenseVectorFieldMapper.ElementType.BYTE), is(CohereEmbeddingType.BYTE)); } + + public void testFromElementType_ConvertsBitToCohereEmbeddingTypeBinary() { + assertThat(CohereEmbeddingType.fromElementType(DenseVectorFieldMapper.ElementType.BIT), is(CohereEmbeddingType.BIT)); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 73ebd6c6c0505..544676cfa7cc7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -218,7 +218,7 @@ public void testFromMap_InvalidEmbeddingType_ThrowsError_ForRequest() { is( Strings.format( "Validation Failed: 1: [service_settings] Invalid value [abc] received. " - + "[embedding_type] must be one of [byte, float, int8];" + + "[embedding_type] must be one of [binary, bit, byte, float, int8];" ) ) ); @@ -238,7 +238,7 @@ public void testFromMap_InvalidEmbeddingType_ThrowsError_ForPersistent() { is( Strings.format( "Validation Failed: 1: [service_settings] Invalid value [abc] received. 
" - + "[embedding_type] must be one of [byte, float];" + + "[embedding_type] must be one of [bit, byte, float];" ) ) ); @@ -289,6 +289,16 @@ public void testFromMap_ConvertsInt8_ToCohereEmbeddingTypeInt8() { ); } + public void testFromMap_ConvertsBit_ToCohereEmbeddingTypeBit() { + assertThat( + CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, CohereEmbeddingType.BIT.toString())), + ConfigurationParseContext.REQUEST + ), + is(new CohereEmbeddingsServiceSettings(new CohereServiceSettings(), CohereEmbeddingType.BIT)) + ); + } + public void testFromMap_PreservesEmbeddingTypeFloat() { assertThat( CohereEmbeddingsServiceSettings.fromMap( @@ -314,6 +324,8 @@ public void testFromCohereOrDenseVectorEnumValues() { assertEquals(CohereEmbeddingType.BYTE, CohereEmbeddingsServiceSettings.fromCohereOrDenseVectorEnumValues("byte", validation)); assertEquals(CohereEmbeddingType.INT8, CohereEmbeddingsServiceSettings.fromCohereOrDenseVectorEnumValues("int8", validation)); assertEquals(CohereEmbeddingType.FLOAT, CohereEmbeddingsServiceSettings.fromCohereOrDenseVectorEnumValues("float", validation)); + assertEquals(CohereEmbeddingType.BINARY, CohereEmbeddingsServiceSettings.fromCohereOrDenseVectorEnumValues("binary", validation)); + assertEquals(CohereEmbeddingType.BIT, CohereEmbeddingsServiceSettings.fromCohereOrDenseVectorEnumValues("bit", validation)); assertTrue(validation.validationErrors().isEmpty()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java new file mode 100644 index 0000000000000..e477ffb10def0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elastic; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class ElasticInferenceServiceSettingsTests extends ESTestCase { + + private static final String ELASTIC_INFERENCE_SERVICE_URL = "http://elastic-inference-service"; + private static final String ELASTIC_INFERENCE_SERVICE_LEGACY_URL = "http://elastic-inference-service-legacy"; + + public void testGetElasticInferenceServiceUrl_WithUrlSetting() { + var settings = Settings.builder() + .put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), ELASTIC_INFERENCE_SERVICE_URL) + .build(); + + var eisSettings = new ElasticInferenceServiceSettings(settings); + + assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_URL)); + } + + public void testGetElasticInferenceServiceUrl_WithLegacyUrlSetting() { + var settings = Settings.builder() + .put(ElasticInferenceServiceSettings.EIS_GATEWAY_URL.getKey(), ELASTIC_INFERENCE_SERVICE_LEGACY_URL) + .build(); + + var eisSettings = new ElasticInferenceServiceSettings(settings); + + assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_LEGACY_URL)); + } + + public void testGetElasticInferenceServiceUrl_WithUrlSetting_TakesPrecedenceOverLegacyUrlSetting() { + var settings = Settings.builder() + .put(ElasticInferenceServiceSettings.EIS_GATEWAY_URL.getKey(), ELASTIC_INFERENCE_SERVICE_LEGACY_URL) + .put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), ELASTIC_INFERENCE_SERVICE_URL) + .build(); + + var eisSettings = new ElasticInferenceServiceSettings(settings); + + assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_URL)); + } + + public void testGetElasticInferenceServiceUrl_WithoutUrlSetting() { + var eisSettings = new ElasticInferenceServiceSettings(Settings.EMPTY); + + assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo("")); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java index c9f4234331221..02bbbb844c04f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java @@ -11,20 +11,19 @@ import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; public class ElasticInferenceServiceSparseEmbeddingsModelTests extends ESTestCase { - public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url) { - return createModel(url, null); + public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url, String modelId) { + return createModel(url, modelId, null); } - public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url, Integer maxInputTokens) { + public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url, String modelId, Integer maxInputTokens) { return 
new ElasticInferenceServiceSparseEmbeddingsModel( "id", TaskType.SPARSE_EMBEDDING, "service", - new ElasticInferenceServiceSparseEmbeddingsServiceSettings(ElserModels.ELSER_V2_MODEL, maxInputTokens, null), + new ElasticInferenceServiceSparseEmbeddingsServiceSettings(modelId, maxInputTokens, null), EmptyTaskSettings.INSTANCE, EmptySecretSettings.INSTANCE, new ElasticInferenceServiceComponents(url) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests.java index dd205b12408ba..be6057bcddce0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.services.elastic; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,7 +22,6 @@ import java.util.Map; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModelsTests.randomElserModel; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; public class ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests extends AbstractWireSerializingTestCase< @@ -47,7 +45,7 @@ protected ElasticInferenceServiceSparseEmbeddingsServiceSettings mutateInstance( } public void testFromMap() { - var modelId = ElserModels.ELSER_V2_MODEL; + var modelId = "my-model-id"; var serviceSettings = ElasticInferenceServiceSparseEmbeddingsServiceSettings.fromMap( new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)), @@ -57,20 +55,6 @@ public void testFromMap() { assertThat(serviceSettings, is(new ElasticInferenceServiceSparseEmbeddingsServiceSettings(modelId, null, null))); } - public void testFromMap_InvalidElserModelId() { - var invalidModelId = "invalid"; - - ValidationException validationException = expectThrows( - ValidationException.class, - () -> ElasticInferenceServiceSparseEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(ServiceFields.MODEL_ID, invalidModelId)), - ConfigurationParseContext.REQUEST - ) - ); - - assertThat(validationException.getMessage(), containsString(Strings.format("unknown ELSER model id [%s]", invalidModelId))); - } - public void testToXContent_WritesAllFields() throws IOException { var modelId = ElserModels.ELSER_V1_MODEL; var maxInputTokens = 10; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 5a3a9a29d7564..414c2a3f943dd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -27,14 +27,17 @@ import org.elasticsearch.inference.MinimalServiceSettings; import 
org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnifiedCompletionRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -44,11 +47,15 @@ import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.services.InferenceEventsAssertion; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorization; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationTests; +import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionModel; +import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.After; @@ -61,8 +68,10 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.getModelListenerForException; import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; @@ -76,6 +85,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.isA; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -308,12 +318,12 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists public void testCheckModelConfig_ReturnsNewModelReference() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = createService(senderFactory, getUrl(webServer))) { - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); + var model = 
ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); PlainActionFuture<Model> listener = new PlainActionFuture<>(); service.checkModelConfig(model, listener); var returnedModel = listener.actionGet(TIMEOUT); - assertThat(returnedModel, is(ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)))); + assertThat(returnedModel, is(ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"))); } } @@ -356,6 +366,14 @@ public void testInfer_ThrowsErrorWhenModelIsNotAValidModel() throws IOException private ModelRegistry mockModelRegistry() { var client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); + + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener<Boolean>) invocationOnMock.getArgument(2); + listener.onResponse(true); + + return Void.TYPE; + }).when(client).execute(any(), any(), any()); return new ModelRegistry(client); } @@ -426,7 +444,7 @@ public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid_ChatCompletion() throws "Inference entity [model_id] does not support task type [chat_completion] " + "for inference, the task type must be one of [sparse_embedding]. " + "The task type for the inference entity is chat_completion, " - + "please use the _inference/chat_completion/model_id/_unified URL." + + "please use the _inference/chat_completion/model_id/_stream URL." ) ); @@ -457,7 +475,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(eisGatewayUrl); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(eisGatewayUrl, "my-model-id"); PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>(); service.infer( model, @@ -486,7 +504,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), Matchers.equalTo(XContentType.JSON.mediaType())); var requestMap = entityAsMap(request.getBody()); - assertThat(requestMap, is(Map.of("input", List.of("input text"), "usage_context", "search"))); + assertThat(requestMap, is(Map.of("input", List.of("input text"), "model", "my-model-id", "usage_context", "search"))); } } @@ -508,7 +526,7 @@ public void testChunkedInfer_PassesThrough() throws IOException { webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(eisGatewayUrl); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(eisGatewayUrl, "my-model-id"); PlainActionFuture<List<ChunkedInference>> listener = new PlainActionFuture<>(); service.chunkedInfer( model, @@ -544,7 +562,7 @@ public void testChunkedInfer_PassesThrough() throws IOException { ); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap, is(Map.of("input", List.of("input text"), "usage_context", "ingest"))); + assertThat(requestMap, is(Map.of("input", List.of("input text"), "model", "my-model-id", "usage_context", "ingest"))); } } @@ -795,7 +813,8 @@ public void testSupportedStreamingTasks_ReturnsChatCompletion_WhenAuthRespondsWi var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = createServiceWithAuthHandler(senderFactory, getUrl(webServer))) { service.waitForAuthorizationToComplete(TIMEOUT); - 
assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY))); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); assertTrue(service.defaultConfigIds().isEmpty()); PlainActionFuture<List<Model>> listener = new PlainActionFuture<>(); @@ -915,13 +934,17 @@ public void testDefaultConfigs_Returns_DefaultChatCompletion_V1_WhenTaskTypeIsIn } } - public void testDefaultConfigs_Returns_DefaultChatCompletion_V1_WhenTaskTypeIsCorrect() throws Exception { + public void testDefaultConfigs_Returns_DefaultEndpoints_WhenTaskTypeIsCorrect() throws Exception { String responseJson = """ { "models": [ { "model_name": "rainbow-sprinkles", "task_types": ["chat"] + }, + { + "model_name": "elser-v2", + "task_types": ["embed/text/sparse"] } ] } """ @@ -932,20 +955,111 @@ public void testDefaultConfigs_Returns_DefaultChatCompletion_V1_WhenTaskTypeIsCo var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = createServiceWithAuthHandler(senderFactory, getUrl(webServer))) { service.waitForAuthorizationToComplete(TIMEOUT); - assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY))); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); assertThat( service.defaultConfigIds(), is( List.of( + new InferenceService.DefaultConfigId(".elser-v2-elastic", MinimalServiceSettings.sparseEmbedding(), service), new InferenceService.DefaultConfigId(".rainbow-sprinkles-elastic", MinimalServiceSettings.chatCompletion(), service) ) ) ); - assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.SPARSE_EMBEDDING))); PlainActionFuture<List<Model>> listener = new PlainActionFuture<>(); service.defaultConfigs(listener); - assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + var models = listener.actionGet(TIMEOUT); + assertThat(models.size(), is(2)); + assertThat(models.get(0).getConfigurations().getInferenceEntityId(), is(".elser-v2-elastic")); + assertThat(models.get(1).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + } + } + + public void testUnifiedCompletionError() throws Exception { + testUnifiedStreamError(404, """ + { + "error": "The model `rainbow-sprinkles` does not exist or you do not have access to it." + }""", """ + {\ + "error":{\ + "code":"not_found",\ + "message":"Received an unsuccessful status code for request from inference entity id [id] status \ + [404]. Error message: [The model `rainbow-sprinkles` does not exist or you do not have access to it.]",\ + "type":"error"\ + }}"""); + } + + public void testUnifiedCompletionErrorMidStream() throws Exception { + testUnifiedStreamError(200, """ + data: { "error": "some error" } + + """, """ + {\ + "error":{\ + "code":"stream_error",\ + "message":"Received an error response for request from inference entity id [id]. 
Error message: [some error]",\ + "type":"error"\ + }}"""); + } + + public void testUnifiedCompletionMalformedError() throws Exception { + testUnifiedStreamError(200, """ + data: { i am not json } + + """, """ + {\ + "error":{\ + "code":"bad_request",\ + "message":"[1:3] Unexpected character ('i' (code 105)): was expecting double-quote to start field name\\n\ + at [Source: (String)\\"{ i am not json }\\"; line: 1, column: 3]",\ + "type":"x_content_parse_exception"\ + }}"""); + } + + private void testUnifiedStreamError(int responseCode, String responseJson, String expectedJson) throws Exception { + var eisGatewayUrl = getUrl(webServer); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createService(senderFactory, eisGatewayUrl)) { + webServer.enqueue(new MockResponse().setResponseCode(responseCode).setBody(responseJson)); + var model = new ElasticInferenceServiceCompletionModel( + "id", + TaskType.COMPLETION, + "elastic", + new ElasticInferenceServiceCompletionServiceSettings("model_id", new RateLimitSettings(100)), + EmptyTaskSettings.INSTANCE, + EmptySecretSettings.INSTANCE, + new ElasticInferenceServiceComponents(eisGatewayUrl) + ); + PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>(); + service.unifiedCompletionInfer( + model, + UnifiedCompletionRequest.of( + List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("hello"), "user", null, null)) + ), + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var result = listener.actionGet(TIMEOUT); + + InferenceEventsAssertion.assertThat(result).hasFinishedStream().hasNoEvents().hasErrorMatching(e -> { + e = unwrapCause(e); + assertThat(e, isA(UnifiedChatCompletionException.class)); + try (var builder = XContentFactory.jsonBuilder()) { + ((UnifiedChatCompletionException) e).toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + var json = XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + + assertThat(json, is(expectedJson)); + } + }); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettingsTests.java index 0f6386f670338..c530ff5c03482 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettingsTests.java @@ -53,7 +53,7 @@ public void testFromMap() { ConfigurationParseContext.REQUEST ); - assertThat(serviceSettings, is(new ElasticInferenceServiceCompletionServiceSettings(modelId, new RateLimitSettings(240L)))); + assertThat(serviceSettings, is(new ElasticInferenceServiceCompletionServiceSettings(modelId, new RateLimitSettings(720L)))); } public void testFromMap_MissingModelId_ThrowsException() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java index ebb9c964e4c9a..3b8ce3a7cc64c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java @@ -48,6 +48,7 @@ public static CustomElandInternalTextEmbeddingServiceSettings createRandom() { numThreads, modelId, null, + null, dims, similarityMeasure, elementType @@ -86,6 +87,7 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { modelId, null, null, + null, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -111,6 +113,7 @@ public void testFromMap_Request_DoesNotDefaultSimilarityElementType() { modelId, null, null, + null, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -152,6 +155,7 @@ public void testFromMap_Request_IgnoresDimensions() { modelId, null, null, + null, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -192,6 +196,7 @@ public void testFromMap_Persistent_CreatesSettingsCorrectly() { numThreads, modelId, null, + null, 1, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT @@ -206,6 +211,7 @@ public void testToXContent_WritesAllValues() throws IOException { 1, "model_id", null, + null, 100, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.BYTE diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java index 0db0a7669c8aa..4ec575420613f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java @@ -103,7 +103,7 @@ public void testFromMap() { ) ) ).build(); - assertEquals(new ElasticsearchInternalServiceSettings(1, 4, ".elser_model_1", null), serviceSettings); + assertEquals(new ElasticsearchInternalServiceSettings(1, 4, ".elser_model_1", null, null), serviceSettings); } public void testFromMapMissingOptions() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 3b634f45dc751..d1ce79b863c61 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import 
org.elasticsearch.cluster.service.ClusterService; @@ -46,12 +47,14 @@ import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingSparse; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; +import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; @@ -67,13 +70,16 @@ import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.chunking.WordBoundaryChunkingSettings; import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; @@ -81,12 +87,14 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction.Response.RESULTS_FIELD; import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID; import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86; @@ -101,6 +109,8 @@ import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class ElasticsearchInternalServiceTests extends ESTestCase { @@ -351,7 +361,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -381,7 +393,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, 
ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); String criticalWarning = "Putting elasticsearch service inference endpoints (including elser service) without a model_id field is" @@ -450,7 +464,9 @@ public void testParseRequestConfig_elser() { ); config.put(ModelConfigurations.CHUNKING_SETTINGS, createRandomChunkingSettingsMap()); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -486,7 +502,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -742,7 +760,16 @@ public void testParsePersistedConfig() { TaskType.TEXT_EMBEDDING, settings ); - var elandServiceSettings = new CustomElandInternalTextEmbeddingServiceSettings(1, 4, "invalid", null); + var elandServiceSettings = new CustomElandInternalTextEmbeddingServiceSettings( + 1, + 4, + "invalid", + null, + null, + null, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ); assertEquals( new CustomElandEmbeddingModel( randomInferenceEntityId, @@ -933,7 +960,7 @@ private void testChunkInfer_Sparse(ChunkingSettings chunkingSettings) throws Int "foo", TaskType.SPARSE_EMBEDDING, "elasticsearch", - new ElasticsearchInternalServiceSettings(1, 1, "model-id", null), + new ElasticsearchInternalServiceSettings(1, 1, "model-id", null, null), chunkingSettings ); var service = createService(client); @@ -1003,7 +1030,7 @@ private void testChunkInfer_Elser(ChunkingSettings chunkingSettings) throws Inte "foo", TaskType.SPARSE_EMBEDDING, "elasticsearch", - new ElserInternalServiceSettings(1, 1, "model-id", null), + new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, "model-id", null, null)), new ElserMlNodeTaskSettings(), chunkingSettings ); @@ -1328,11 +1355,20 @@ private CustomElandModel getCustomElandModel(TaskType taskType) { randomInferenceEntityId, taskType, ElasticsearchInternalService.NAME, - new CustomElandInternalServiceSettings(1, 4, "custom-model", null), + new CustomElandInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 4, "custom-model", null, null)), RerankTaskSettings.DEFAULT_SETTINGS ); } else if (taskType == TaskType.TEXT_EMBEDDING) { - var serviceSettings = new CustomElandInternalTextEmbeddingServiceSettings(1, 4, "custom-model", null); + var serviceSettings = new CustomElandInternalTextEmbeddingServiceSettings( + 1, + 4, + "custom-model", + null, + null, + null, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ); expectedModel = new CustomElandEmbeddingModel( randomInferenceEntityId, @@ -1346,7 +1382,7 @@ private CustomElandModel getCustomElandModel(TaskType taskType) { randomInferenceEntityId, taskType, ElasticsearchInternalService.NAME, - new CustomElandInternalServiceSettings(1, 4, "custom-model", null), + new CustomElandInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 4, "custom-model", null, null)), 
(ChunkingSettings) null ); } } @@ -1438,6 +1474,7 @@ public void testParseRequestConfigEland_SetsDimensionsToOne() { 4, "custom-model", null, + null, 1, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT @@ -1463,6 +1500,7 @@ public void testParseRequestConfigEland_SetsDimensionsToOne() { "custom-model", null, null, + null, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT ), @@ -1511,7 +1549,7 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { EmbeddingRequestChunker.EmbeddingType.SPARSE, ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.SPARSE_EMBEDDING, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertEquals( @@ -1526,7 +1564,7 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { ElasticsearchStatusException.class, () -> ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.COMPLETION, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertThat(e1.getMessage(), containsString("Chunking is not supported for task type [completion]")); @@ -1535,7 +1573,7 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { ElasticsearchStatusException.class, () -> ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.RERANK, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertThat(e2.getMessage(), containsString("Chunking is not supported for task type [rerank]")); @@ -1604,6 +1642,209 @@ public void testGetConfiguration() throws Exception { } } + public void testUpdateModelsWithDynamicFields_NoModelsToUpdate() throws Exception { + ActionListener<List<Model>> resultsListener = ActionListener.<List<Model>>wrap( + updatedModels -> assertEquals(Collections.emptyList(), updatedModels), + e -> fail("Unexpected exception: " + e) + ); + + try (var service = createService(mock(Client.class))) { + service.updateModelsWithDynamicFields(List.of(), resultsListener); + } + } + + public void testUpdateModelsWithDynamicFields_InvalidModelProvided() throws IOException { + ActionListener<List<Model>> resultsListener = ActionListener.wrap( + updatedModels -> fail("Expected invalid model assertion error to be thrown"), + e -> fail("Expected invalid model assertion error to be thrown") + ); + + try (var service = createService(mock(Client.class))) { + assertThrows( + AssertionError.class, + () -> { service.updateModelsWithDynamicFields(List.of(mock(Model.class)), resultsListener); } + ); + } + } + + @SuppressWarnings("unchecked") + public void testUpdateModelsWithDynamicFields_FailsToRetrieveDeployments() throws IOException { + var deploymentId = randomAlphaOfLength(10); + var model = mock(ElasticsearchInternalModel.class); + when(model.mlNodeDeploymentId()).thenReturn(deploymentId); + when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + ActionListener<List<Model>> resultsListener = ActionListener.wrap(updatedModels -> { + assertEquals(updatedModels.size(), 1); + verify(model).mlNodeDeploymentId(); + verifyNoMoreInteractions(model); + }, e -> fail("Expected original models to be returned")); + + var client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + doAnswer(invocation -> { + var listener = (ActionListener<GetDeploymentStatsAction.Response>) invocation.getArguments()[2]; + listener.onFailure(new RuntimeException(randomAlphaOfLength(10))); + return null; +
}).when(client).execute(eq(GetDeploymentStatsAction.INSTANCE), any(), any()); + + try (var service = createService(client)) { + service.updateModelsWithDynamicFields(List.of(model), resultsListener); + } + } + + public void testUpdateModelsWithDynamicFields_SingleModelToUpdate() throws IOException { + var deploymentId = randomAlphaOfLength(10); + var model = mock(ElasticsearchInternalModel.class); + when(model.mlNodeDeploymentId()).thenReturn(deploymentId); + when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + var modelsByDeploymentId = new HashMap<String, List<Model>>(); + modelsByDeploymentId.put(deploymentId, List.of(model)); + + testUpdateModelsWithDynamicFields(modelsByDeploymentId); + } + + public void testUpdateModelsWithDynamicFields_MultipleModelsWithDifferentDeploymentsToUpdate() throws IOException { + var deploymentId1 = randomAlphaOfLength(10); + var model1 = mock(ElasticsearchInternalModel.class); + when(model1.mlNodeDeploymentId()).thenReturn(deploymentId1); + when(model1.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + var deploymentId2 = randomAlphaOfLength(10); + var model2 = mock(ElasticsearchInternalModel.class); + when(model2.mlNodeDeploymentId()).thenReturn(deploymentId2); + when(model2.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + var modelsByDeploymentId = new HashMap<String, List<Model>>(); + modelsByDeploymentId.put(deploymentId1, List.of(model1)); + modelsByDeploymentId.put(deploymentId2, List.of(model2)); + + testUpdateModelsWithDynamicFields(modelsByDeploymentId); + } + + public void testUpdateModelsWithDynamicFields_MultipleModelsWithSameDeploymentsToUpdate() throws IOException { + var deploymentId = randomAlphaOfLength(10); + var model1 = mock(ElasticsearchInternalModel.class); + when(model1.mlNodeDeploymentId()).thenReturn(deploymentId); + when(model1.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + var model2 = mock(ElasticsearchInternalModel.class); + when(model2.mlNodeDeploymentId()).thenReturn(deploymentId); + when(model2.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); + + var modelsByDeploymentId = new HashMap<String, List<Model>>(); + modelsByDeploymentId.put(deploymentId, List.of(model1, model2)); + + testUpdateModelsWithDynamicFields(modelsByDeploymentId); + } + + @SuppressWarnings("unchecked") + private void testUpdateModelsWithDynamicFields(Map<String, List<Model>> modelsByDeploymentId) throws IOException { + var modelsToUpdate = new ArrayList<Model>(); + modelsByDeploymentId.values().forEach(modelsToUpdate::addAll); + + var updatedNumberOfAllocations = new HashMap<String, Integer>(); + modelsByDeploymentId.keySet().forEach(deploymentId -> updatedNumberOfAllocations.put(deploymentId, randomIntBetween(1, 10))); + + ActionListener<List<Model>> resultsListener = ActionListener.wrap(updatedModels -> { + assertEquals(updatedModels.size(), modelsToUpdate.size()); + modelsByDeploymentId.forEach((deploymentId, models) -> { + var expectedNumberOfAllocations = updatedNumberOfAllocations.get(deploymentId); + models.forEach(model -> { + verify((ElasticsearchInternalModel) model).updateNumAllocations(expectedNumberOfAllocations); + verify((ElasticsearchInternalModel) model).mlNodeDeploymentId(); + verifyNoMoreInteractions(model); + }); + }); + }, e -> fail("Unexpected exception: " + e)); + + var client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + doAnswer(invocation -> { + var listener = (ActionListener<GetDeploymentStatsAction.Response>) invocation.getArguments()[2]; + var mockAssignmentStats = new ArrayList<AssignmentStats>(); + modelsByDeploymentId.keySet().forEach(deploymentId -> { + var mockAssignmentStatsForDeploymentId = mock(AssignmentStats.class); +
when(mockAssignmentStatsForDeploymentId.getDeploymentId()).thenReturn(deploymentId); + when(mockAssignmentStatsForDeploymentId.getNumberOfAllocations()).thenReturn(updatedNumberOfAllocations.get(deploymentId)); + mockAssignmentStats.add(mockAssignmentStatsForDeploymentId); + }); + listener.onResponse( + new GetDeploymentStatsAction.Response( + Collections.emptyList(), + Collections.emptyList(), + mockAssignmentStats, + mockAssignmentStats.size() + ) + ); + return null; + }).when(client).execute(eq(GetDeploymentStatsAction.INSTANCE), any(), any()); + + try (var service = createService(client)) { + service.updateModelsWithDynamicFields(modelsToUpdate, resultsListener); + } + } + + public void testUpdateWithoutMlEnabled() throws IOException, InterruptedException { + var cs = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); + when(cs.getClusterSettings()).thenReturn(cSettings); + var context = new InferenceServiceExtension.InferenceServiceFactoryContext( + mock(), + threadPool, + cs, + Settings.builder().put("xpack.ml.enabled", false).build() + ); + try (var service = new ElasticsearchInternalService(context)) { + var models = List.of(mock(Model.class)); + var latch = new CountDownLatch(1); + service.updateModelsWithDynamicFields(models, ActionTestUtils.assertNoFailureListener(r -> { + latch.countDown(); + assertThat(r, Matchers.sameInstance(models)); + })); + assertTrue(latch.await(30, TimeUnit.SECONDS)); + } + } + + public void testUpdateWithMlEnabled() throws IOException, InterruptedException { + var deploymentId = "deploymentId"; + var model = mock(ElasticsearchInternalModel.class); + when(model.mlNodeDeploymentId()).thenReturn(deploymentId); + + AssignmentStats stats = mock(); + when(stats.getDeploymentId()).thenReturn(deploymentId); + when(stats.getNumberOfAllocations()).thenReturn(3); + + var client = mock(Client.class); + doAnswer(ans -> { + QueryPage<AssignmentStats> queryPage = new QueryPage<>(List.of(stats), 1, RESULTS_FIELD); + + GetDeploymentStatsAction.Response response = mock(); + when(response.getStats()).thenReturn(queryPage); + + ActionListener<GetDeploymentStatsAction.Response> listener = ans.getArgument(2); + listener.onResponse(response); + return null; + }).when(client).execute(eq(GetDeploymentStatsAction.INSTANCE), any(), any()); + when(client.threadPool()).thenReturn(threadPool); + + var cs = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); + when(cs.getClusterSettings()).thenReturn(cSettings); + var context = new InferenceServiceExtension.InferenceServiceFactoryContext( + client, + threadPool, + cs, + Settings.builder().put("xpack.ml.enabled", true).build() + ); + try (var service = new ElasticsearchInternalService(context)) { + List<Model> models = List.of(model); + var latch = new CountDownLatch(1); + service.updateModelsWithDynamicFields(models, ActionTestUtils.assertNoFailureListener(r -> latch.countDown())); + assertTrue(latch.await(30, TimeUnit.SECONDS)); + verify(model).updateNumAllocations(3); + } + } + private ElasticsearchInternalService createService(Client client) { var cs = mock(ClusterService.class); var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(MachineLearningField.MAX_LAZY_ML_NODES)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java index 96cd42efa42f5..5b21717ac03e4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java @@ -16,7 +16,7 @@ public void testUpdateNumAllocation() { "foo", TaskType.SPARSE_EMBEDDING, ElasticsearchInternalService.NAME, - new ElserInternalServiceSettings(null, 1, "elser", null), + new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(null, 1, "elser", null, null)), new ElserMlNodeTaskSettings(), null ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java index f4e97b2c2e5e0..dd4513db0d50a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java @@ -24,12 +24,12 @@ public static ElserInternalServiceSettings createRandom() { public void testBwcWrite() throws IOException { { - var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null)); + var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null, null)); var copy = copyInstance(settings, TransportVersions.V_8_12_0); assertEquals(settings, copy); } { - var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null)); + var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null, null)); var copy = copyInstance(settings, TransportVersions.V_8_11_X); assertEquals(settings, copy); } @@ -53,6 +53,7 @@ protected ElserInternalServiceSettings mutateInstance(ElserInternalServiceSettin instance.getNumAllocations() == null ? 
1 : instance.getNumAllocations() + 1, instance.getNumThreads(), instance.modelId(), + null, null ) ); @@ -61,6 +62,7 @@ protected ElserInternalServiceSettings mutateInstance(ElserInternalServiceSettin instance.getNumAllocations(), instance.getNumThreads() + 1, instance.modelId(), + null, null ) ); @@ -72,6 +74,7 @@ yield new ElserInternalServiceSettings( instance.getNumAllocations(), instance.getNumThreads(), versions.iterator().next(), + null, null ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java index 26dae5d172fb0..d0760a583df29 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java @@ -54,6 +54,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -1174,8 +1175,8 @@ public void testGetConfiguration() throws Exception { public void testSupportsStreaming() throws IOException { try (var service = new GoogleAiStudioService(mock(), createWithEmptySettings(mock()))) { - assertTrue(service.canStream(TaskType.COMPLETION)); - assertTrue(service.canStream(TaskType.ANY)); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index ff99101fc4ee5..99b7b3868b7f4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; @@ -154,6 +155,42 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO } } + public void testParseRequestConfig_CreatesAIbmWatsonxRerankModel() throws IOException { + try (var service = createIbmWatsonxService()) { + ActionListener<Model> modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(IbmWatsonxRerankModel.class)); + + var rerankModel = (IbmWatsonxRerankModel) model; + assertThat(rerankModel.getServiceSettings().modelId(), is(modelId)); + assertThat(rerankModel.getServiceSettings().projectId(), is(projectId)); + assertThat(rerankModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(rerankModel.getSecretSettings().apiKey().toString(), is(apiKey)); + }, e -> fail("Model parsing should have succeeded, but failed:
" + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.RERANK, + getRequestConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + new HashMap<>(Map.of()), + getSecretSettingsMap(apiKey) + ), + modelListener + ); + } + } + public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createIbmWatsonxService()) { ActionListener modelListener = ActionListener.wrap(model -> { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java new file mode 100644 index 0000000000000..0138952c11e07 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.net.URI; + +public class IbmWatsonxRerankModelTests extends ESTestCase { + public static IbmWatsonxRerankModel createModel(String model, String projectId, URI uri, String apiVersion, String apiKey) { + return new IbmWatsonxRerankModel( + "id", + TaskType.RERANK, + "service", + new IbmWatsonxRerankServiceSettings(uri, apiVersion, model, projectId, null), + new IbmWatsonxRerankTaskSettings(2, true, 100), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java index 5fa14da4ba733..2aeb0447f9c78 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java @@ -1831,33 +1831,53 @@ public void testDefaultSimilarity() { @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createJinaAIService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "jinaai", - "name": "Jina AI", - "task_types": ["text_embedding", "rerank"], - "configurations": { - "api_key": { - "description": "API Key for the provider you're connecting to.", - "label": "API Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "rerank"] - }, - "rate_limit.requests_per_minute": { - "description": "Minimize the number of rate limit errors.", - "label": "Rate Limit", - "required": false, - "sensitive": false, - "updatable": false, - "type": "int", - 
"supported_task_types": ["text_embedding", "rerank"] + String content = XContentHelper.stripWhitespace( + """ + { + "service": "jinaai", + "name": "Jina AI", + "task_types": ["text_embedding", "rerank"], + "configurations": { + "api_key": { + "description": "API Key for the provider you're connecting to.", + "label": "API Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] + }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://api.jina.ai/redoc#tag/embeddings/operation/create_embedding_v1_embeddings_post.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "model_id": { + "description": "The name of the model to use for the inference task.", + "label": "Model ID", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] + }, + "rate_limit.requests_per_minute": { + "description": "Minimize the number of rate limit errors.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "rerank"] + } } } - } - """); + """ + ); InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 6fddbf4450283..b31cdf4f9d592 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -33,9 +33,11 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbeddingFloat; +import org.elasticsearch.xpack.core.inference.results.UnifiedChatCompletionException; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -56,13 +58,16 @@ import java.io.IOException; import java.util.Arrays; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.getRequestConfigMap; @@ -84,6 +89,7 @@ import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.isA; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -931,7 +937,7 @@ public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid_ChatCompletion() throws "Inference entity [model_id] does not support task type [chat_completion] " + "for inference, the task type must be one of [text_embedding, completion]. " + "The task type for the inference entity is chat_completion, " - + "please use the _inference/chat_completion/model_id/_unified URL." + + "please use the _inference/chat_completion/model_id/_stream URL." ) ); @@ -1061,6 +1067,94 @@ public void testUnifiedCompletionInfer() throws Exception { } } + public void testUnifiedCompletionError() throws Exception { + String responseJson = """ + { + "error": { + "message": "The model `gpt-4awero` does not exist or you do not have access to it.", + "type": "invalid_request_error", + "param": null, + "code": "model_not_found" + } + }"""; + webServer.enqueue(new MockResponse().setResponseCode(404).setBody(responseJson)); + testStreamError(""" + {\ + "error":{\ + "code":"model_not_found",\ + "message":"Received an unsuccessful status code for request from inference entity id [id] status \ + [404]. Error message: [The model `gpt-4awero` does not exist or you do not have access to it.]",\ + "type":"invalid_request_error"\ + }}"""); + } + + private void testStreamError(String expectedResponse) throws Exception { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { + var model = OpenAiChatCompletionModelTests.createChatCompletionModel(getUrl(webServer), "org", "secret", "model", "user"); + PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>(); + service.unifiedCompletionInfer( + model, + UnifiedCompletionRequest.of( + List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("hello"), "user", null, null)) + ), + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var result = listener.actionGet(TIMEOUT); + + InferenceEventsAssertion.assertThat(result).hasFinishedStream().hasNoEvents().hasErrorMatching(e -> { + e = unwrapCause(e); + assertThat(e, isA(UnifiedChatCompletionException.class)); + try (var builder = XContentFactory.jsonBuilder()) { + ((UnifiedChatCompletionException) e).toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> { + try { + xContent.toXContent(builder, EMPTY_PARAMS); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }); + var json = XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType()); + + assertThat(json, is(expectedResponse)); + } + }); + } + } + + public void testMidStreamUnifiedCompletionError() throws Exception { + String responseJson = """ + event: error + data: { "error": { "message": "Timed out waiting for more data", "type": "timeout" } } + + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + testStreamError(""" + {\ + "error":{\ + "message":"Received an error response for request from inference entity id [id].
Error message: \ + [Timed out waiting for more data]",\ + "type":"timeout"\ + }}"""); + } + + public void testUnifiedCompletionMalformedError() throws Exception { + String responseJson = """ + data: { invalid json } + + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + testStreamError(""" + {\ + "error":{\ + "code":"bad_request",\ + "message":"[1:3] Unexpected character ('i' (code 105)): was expecting double-quote to start field name\\n\ + at [Source: (String)\\"{ invalid json }\\"; line: 1, column: 3]",\ + "type":"x_content_parse_exception"\ + }}"""); + } + public void testInfer_StreamRequest() throws Exception { String responseJson = """ data: {\ @@ -1133,8 +1227,8 @@ public void testInfer_StreamRequest_ErrorResponse() throws Exception { public void testSupportsStreaming() throws IOException { try (var service = new OpenAiService(mock(), createWithEmptySettings(mock()))) { - assertTrue(service.canStream(TaskType.COMPLETION)); - assertTrue(service.canStream(TaskType.ANY)); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION))); + assertFalse(service.canStream(TaskType.ANY)); } } @@ -1752,6 +1846,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "completion", "chat_completion"] }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://platform.openai.com/docs/api-reference/embeddings/create#embeddings-create-dimensions.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, "organization_id": { "description": "The unique identifier of your organization.", "label": "Organization ID", @@ -1778,16 +1881,6 @@ public void testGetConfiguration() throws Exception { "updatable": false, "type": "str", "supported_task_types": ["text_embedding", "completion", "chat_completion"] - }, - "url": { - "default_value": "https://api.openai.com/v1/chat/completions", - "description": "The OpenAI API endpoint URL. For more information on the URL, refer to the https://platform.openai.com/docs/api-reference.", - "label": "URL", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion", "chat_completion"] } } } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference_bwc.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference_bwc.yml index 07341273151bc..5f87942b2c710 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference_bwc.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference_bwc.yml @@ -650,3 +650,28 @@ setup: - match: { hits.total.value: 1 } - match: { hits.total.relation: eq } - match: { hits.hits.0._source.dense_field.text: "updated text" } + +--- +"Skip fetching _inference_fields": + - requires: + cluster_features: semantic_text.skip_inference_fields + reason: Skip _inference_fields when search is performed on legacy semantic_text format. 
+ + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "test value" + refresh: true + + - do: + search: + index: test-index + body: + fields: [ _inference_fields ] + query: + match_all: { } + + - match: { hits.total.value: 1 } + - not_exists: hits.hits.0._source._inference_fields diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml index dec4e127e501c..64ecb0f2d882c 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml @@ -43,6 +43,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: semantic_text inference_id: dense-inference-id @@ -53,6 +55,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: semantic_text inference_id: dense-inference-id-2 @@ -63,6 +67,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: dense_vector dims: 10 @@ -74,6 +80,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: dense_vector dims: 3 @@ -84,6 +92,7 @@ setup: index: test-semantic-text-index id: doc_1 body: + keyword_field: "foo" inference_field: [ "inference test", "another inference test" ] refresh: true @@ -92,6 +101,7 @@ setup: index: test-semantic-text-index-2 id: doc_2 body: + keyword_field: "bar" inference_field: [ "inference test", "another inference test" ] refresh: true @@ -100,6 +110,7 @@ setup: index: test-dense-vector-index id: doc_3 body: + keyword_field: "baz" inference_field: [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] refresh: true @@ -108,6 +119,7 @@ setup: index: test-incompatible-dense-vector-index id: doc_4 body: + keyword_field: "qux" inference_field: [ 1, 2, 3 ] refresh: true @@ -311,6 +323,34 @@ setup: - match: { hits.total.value: 2 } +--- +"knn query respects filters": + - requires: + cluster_features: "search.semantic_knn_filter_fix" + reason: filters fixed in 8.18.0 + + - do: + search: + index: + - test-semantic-text-index + - test-semantic-text-index-2 + body: + query: + knn: + field: inference_field + k: 10 + num_candidates: 100 + query_vector_builder: + text_embedding: + model_text: test + filter: + term: + keyword_field: "foo" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + + --- "knn query against multiple semantic_text fields with multiple inference IDs specified in semantic_text fields with smaller k returns k for each index": diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml index ca87c97fc3acd..7765795ebfbdc 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml @@ -55,22 +55,32 @@ setup: index.mapping.semantic_text.use_legacy_format: false mappings: properties: + title: + type: text body: type: semantic_text inference_id: dense-inference-id ---- -"Highlighting using a sparse embedding model": - do: index: index: test-sparse-index id: doc_1 body: + 
title: "Elasticsearch" body: ["ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!"] refresh: true - - match: { result: created } + - do: + index: + index: test-dense-index + id: doc_1 + body: + title: "Elasticsearch" + body: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + refresh: true +--- +"Highlighting using a sparse embedding model": - do: search: index: test-sparse-index @@ -153,16 +163,6 @@ setup: --- "Highlighting using a dense embedding model": - - do: - index: - index: test-dense-index - id: doc_1 - body: - body: ["ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!"] - refresh: true - - - match: { result: created } - - do: search: index: test-dense-index @@ -243,4 +243,51 @@ setup: - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } +--- +"Default highlighter for fields": + - requires: + cluster_features: "semantic_text.highlighter.default" + reason: semantic text field defaults to the semantic highlighter + + - do: + search: + index: test-dense-index + body: + query: + match: + body: "What is Elasticsearch?" + highlight: + fields: + body: + order: "score" + number_of_fragments: 2 + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0.highlight.body: 2 } + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." 
} + +--- +"semantic highlighter ignores non-inference fields": + - requires: + cluster_features: "semantic_text.highlighter.default" + reason: semantic text field defaults to the semantic highlighter + + - do: + search: + index: test-dense-index + body: + query: + match: + title: "Elasticsearch" + highlight: + fields: + title: + type: semantic + number_of_fragments: 2 + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - not_exists: hits.hits.0.highlight.title diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java index 40aab696dc9c4..4ae1e9961a109 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java @@ -63,14 +63,14 @@ public void setup() throws Exception { private RestClient client; - public void testLogsSettingsIndexModeDisabled() throws IOException { + public void testLogsSettingsIndexModeEnabledByDefault() throws IOException { assertOK(createDataStream(client, "logs-custom-dev")); final String indexMode = (String) getSetting( client, getDataStreamBackingIndex(client, "logs-custom-dev", 0), IndexSettings.MODE.getKey() ); - assertThat(indexMode, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + assertThat(indexMode, equalTo(IndexMode.LOGSDB.getName())); } public void testTogglingLogsdb() throws IOException { @@ -81,29 +81,21 @@ public void testTogglingLogsdb() throws IOException { getDataStreamBackingIndex(client, "logs-custom-dev", 0), IndexSettings.MODE.getKey() ); - assertThat(indexModeBefore, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); - assertOK(putClusterSetting(client, "cluster.logsdb.enabled", "true")); + assertThat(indexModeBefore, equalTo(IndexMode.LOGSDB.getName())); + assertOK(putClusterSetting(client, "cluster.logsdb.enabled", "false")); final String indexModeAfter = (String) getSetting( client, getDataStreamBackingIndex(client, "logs-custom-dev", 0), IndexSettings.MODE.getKey() ); - assertThat(indexModeAfter, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + assertThat(indexModeAfter, equalTo(IndexMode.LOGSDB.getName())); assertOK(rolloverDataStream(client, "logs-custom-dev")); final String indexModeLater = (String) getSetting( client, getDataStreamBackingIndex(client, "logs-custom-dev", 1), IndexSettings.MODE.getKey() ); - assertThat(indexModeLater, equalTo(IndexMode.LOGSDB.getName())); - assertOK(putClusterSetting(client, "cluster.logsdb.enabled", "false")); - assertOK(rolloverDataStream(client, "logs-custom-dev")); - final String indexModeFinal = (String) getSetting( - client, - getDataStreamBackingIndex(client, "logs-custom-dev", 2), - IndexSettings.MODE.getKey() - ); - assertThat(indexModeFinal, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + assertThat(indexModeLater, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbSnapshotRestoreIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbSnapshotRestoreIT.java new file mode 100644 index 0000000000000..0b57d0ed8c4f0 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbSnapshotRestoreIT.java @@ -0,0 +1,372 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentType; +import org.junit.After; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.net.InetAddress; +import java.time.Instant; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasSize; + +public class LogsdbSnapshotRestoreIT extends ESRestTestCase { + + private static TemporaryFolder repoDirectory = new TemporaryFolder(); + + private static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("path.repo", () -> getRepoPath()) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + // TODO: remove when initializing / serializing default SourceFieldMapper instance have been fixed: + // (SFM's mode attribute often gets initialized, even when mode attribute isn't set) + .jvmArg("-da:org.elasticsearch.index.mapper.DocumentMapper") + .jvmArg("-da:org.elasticsearch.index.mapper.MapperService") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + static final String LOGS_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 1000, + "template": { + "settings": { + "index": { + "mapping": { + "source":{ + "mode": "{{source_mode}}" + } + } + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "host": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "pid": { + "type": "integer" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip_address": { + "type": "ip" + }, + "my_object_array": { + "type": "{{array_type}}" + } + } + } + } + }"""; + + static final String DOC_TEMPLATE = """ + { + "@timestamp": "%s", + "host": { "name": "%s"}, + "pid": %d, + "method": "%s", + "message": "%s", + "ip_address": "%s", + "memory_usage_bytes": "%d", + "my_object_array": [ + { + "field_1": "a", + "field_2": "b" + }, + { + "field_1": "c", + "field_2": "d" + } + ] + } + """; + + 
@Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testSnapshotRestore() throws Exception { + snapshotAndRestore("synthetic", "object", false); + } + + public void testSnapshotRestoreWithSourceOnlyRepository() throws Exception { + snapshotAndFail("object"); + } + + public void testSnapshotRestoreNested() throws Exception { + snapshotAndRestore("synthetic", "nested", false); + } + + public void testSnapshotRestoreNestedWithSourceOnlyRepository() throws Exception { + snapshotAndFail("nested"); + } + + public void testSnapshotRestoreStoredSource() throws Exception { + snapshotAndRestore("stored", "object", false); + } + + public void testSnapshotRestoreStoredSourceWithSourceOnlyRepository() throws Exception { + snapshotAndRestore("stored", "object", true); + } + + public void testSnapshotRestoreStoredSourceNested() throws Exception { + snapshotAndRestore("stored", "nested", false); + } + + public void testSnapshotRestoreStoredSourceNestedWithSourceOnlyRepository() throws Exception { + snapshotAndRestore("stored", "nested", true); + } + + @After + public void cleanup() throws Exception { + deleteSnapshot("my-repository", "my-snapshot", true); + deleteRepository("my-repository"); + deleteDataStream("logs-my-test"); + } + + static void snapshotAndRestore(String sourceMode, String arrayType, boolean sourceOnly) throws IOException { + String dataStreamName = "logs-my-test"; + String repositoryName = "my-repository"; + if (sourceOnly) { + var repositorySettings = Settings.builder().put("delegate_type", "fs").put("location", getRepoPath()).build(); + registerRepository(repositoryName, "source", true, repositorySettings); + } else { + var repositorySettings = Settings.builder().put("location", getRepoPath()).build(); + registerRepository(repositoryName, FsRepository.TYPE, true, repositorySettings); + } + + putTemplate("my-template", LOGS_TEMPLATE.replace("{{source_mode}}", sourceMode).replace("{{array_type}}", arrayType)); + String[] docs = new String[100]; + for (int i = 0; i < 100; i++) { + docs[i] = document( + Instant.now(), + String.format(Locale.ROOT, "host-%03d", i), + randomNonNegativeInt(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) + ); + indexDocument(dataStreamName, docs[i]); + } + refresh(dataStreamName); + assertDocCount(client(), dataStreamName, 100); + assertSource(dataStreamName, docs); + assertDataStream(dataStreamName, sourceMode); + + String snapshotName = "my-snapshot"; + var snapshotResponse = performSnapshot(repositoryName, dataStreamName, snapshotName, true); + assertOK(snapshotResponse); + var snapshotResponseBody = entityAsMap(snapshotResponse); + Map<?, ?> snapshotItem = (Map<?, ?>) snapshotResponseBody.get("snapshot"); + List<?> failures = (List<?>) snapshotItem.get("failures"); + assertThat(failures, empty()); + deleteDataStream(dataStreamName); + assertDocCount(dataStreamName, 0); + + restoreSnapshot(repositoryName, snapshotName, true); + assertDataStream(dataStreamName, sourceMode); + assertDocCount(dataStreamName, 100); + assertSource(dataStreamName, docs); + } + + static void snapshotAndFail(String arrayType) throws IOException { + String dataStreamName = "logs-my-test"; + String repositoryName = "my-repository"; + var repositorySettings = Settings.builder().put("delegate_type", "fs").put("location", getRepoPath()).build(); + registerRepository(repositoryName, "source", true, repositorySettings); + + putTemplate("my-template",
LOGS_TEMPLATE.replace("{{source_mode}}", "synthetic").replace("{{array_type}}", arrayType)); + for (int i = 0; i < 100; i++) { + indexDocument( + dataStreamName, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()), + randomIntBetween(1_000_000, 2_000_000) + ) + ); + } + refresh(dataStreamName); + assertDocCount(client(), dataStreamName, 100); + assertDataStream(dataStreamName, "synthetic"); + + String snapshotName = "my-snapshot"; + var snapshotResponse = performSnapshot(repositoryName, dataStreamName, snapshotName, true); + assertOK(snapshotResponse); + var snapshotResponseBody = entityAsMap(snapshotResponse); + Map<?, ?> snapshotItem = (Map<?, ?>) snapshotResponseBody.get("snapshot"); + List<?> failures = (List<?>) snapshotItem.get("failures"); + assertThat(failures, hasSize(1)); + Map<?, ?> failure = (Map<?, ?>) failures.get(0); + assertThat( + (String) failure.get("reason"), + containsString( + "Can't snapshot _source only on an index that has incomplete source ie. has _source disabled or filters the source" + ) + ); + } + + static void deleteDataStream(String dataStreamName) throws IOException { + assertOK(client().performRequest(new Request("DELETE", "/_data_stream/" + dataStreamName))); + } + + static void putTemplate(String templateName, String template) throws IOException { + final Request request = new Request("PUT", "/_index_template/" + templateName); + request.setJsonEntity(template); + assertOK(client().performRequest(request)); + } + + static void indexDocument(String indexOrDataStream, String doc) throws IOException { + final Request request = new Request("POST", "/" + indexOrDataStream + "/_doc?refresh=true"); + request.setJsonEntity(doc); + final Response response = client().performRequest(request); + assertOK(response); + assertThat(entityAsMap(response).get("result"), equalTo("created")); + } + + static String document( + final Instant timestamp, + final String hostname, + long pid, + final String method, + final String message, + final InetAddress ipAddress, + long memoryUsageBytes + ) { + return String.format( + Locale.ROOT, + DOC_TEMPLATE, + DateFormatter.forPattern(FormatNames.DATE_TIME.getName()).format(timestamp), + hostname, + pid, + method, + message, + InetAddresses.toAddrString(ipAddress), + memoryUsageBytes + ); + } + + static Response performSnapshot(String repository, String dataStreamName, String snapshot, boolean waitForCompletion) + throws IOException { + final Request request = new Request(HttpPut.METHOD_NAME, "_snapshot/" + repository + '/' + snapshot); + request.setJsonEntity(""" + { + "indices": "{{dataStreamName}}" + } + """.replace("{{dataStreamName}}", dataStreamName)); + request.addParameter("wait_for_completion", Boolean.toString(waitForCompletion)); + + return client().performRequest(request); + } + + static void assertDataStream(String dataStreamName, final String sourceMode) throws IOException { + String indexName = getWriteBackingIndex(dataStreamName, 0); + var flatSettings = (Map<String, Object>) ((Map<String, Object>) getIndexSettings(indexName).get(indexName)).get("settings"); + assertThat(flatSettings, hasEntry("index.mode", "logsdb")); + assertThat(flatSettings, hasEntry("index.mapping.source.mode", sourceMode)); + } + + static String getWriteBackingIndex(String dataStreamName, int backingIndex) throws IOException { + final Request request = new Request("GET", "_data_stream/" + dataStreamName); + final List<?> dataStreams = (List<?>)
entityAsMap(client().performRequest(request)).get("data_streams"); + final Map<?, ?> dataStream = (Map<?, ?>) dataStreams.get(0); + final List<?> backingIndices = (List<?>) dataStream.get("indices"); + return (String) ((Map<?, ?>) backingIndices.get(backingIndex)).get("index_name"); + } + + static void assertDocCount(String indexName, long docCount) throws IOException { + Request countReq = new Request("GET", "/" + indexName + "/_count"); + countReq.addParameter("ignore_unavailable", "true"); + ObjectPath resp = ObjectPath.createFromResponse(client().performRequest(countReq)); + assertEquals( + "expected " + docCount + " documents but it was a different number", + docCount, + Long.parseLong(resp.evaluate("count").toString()) + ); + } + + static void assertSource(String indexName, String[] docs) throws IOException { + Request searchReq = new Request("GET", "/" + indexName + "/_search"); + searchReq.addParameter("size", String.valueOf(docs.length)); + var response = client().performRequest(searchReq); + assertOK(response); + var responseBody = entityAsMap(response); + List<?> hits = (List<?>) ((Map<?, ?>) responseBody.get("hits")).get("hits"); + assertThat(hits, hasSize(docs.length)); + for (Object hit : hits) { + Map<?, ?> actualSource = (Map<?, ?>) ((Map<?, ?>) hit).get("_source"); + String actualHost = (String) ((Map<?, ?>) actualSource.get("host")).get("name"); + Map<String, Object> expectedSource = null; + for (String doc : docs) { + expectedSource = XContentHelper.convertToMap(XContentType.JSON.xContent(), doc, false); + String expectedHost = (String) ((Map<?, ?>) expectedSource.get("host")).get("name"); + if (expectedHost.equals(actualHost)) { + break; + } + } + + assertMap(actualSource, matchesMap(expectedSource)); + } + } + + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + private static String getRepoPath() { + return repoDirectory.getRoot().getPath(); + } + +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java index 8a5bb8d12cd3d..fb890f3ac7ae7 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java @@ -49,14 +49,14 @@ public DataGenerationHelper(Consumer builder "host.name", FieldType.KEYWORD, Map.of("type", "keyword"), - () -> ESTestCase.randomAlphaOfLength(5) + (ignored) -> ESTestCase.randomAlphaOfLength(5) ), // Needed for terms query new PredefinedField.WithGenerator( "method", FieldType.KEYWORD, Map.of("type", "keyword"), - () -> ESTestCase.randomFrom("put", "post", "get") + (ignored) -> ESTestCase.randomFrom("put", "post", "get") ), // Needed for histogram aggregation @@ -64,7 +64,7 @@ public DataGenerationHelper(Consumer builder "memory_usage_bytes", FieldType.LONG, Map.of("type", "long"), - () -> ESTestCase.randomLongBetween(1000, 2000) + (ignored) -> ESTestCase.randomLongBetween(1000, 2000) ) ) ); diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 4720ec87cb85c..455e707cc0d2c 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -11,10 +11,8 @@ import org.elasticsearch.action.ActionRequest; import
org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -42,17 +40,21 @@ import java.util.function.Predicate; import java.util.function.Supplier; -import static org.elasticsearch.xpack.logsdb.LogsPatternUsageService.LOGSDB_PRIOR_LOGS_USAGE; -import static org.elasticsearch.xpack.logsdb.LogsPatternUsageService.USAGE_CHECK_MAX_PERIOD; import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseService.FALLBACK_SETTING; public class LogsDBPlugin extends Plugin implements ActionPlugin { private final Settings settings; private final SyntheticSourceLicenseService licenseService; + private static final Setting<Boolean> LOGSDB_PRIOR_LOGS_USAGE = Setting.boolSetting( + "logsdb.prior_logs_usage", + false, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); public static final Setting<Boolean> CLUSTER_LOGSDB_ENABLED = Setting.boolSetting( "cluster.logsdb.enabled", - false, + settings -> Boolean.toString(LOGSDB_PRIOR_LOGS_USAGE.get(settings) == false), Setting.Property.Dynamic, Setting.Property.NodeScope ); @@ -81,18 +83,6 @@ public Collection<?> createComponents(PluginServices services) { logsdbIndexModeSettingsProvider::updateClusterIndexModeLogsdbEnabled ); - var clusterService = services.clusterService(); - Supplier<Metadata> metadataSupplier = () -> clusterService.state().metadata(); - var historicLogsUsageService = new LogsPatternUsageService(services.client(), settings, services.threadPool(), metadataSupplier); - clusterService.addLocalNodeMasterListener(historicLogsUsageService); - clusterService.addLifecycleListener(new LifecycleListener() { - - @Override - public void beforeStop() { - historicLogsUsageService.offMaster(); - } - }); - - // Nothing to share here: return super.createComponents(services); } @@ -112,7 +102,7 @@ public Collection<IndexSettingProvider> getAdditionalIndexSettingProviders(Index @Override public List<Setting<?>> getSettings() { - return List.of(FALLBACK_SETTING, CLUSTER_LOGSDB_ENABLED, USAGE_CHECK_MAX_PERIOD, LOGSDB_PRIOR_LOGS_USAGE); + return List.of(FALLBACK_SETTING, CLUSTER_LOGSDB_ENABLED, LOGSDB_PRIOR_LOGS_USAGE); } @Override diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java deleted file mode 100644 index 929db16a618a0..0000000000000 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.xpack.logsdb; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.LocalNodeMasterListener; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; - -import static org.elasticsearch.xpack.logsdb.LogsdbIndexModeSettingsProvider.LOGS_PATTERN; - -/** - * A component that checks in the background whether there are data streams that match the logs-*-* pattern and if so records this - * as a persistent setting in cluster state. If logs-*-* data stream usage has been found then this component will no longer - * run in the background. - *
<p>
    - * After {@link #onMaster()} is invoked, the first check is scheduled to run after 1 minute. If no logs-*-* data streams are - * found, then the next check runs after 2 minutes. The schedule time will double if no data streams with logs-*-* pattern - * are found up until the maximum configured period in the {@link #USAGE_CHECK_MAX_PERIOD} setting (defaults to 24 hours). - *
<p>
    - * If during a check one or more logs-*-* data streams are found, then the {@link #LOGSDB_PRIOR_LOGS_USAGE} setting gets set - * as a persistent cluster setting and this component will not schedule new checks. The mentioned setting is visible in persistent settings - * of cluster state and is a signal that upon upgrading to 9.x logsdb will not be enabled by default for data streams matching the - * logs-*-* pattern. It isn't recommended to manually set the {@link #LOGSDB_PRIOR_LOGS_USAGE} setting. - */ -final class LogsPatternUsageService implements LocalNodeMasterListener { - - private static final Logger LOGGER = LogManager.getLogger(LogsPatternUsageService.class); - private static final TimeValue USAGE_CHECK_MINIMUM = TimeValue.timeValueSeconds(30); - static final Setting<TimeValue> USAGE_CHECK_MAX_PERIOD = Setting.timeSetting( - "logsdb.usage_check.max_period", - new TimeValue(24, TimeUnit.HOURS), - Setting.Property.NodeScope - ); - static final Setting<Boolean> LOGSDB_PRIOR_LOGS_USAGE = Setting.boolSetting( - "logsdb.prior_logs_usage", - false, - Setting.Property.Dynamic, - Setting.Property.NodeScope - ); - - private final Client client; - private final Settings nodeSettings; - private final ThreadPool threadPool; - private final Supplier<Metadata> metadataSupplier; - - // Initializing to 30s, so the first check runs with a delay of 60s: - volatile TimeValue nextWaitTime = USAGE_CHECK_MINIMUM; - volatile boolean isMaster; - volatile boolean hasPriorLogsUsage; - volatile Scheduler.Cancellable cancellable; - - LogsPatternUsageService(Client client, Settings nodeSettings, ThreadPool threadPool, Supplier<Metadata> metadataSupplier) { - this.client = client; - this.nodeSettings = nodeSettings; - this.threadPool = threadPool; - this.metadataSupplier = metadataSupplier; - } - - @Override - public void onMaster() { - if (cancellable == null || cancellable.isCancelled()) { - isMaster = true; - nextWaitTime = USAGE_CHECK_MINIMUM; - scheduleNext(); - } - } - - @Override - public void offMaster() { - isMaster = false; - if (cancellable != null && cancellable.isCancelled() == false) { - cancellable.cancel(); - cancellable = null; - } - } - - void scheduleNext() { - TimeValue maxWaitTime = USAGE_CHECK_MAX_PERIOD.get(nodeSettings); - nextWaitTime = TimeValue.timeValueMillis(Math.min(nextWaitTime.millis() * 2, maxWaitTime.millis())); - scheduleNext(nextWaitTime); - } - - void scheduleNext(TimeValue waitTime) { - if (isMaster && hasPriorLogsUsage == false) { - try { - cancellable = threadPool.schedule(this::check, waitTime, threadPool.generic()); - } catch (EsRejectedExecutionException e) { - if (e.isExecutorShutdown()) { - LOGGER.debug("Failed to check; Shutting down", e); - } else { - throw e; - } - } - } else { - LOGGER.debug("Skipping check, because [{}]/[{}]", isMaster, hasPriorLogsUsage); - } - } - - void check() { - LOGGER.debug("Starting logs-*-* usage check"); - if (isMaster) { - var metadata = metadataSupplier.get(); - if (LOGSDB_PRIOR_LOGS_USAGE.exists(metadata.persistentSettings())) { - LOGGER.debug("Using persistent logs-*-* usage check"); - hasPriorLogsUsage = true; - return; - } - - if (hasLogsUsage(metadata)) { - updateSetting(); - } else { - LOGGER.debug("No usage found; Skipping check"); - scheduleNext(); - } - } else { - LOGGER.debug("No longer master; Skipping check"); - } - } - - static boolean hasLogsUsage(Metadata metadata) { - for (var dataStream : metadata.dataStreams().values()) { - if (Regex.simpleMatch(LOGS_PATTERN, dataStream.getName())) { - return true; - } - } - return false; - } - - void updateSetting() { -
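// Persist LOGSDB_PRIOR_LOGS_USAGE=true as a persistent cluster setting; on success no further checks are scheduled, - // while a failed or unexpected response retries the check after one minute. -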
var settingsToUpdate = Settings.builder().put(LOGSDB_PRIOR_LOGS_USAGE.getKey(), true).build(); - var request = new ClusterUpdateSettingsRequest(TimeValue.ONE_MINUTE, TimeValue.ONE_MINUTE); - request.persistentSettings(settingsToUpdate); - client.execute(ClusterUpdateSettingsAction.INSTANCE, request, ActionListener.wrap(resp -> { - if (resp.isAcknowledged() && LOGSDB_PRIOR_LOGS_USAGE.exists(resp.getPersistentSettings())) { - hasPriorLogsUsage = true; - cancellable = null; - } else { - LOGGER.debug(() -> "unexpected response [" + LOGSDB_PRIOR_LOGS_USAGE.getKey() + "]"); - scheduleNext(TimeValue.ONE_MINUTE); - } - }, e -> { - LOGGER.debug(() -> "Failed to update [" + LOGSDB_PRIOR_LOGS_USAGE.getKey() + "]", e); - scheduleNext(TimeValue.ONE_MINUTE); - })); - } -} diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java deleted file mode 100644 index fcd1d311df802..0000000000000 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.logsdb; - -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.datastreams.DeleteDataStreamAction; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; -import org.elasticsearch.cluster.metadata.Template; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.datastreams.DataStreamsPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPoolStats; - -import java.util.Collection; -import java.util.List; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.nullValue; - -public class LogsPatternUsageServiceIntegrationTests extends ESSingleNodeTestCase { - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return List.of(LogsDBPlugin.class, DataStreamsPlugin.class); - } - - @Override - protected Settings nodeSettings() { - return Settings.builder().put("logsdb.usage_check.max_period", "1s").build(); - } - - @Override - protected boolean resetNodeAfterTest() { - return true; - } - - public void testLogsPatternUsage() throws Exception { - var template = ComposableIndexTemplate.builder() - .indexPatterns(List.of("logs-*-*")) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) - .build(); - assertAcked( - client().execute( - TransportPutComposableIndexTemplateAction.TYPE, - new
TransportPutComposableIndexTemplateAction.Request("1").indexTemplate(template) - ).actionGet() - ); - - IndexRequest indexRequest = new IndexRequest("my-index").create(true).source("field", "value"); - var indexResponse = client().index(indexRequest).actionGet(); - assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); - - { - var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) - .actionGet(); - assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), nullValue()); - } - - indexRequest = new IndexRequest("logs-myapp-prod").create(true).source("@timestamp", "2000-01-01T00:00"); - indexResponse = client().index(indexRequest).actionGet(); - assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); - - assertBusy(() -> { - var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) - .actionGet(); - assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), equalTo("true")); - }); - } - - public void testLogsPatternUsageNoLogsStarDashStarUsage() throws Exception { - var template = ComposableIndexTemplate.builder() - .indexPatterns(List.of("log-*-*")) - .template(new Template(Settings.builder().put("index.number_of_replicas", 0).build(), null, null)) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) - .build(); - assertAcked( - client().execute( - TransportPutComposableIndexTemplateAction.TYPE, - new TransportPutComposableIndexTemplateAction.Request("1").indexTemplate(template) - ).actionGet() - ); - - var indexRequest = new IndexRequest("log-myapp-prod").create(true).source("@timestamp", "2000-01-01T00:00"); - var indexResponse = client().index(indexRequest).actionGet(); - assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); - - ensureGreen("log-myapp-prod"); - // Check that LogsPatternUsageService checked three times by checking generic threadpool stats. - // (the LogsPatternUsageService's check is scheduled via the generic threadpool) - var threadPool = getInstanceFromNode(ThreadPool.class); - var beforeStat = getGenericThreadpoolStat(threadPool); - assertBusy(() -> { - var stat = getGenericThreadpoolStat(threadPool); - assertThat(stat.completed(), greaterThanOrEqualTo(beforeStat.completed() + 3)); - }); - var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) - .actionGet(); - assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), nullValue()); - } - - private static ThreadPoolStats.Stats getGenericThreadpoolStat(ThreadPool threadPool) { - var result = threadPool.stats().stats().stream().filter(stats -> stats.name().equals(ThreadPool.Names.GENERIC)).toList(); - assertThat(result.size(), equalTo(1)); - return result.get(0); - } - - @Override - public void tearDown() throws Exception { - // Need to clean up the data stream and logsdb.prior_logs_usage setting because ESSingleNodeTestCase tests aren't allowed to leave - // persistent cluster settings around. 
- - var deleteDataStreamsRequest = new DeleteDataStreamAction.Request(TEST_REQUEST_TIMEOUT, "*"); - deleteDataStreamsRequest.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN); - assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest)); - - var settings = Settings.builder().put("logsdb.prior_logs_usage", (String) null).build(); - client().admin() - .cluster() - .updateSettings(new ClusterUpdateSettingsRequest(TimeValue.ONE_MINUTE, TimeValue.ONE_MINUTE).persistentSettings(settings)) - .actionGet(); - - super.tearDown(); - } -} diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java deleted file mode 100644 index 2cd2f9216aba3..0000000000000 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.logsdb; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.DataStreamTestHelper; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.List; -import java.util.function.Supplier; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -public class LogsPatternUsageServiceTests extends ESTestCase { - - public void testOnMaster() throws Exception { - var nodeSettings = Settings.builder().put("logsdb.usage_check.max_period", "1s").build(); - var client = mock(Client.class); - doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock - .getArguments()[2]; - var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); - listener.onResponse(new ClusterUpdateSettingsResponse(true, Settings.EMPTY, persistentSettings)); - return null; - }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - - try (var threadPool = new TestThreadPool(getTestName())) { - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); - Supplier metadataSupplier = clusterState::metadata; - - var service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - // pre-check: - 
assertFalse(service.isMaster); - assertFalse(service.hasPriorLogsUsage); - assertNull(service.cancellable); - // Trigger service: - service.onMaster(); - assertBusy(() -> { - assertTrue(service.isMaster); - assertTrue(service.hasPriorLogsUsage); - assertNull(service.cancellable); - }); - } - } - - public void testCheckHasUsage() { - var nodeSettings = Settings.EMPTY; - var client = mock(Client.class); - doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock - .getArguments()[2]; - var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); - listener.onResponse(new ClusterUpdateSettingsResponse(true, Settings.EMPTY, persistentSettings)); - return null; - }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - - var threadPool = mock(ThreadPool.class); - var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); - when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); - Supplier metadataSupplier = clusterState::metadata; - - LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - service.onMaster(); - assertFalse(service.hasPriorLogsUsage); - assertNotNull(service.cancellable); - assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); - service.check(); - assertTrue(service.hasPriorLogsUsage); - assertNull(service.cancellable); - assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); - - verify(threadPool, times(1)).schedule(any(), any(), any()); - verify(client, times(1)).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - } - - public void testCheckHasUsageNoMatch() { - var nodeSettings = Settings.EMPTY; - var client = mock(Client.class); - - var threadPool = mock(ThreadPool.class); - var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); - when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()); - Supplier metadataSupplier = clusterState::metadata; - - LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - service.onMaster(); - assertFalse(service.hasPriorLogsUsage); - assertNotNull(service.cancellable); - assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); - service.check(); - assertFalse(service.hasPriorLogsUsage); - assertNotNull(service.cancellable); - assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(2)); - - verify(threadPool, times(2)).schedule(any(), any(), any()); - verifyNoInteractions(client); - } - - public void testCheckPriorLogsUsageAlreadySet() { - var nodeSettings = Settings.EMPTY; - var client = mock(Client.class); - - var threadPool = mock(ThreadPool.class); - var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); - when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()); - clusterState = ClusterState.builder(clusterState) - .metadata( - Metadata.builder(clusterState.getMetadata()) - .persistentSettings(Settings.builder().put("logsdb.prior_logs_usage", 
true).build()) - .build() - ) - .build(); - Supplier metadataSupplier = clusterState::metadata; - - LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - service.isMaster = true; - assertFalse(service.hasPriorLogsUsage); - assertNull(service.cancellable); - service.check(); - assertTrue(service.hasPriorLogsUsage); - assertNull(service.cancellable); - - verifyNoInteractions(client, threadPool); - } - - public void testCheckHasUsageUnexpectedResponse() { - var nodeSettings = Settings.EMPTY; - var client = mock(Client.class); - doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock - .getArguments()[2]; - ClusterUpdateSettingsResponse response; - if (randomBoolean()) { - var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); - response = new ClusterUpdateSettingsResponse(false, Settings.EMPTY, persistentSettings); - } else { - response = new ClusterUpdateSettingsResponse(true, Settings.EMPTY, Settings.EMPTY); - } - listener.onResponse(response); - return null; - }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - - var threadPool = mock(ThreadPool.class); - var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); - when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); - Supplier metadataSupplier = clusterState::metadata; - - LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - service.isMaster = true; - assertFalse(service.hasPriorLogsUsage); - assertNull(service.cancellable); - service.check(); - assertFalse(service.hasPriorLogsUsage); - assertNotNull(service.cancellable); - - verify(threadPool, times(1)).schedule(any(), any(), any()); - verify(client, times(1)).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - } - - public void testHasLogsUsage() { - var metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(), List.of()).getMetadata(); - assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1", 1)), List.of()).getMetadata(); - assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1", 1)), List.of()).getMetadata(); - assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()).getMetadata(); - assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()).getMetadata(); - assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams( - List.of(new Tuple<>("log-app1-prod", 1), new Tuple<>("logs-app2-prod", 1)), - List.of() - ).getMetadata(); - assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams( - List.of(new Tuple<>("log-app1", 1), new Tuple<>("logs-app2-prod", 1)), - List.of() - ).getMetadata(); - assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); - } - -} 
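For reference, the logs-*-* check that the removed tests above pin down boils down to Regex.simpleMatch against LOGS_PATTERN: a data stream name needs the "logs-" prefix plus a second dash-separated part. A minimal sketch of those expectations (LogsPatternDemo is a hypothetical illustration class, not part of this change; it assumes LOGS_PATTERN is the "logs-*-*" pattern referenced above):

import org.elasticsearch.common.regex.Regex;

public class LogsPatternDemo {
    public static void main(String[] args) {
        String pattern = "logs-*-*"; // assumed value of LogsdbIndexModeSettingsProvider.LOGS_PATTERN
        System.out.println(Regex.simpleMatch(pattern, "logs-app1"));      // false: no second "-" separated part
        System.out.println(Regex.simpleMatch(pattern, "log-app1-prod"));  // false: prefix is "log-", not "logs-"
        System.out.println(Regex.simpleMatch(pattern, "logs-app1-prod")); // true: logs-<dataset>-<namespace>
    }
}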
diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java b/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java index fcac791cb2057..a683305c9fde3 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java +++ b/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; @@ -23,6 +24,7 @@ public class LogsdbTestSuiteIT extends ESClientYamlSuiteTestCase { .distribution(DistributionType.DEFAULT) .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") + .feature(FeatureFlag.INDEX_RECOVERY_USE_SYNTHETIC_SOURCE) .build(); public LogsdbTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source_recovery.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source_recovery.yml new file mode 100644 index 0000000000000..cc2216997c6d3 --- /dev/null +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source_recovery.yml @@ -0,0 +1,261 @@ +--- +synthetic recovery for synthetic source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_synthetic_recovery + body: + settings: + index: + mapping.source.mode: synthetic + + - do: + indices.get_settings: + index: test_synthetic_recovery + include_defaults: true + + - match: { test_synthetic_recovery.settings.index.mapping.source.mode: synthetic } + - match: { test_synthetic_recovery.defaults.index.recovery.use_synthetic_source: "true" } + +--- +synthetic recovery for stored source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_stored_recovery + body: + settings: + index: + mapping.source.mode: stored + + - do: + indices.get_settings: + index: test_stored_recovery + include_defaults: true + + - match: { test_stored_recovery.settings.index.mapping.source.mode: stored } + - match: { test_stored_recovery.defaults.index.recovery.use_synthetic_source: "false" } + +--- +synthetic recovery for disabled source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_disabled_recovery + body: + settings: + index: + mapping.source.mode: disabled + + - do: + indices.get_settings: + index: test_disabled_recovery + include_defaults: true + + - match: { test_disabled_recovery.settings.index.mapping.source.mode: disabled } + - match: { test_disabled_recovery.defaults.index.recovery.use_synthetic_source: "false" } + +--- +synthetic recovery for standard index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: 
test_standard_index_recovery + body: + settings: + index: + mode: standard + + - do: + indices.get_settings: + index: test_standard_index_recovery + include_defaults: true + + - match: { test_standard_index_recovery.defaults.index.recovery.use_synthetic_source: "false" } + +--- +synthetic recovery for logsdb index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_logsdb_index_recovery + body: + settings: + index: + mode: logsdb + + - do: + indices.get_settings: + index: test_logsdb_index_recovery + include_defaults: true + + - match: { test_logsdb_index_recovery.defaults.index.recovery.use_synthetic_source: "true" } + +--- +synthetic recovery for time_series index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_time_series_index_recovery + body: + settings: + index: + mode: time_series + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + indices.get_settings: + index: test_time_series_index_recovery + include_defaults: true + + - match: { test_time_series_index_recovery.defaults.index.recovery.use_synthetic_source: "true" } + +--- +override synthetic recovery for synthetic source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_synthetic_recovery_override + body: + settings: + index: + mapping.source.mode: synthetic + recovery.use_synthetic_source: false + + - do: + indices.get_settings: + index: test_synthetic_recovery_override + include_defaults: true + + - match: { test_synthetic_recovery_override.settings.index.mapping.source.mode: synthetic } + - match: { test_synthetic_recovery_override.settings.index.recovery.use_synthetic_source: "false" } + +--- +override synthetic recovery for stored source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + catch: bad_request + indices.create: + index: test_stored_recovery_override + body: + settings: + index: + mapping.source.mode: stored + recovery.use_synthetic_source: true + +--- +override synthetic recovery for disabled source mode index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + catch: bad_request + indices.create: + index: test_disabled_recovery_override + body: + settings: + index: + mapping.source.mode: disabled + recovery.use_synthetic_source: true + +--- +override synthetic recovery for standard index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + catch: bad_request + indices.create: + index: test_standard_index_recovery_override + body: + settings: + index: + mode: standard + recovery.use_synthetic_source: true + +--- +override synthetic recovery for logsdb index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_logsdb_index_recovery_override + body: + settings: + index: + mode: logsdb + recovery.use_synthetic_source: false + + - do: + indices.get_settings: + index: 
test_logsdb_index_recovery_override + include_defaults: true + + - match: { test_logsdb_index_recovery_override.settings.index.recovery.use_synthetic_source: "false" } + +--- +override synthetic recovery for time_series index: + - requires: + cluster_features: [ "mapper.synthetic_recovery_source" ] + reason: requires synthetic recovery source + + - do: + indices.create: + index: test_time_series_index_recovery_override + body: + settings: + index: + mode: time_series + recovery.use_synthetic_source: false + routing_path: [ keyword ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + keyword: + type: keyword + time_series_dimension: true + + - do: + indices.get_settings: + index: test_time_series_index_recovery_override + include_defaults: true + + - match: { test_time_series_index_recovery_override.settings.index.recovery.use_synthetic_source: "false" } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricMapperPlugin.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricMapperPlugin.java index fea55e793d638..9320df583b4c5 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricMapperPlugin.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricMapperPlugin.java @@ -17,7 +17,7 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.xpack.aggregatemetric.aggregations.metrics.AggregateMetricsAggregatorsRegistrar; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; @@ -32,7 +32,7 @@ public class AggregateMetricMapperPlugin extends Plugin implements MapperPlugin, @Override public Map getMappers() { - return singletonMap(AggregateDoubleMetricFieldMapper.CONTENT_TYPE, AggregateDoubleMetricFieldMapper.PARSER); + return singletonMap(AggregateMetricDoubleFieldMapper.CONTENT_TYPE, AggregateMetricDoubleFieldMapper.PARSER); } @Override diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregator.java index add4fb3e5d2db..a12d476af3ac1 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregator.java @@ -24,14 +24,14 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import 
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Map; class AggregateMetricBackedAvgAggregator extends NumericMetricsAggregator.SingleValue { - final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; LongArray counts; DoubleArray sums; @@ -47,7 +47,7 @@ class AggregateMetricBackedAvgAggregator extends NumericMetricsAggregator.Single ) throws IOException { super(name, context, parent, metadata); assert valuesSourceConfig.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) valuesSourceConfig.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) valuesSourceConfig.getValuesSource(); final BigArrays bigArrays = context.bigArrays(); counts = bigArrays.newLongArray(1, true); sums = bigArrays.newDoubleArray(1, true); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregator.java index dd485ec218371..a007f334a69e2 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregator.java @@ -24,14 +24,14 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Map; class AggregateMetricBackedMaxAggregator extends NumericMetricsAggregator.SingleValue { - private final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + private final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; final DocValueFormat formatter; DoubleArray maxes; @@ -44,7 +44,7 @@ class AggregateMetricBackedMaxAggregator extends NumericMetricsAggregator.Single ) throws IOException { super(name, context, parent, metadata); assert config.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) config.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) config.getValuesSource(); maxes = context.bigArrays().newDoubleArray(1, false); maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY); this.formatter = config.format(); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregator.java index 5a70801b8ae76..3b024c512aa82 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregator.java +++ 
b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregator.java @@ -24,14 +24,14 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Map; class AggregateMetricBackedMinAggregator extends NumericMetricsAggregator.SingleValue { - private final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + private final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; final DocValueFormat format; DoubleArray mins; @@ -44,7 +44,7 @@ class AggregateMetricBackedMinAggregator extends NumericMetricsAggregator.Single ) throws IOException { super(name, context, parent, metadata); assert config.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) config.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) config.getValuesSource(); mins = context.bigArrays().newDoubleArray(1, false); mins.fill(0, mins.size(), Double.POSITIVE_INFINITY); this.format = config.format(); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregator.java index f4c28d7381214..480590b359bd3 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregator.java @@ -23,14 +23,14 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Map; class AggregateMetricBackedSumAggregator extends NumericMetricsAggregator.SingleValue { - private final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + private final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; private final DocValueFormat format; private DoubleArray sums; @@ -45,7 +45,7 @@ class AggregateMetricBackedSumAggregator extends NumericMetricsAggregator.Single ) throws IOException { super(name, context, parent, metadata); assert valuesSourceConfig.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) valuesSourceConfig.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) valuesSourceConfig.getValuesSource(); sums = context.bigArrays().newDoubleArray(1, true); compensations = context.bigArrays().newDoubleArray(1, true); this.format = 
valuesSourceConfig.format(); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregator.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregator.java index 065a5411b0bcb..49b3fd8846f9c 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregator.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregator.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSource; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; import java.io.IOException; import java.util.Map; @@ -32,7 +32,7 @@ */ class AggregateMetricBackedValueCountAggregator extends NumericMetricsAggregator.SingleValue { - private final AggregateMetricsValuesSource.AggregateDoubleMetric valuesSource; + private final AggregateMetricsValuesSource.AggregateMetricDouble valuesSource; // a count per bucket LongArray counts; @@ -46,7 +46,7 @@ class AggregateMetricBackedValueCountAggregator extends NumericMetricsAggregator ) throws IOException { super(name, aggregationContext, parent, metadata); assert valuesSourceConfig.hasValues(); - this.valuesSource = (AggregateMetricsValuesSource.AggregateDoubleMetric) valuesSourceConfig.getValuesSource(); + this.valuesSource = (AggregateMetricsValuesSource.AggregateMetricDouble) valuesSourceConfig.getValuesSource(); counts = bigArrays().newLongArray(1, true); } @@ -55,7 +55,7 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, final BigArrays bigArrays = bigArrays(); final SortedNumericDoubleValues values = valuesSource.getAggregateMetricValues( aggCtx.getLeafReaderContext(), - AggregateDoubleMetricFieldMapper.Metric.value_count + AggregateMetricDoubleFieldMapper.Metric.value_count ); return new LeafBucketCollectorBase(sub, values) { diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSource.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSource.java index a964573dbb5d9..c4a9c37fcf380 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSource.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSource.java @@ -13,23 +13,23 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.support.AggregationContext; -import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateDoubleMetricFieldData; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; -import 
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateMetricDoubleFieldData; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.function.Function; public class AggregateMetricsValuesSource { - public abstract static class AggregateDoubleMetric extends org.elasticsearch.search.aggregations.support.ValuesSource { + public abstract static class AggregateMetricDouble extends org.elasticsearch.search.aggregations.support.ValuesSource { public abstract SortedNumericDoubleValues getAggregateMetricValues(LeafReaderContext context, Metric metric) throws IOException; - public static class Fielddata extends AggregateDoubleMetric { + public static class Fielddata extends AggregateMetricDouble { - protected final IndexAggregateDoubleMetricFieldData indexFieldData; + protected final IndexAggregateMetricDoubleFieldData indexFieldData; - public Fielddata(IndexAggregateDoubleMetricFieldData indexFieldData) { + public Fielddata(IndexAggregateMetricDoubleFieldData indexFieldData) { this.indexFieldData = indexFieldData; } @@ -51,7 +51,7 @@ public boolean advanceExact(int doc) throws IOException { @Override protected Function roundingPreparer(AggregationContext context) throws IOException { - throw AggregationErrors.unsupportedRounding(AggregateDoubleMetricFieldMapper.CONTENT_TYPE); + throw AggregationErrors.unsupportedRounding(AggregateMetricDoubleFieldMapper.CONTENT_TYPE); } public SortedNumericDoubleValues getAggregateMetricValues(LeafReaderContext context, Metric metric) throws IOException { diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java index bcac3f12fd131..e47275ed4b756 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java @@ -15,7 +15,7 @@ import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateDoubleMetricFieldData; +import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateMetricDoubleFieldData; import java.util.Locale; import java.util.function.LongSupplier; @@ -43,7 +43,7 @@ public ValuesSource getScript(AggregationScript.LeafFactory script, ValueType sc public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script) { final IndexFieldData indexFieldData = fieldContext.indexFieldData(); - if ((indexFieldData instanceof IndexAggregateDoubleMetricFieldData) == false) { + if ((indexFieldData instanceof IndexAggregateMetricDoubleFieldData) == false) { throw new IllegalArgumentException( "Expected aggregate_metric_double type on field [" + fieldContext.field() @@ -52,7 +52,7 @@ public ValuesSource getField(FieldContext fieldContext, 
AggregationScript.LeafFa + "]" ); } - return new AggregateMetricsValuesSource.AggregateDoubleMetric.Fielddata((IndexAggregateDoubleMetricFieldData) indexFieldData); + return new AggregateMetricsValuesSource.AggregateMetricDouble.Fielddata((IndexAggregateMetricDoubleFieldData) indexFieldData); } @Override diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateDoubleMetricFieldData.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateMetricDoubleFieldData.java similarity index 83% rename from x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateDoubleMetricFieldData.java rename to x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateMetricDoubleFieldData.java index eb07f9c641efb..a98b6eb4c04a4 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateDoubleMetricFieldData.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/IndexAggregateMetricDoubleFieldData.java @@ -13,12 +13,12 @@ /** * Specialization of {@link IndexFieldData} for aggregate_metric. */ -public abstract class IndexAggregateDoubleMetricFieldData implements IndexFieldData { +public abstract class IndexAggregateMetricDoubleFieldData implements IndexFieldData { protected final String fieldName; protected final ValuesSourceType valuesSourceType; - public IndexAggregateDoubleMetricFieldData(String fieldName, ValuesSourceType valuesSourceType) { + public IndexAggregateMetricDoubleFieldData(String fieldName, ValuesSourceType valuesSourceType) { this.fieldName = fieldName; this.valuesSourceType = valuesSourceType; } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateDoubleMetricFieldData.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateMetricDoubleFieldData.java similarity index 72% rename from x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateDoubleMetricFieldData.java rename to x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateMetricDoubleFieldData.java index c8a89456be5e5..c11ccd5d4ff2e 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateDoubleMetricFieldData.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/fielddata/LeafAggregateMetricDoubleFieldData.java @@ -8,12 +8,12 @@ import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; /** - * {@link LeafFieldData} specialization for aggregate_double_metric data. + * {@link LeafFieldData} specialization for aggregate_metric_double data. 
*/ -public interface LeafAggregateDoubleMetricFieldData extends LeafFieldData { +public interface LeafAggregateMetricDoubleFieldData extends LeafFieldData { /** * Return aggregate_metric of double values for a given metric diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java similarity index 79% rename from x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java rename to x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java index df4a0aed01bc2..70adabf414f66 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; @@ -27,6 +28,8 @@ import org.elasticsearch.index.fielddata.ScriptDocValues.DoublesSupplier; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.CompositeSyntheticFieldLoader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; @@ -56,8 +59,8 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentSubParser; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateDoubleMetricFieldData; -import org.elasticsearch.xpack.aggregatemetric.fielddata.LeafAggregateDoubleMetricFieldData; +import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateMetricDoubleFieldData; +import org.elasticsearch.xpack.aggregatemetric.fielddata.LeafAggregateMetricDoubleFieldData; import java.io.IOException; import java.time.ZoneId; @@ -75,15 +78,15 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** A {@link FieldMapper} for a field containing aggregate metrics such as min/max/value_count etc. 
*/ -public class AggregateDoubleMetricFieldMapper extends FieldMapper { +public class AggregateMetricDoubleFieldMapper extends FieldMapper { - private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(AggregateDoubleMetricFieldMapper.class); + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(AggregateMetricDoubleFieldMapper.class); public static final String CONTENT_TYPE = "aggregate_metric_double"; public static final String SUBFIELD_SEPARATOR = "."; - private static AggregateDoubleMetricFieldMapper toType(FieldMapper in) { - return (AggregateDoubleMetricFieldMapper) in; + private static AggregateMetricDoubleFieldMapper toType(FieldMapper in) { + return (AggregateMetricDoubleFieldMapper) in; } /** @@ -94,7 +97,7 @@ private static AggregateDoubleMetricFieldMapper toType(FieldMapper in) { * @return the name of the subfield */ public static String subfieldName(String fieldName, Metric metric) { - return fieldName + AggregateDoubleMetricFieldMapper.SUBFIELD_SEPARATOR + metric.name(); + return fieldName + AggregateMetricDoubleFieldMapper.SUBFIELD_SEPARATOR + metric.name(); } /** @@ -147,7 +150,7 @@ public static final class Builder extends FieldMapper.Builder { /** * Parameter that marks this field as a time series metric defining its time series metric type. - * For {@link AggregateDoubleMetricFieldMapper} fields gauge, counter and summary metric types are + * For {@link AggregateMetricDoubleFieldMapper} fields gauge, counter and summary metric types are * supported. */ private final Parameter timeSeriesMetric; @@ -191,7 +194,7 @@ public Builder metric(MetricType metric) { } @Override - public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { + public AggregateMetricDoubleFieldMapper build(MapperBuilderContext context) { if (multiFieldsBuilder.hasMultiFields()) { DEPRECATION_LOGGER.warn( DeprecationCategory.MAPPINGS, @@ -258,7 +261,7 @@ public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { throw new IllegalArgumentException("Duplicate keys " + l + "and " + r + "."); }, () -> new EnumMap<>(Metric.class))); - AggregateDoubleMetricFieldType metricFieldType = new AggregateDoubleMetricFieldType( + AggregateMetricDoubleFieldType metricFieldType = new AggregateMetricDoubleFieldType( context.buildFullName(leafName()), meta.getValue(), timeSeriesMetric.getValue() @@ -266,7 +269,7 @@ public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { metricFieldType.setMetricFields(metricFields); metricFieldType.setDefaultMetric(defaultMetric.getValue()); - return new AggregateDoubleMetricFieldMapper(leafName(), metricFieldType, metricMappers, builderParams(this, context), this); + return new AggregateMetricDoubleFieldMapper(leafName(), metricFieldType, metricMappers, builderParams(this, context), this); } } @@ -275,7 +278,7 @@ public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { notInMultiFields(CONTENT_TYPE) ); - public static final class AggregateDoubleMetricFieldType extends SimpleMappedFieldType { + public static final class AggregateMetricDoubleFieldType extends SimpleMappedFieldType { private EnumMap metricFields; @@ -283,12 +286,12 @@ public static final class AggregateDoubleMetricFieldType extends SimpleMappedFie private final MetricType metricType; - public AggregateDoubleMetricFieldType(String name) { + public AggregateMetricDoubleFieldType(String name) { this(name, Collections.emptyMap(), null); } - public AggregateDoubleMetricFieldType(String 
name, Map meta, MetricType metricType) { - super(name, true, false, false, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + public AggregateMetricDoubleFieldType(String name, Map meta, MetricType metricType) { + super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.metricType = metricType; } @@ -323,7 +326,7 @@ public Map getMetricFields() { public void addMetricField(Metric m, NumberFieldMapper.NumberFieldType subfield) { if (metricFields == null) { - metricFields = new EnumMap<>(AggregateDoubleMetricFieldMapper.Metric.class); + metricFields = new EnumMap<>(AggregateMetricDoubleFieldMapper.Metric.class); } if (name() == null) { @@ -405,13 +408,13 @@ public boolean isAggregatable() { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { - return (cache, breakerService) -> new IndexAggregateDoubleMetricFieldData( + return (cache, breakerService) -> new IndexAggregateMetricDoubleFieldData( name(), AggregateMetricsValuesSourceType.AGGREGATE_METRIC ) { @Override - public LeafAggregateDoubleMetricFieldData load(LeafReaderContext context) { - return new LeafAggregateDoubleMetricFieldData() { + public LeafAggregateMetricDoubleFieldData load(LeafReaderContext context) { + return new LeafAggregateMetricDoubleFieldData() { @Override public SortedNumericDoubleValues getAggregateMetricValues(final Metric metric) { try { @@ -473,7 +476,7 @@ public long ramBytesUsed() { } @Override - public LeafAggregateDoubleMetricFieldData loadDirect(LeafReaderContext context) { + public LeafAggregateMetricDoubleFieldData loadDirect(LeafReaderContext context) { return load(context); } @@ -508,6 +511,141 @@ public ValueFetcher valueFetcher(SearchExecutionContext context, String format) return SourceValueFetcher.identity(name(), context, format); } + public class AggregateMetricDoubleBlockLoader extends BlockDocValuesReader.DocValuesBlockLoader { + NumberFieldMapper.NumberFieldType minFieldType = metricFields.get(Metric.min); + NumberFieldMapper.NumberFieldType maxFieldType = metricFields.get(Metric.max); + NumberFieldMapper.NumberFieldType sumFieldType = metricFields.get(Metric.sum); + NumberFieldMapper.NumberFieldType countFieldType = metricFields.get(Metric.value_count); + + private AggregateMetricDoubleBlockLoader() {} + + static NumericDocValues getNumericDocValues(NumberFieldMapper.NumberFieldType field, LeafReader leafReader) throws IOException { + if (field == null) { + return null; + } + String fieldName = field.name(); + var values = leafReader.getNumericDocValues(fieldName); + if (values != null) { + return values; + } + + var sortedValues = leafReader.getSortedNumericDocValues(fieldName); + return DocValues.unwrapSingleton(sortedValues); + } + + @Override + public AllReader reader(LeafReaderContext context) throws IOException { + NumericDocValues minValues = getNumericDocValues(minFieldType, context.reader()); + NumericDocValues maxValues = getNumericDocValues(maxFieldType, context.reader()); + NumericDocValues sumValues = getNumericDocValues(sumFieldType, context.reader()); + NumericDocValues valueCountValues = getNumericDocValues(countFieldType, context.reader()); + + return new BlockDocValuesReader() { + + private int docID = -1; + + @Override + protected int docId() { + return docID; + } + + @Override + public String toString() { + return "BlockDocValuesReader.AggregateMetricDouble"; + } + + @Override + public Block read(BlockFactory factory, Docs docs) throws IOException { + try (var builder = 
factory.aggregateMetricDoubleBuilder(docs.count())) { + copyDoubleValuesToBuilder(docs, builder.min(), minValues); + copyDoubleValuesToBuilder(docs, builder.max(), maxValues); + copyDoubleValuesToBuilder(docs, builder.sum(), sumValues); + copyIntValuesToBuilder(docs, builder.count(), valueCountValues); + return builder.build(); + } + } + + private void copyDoubleValuesToBuilder(Docs docs, BlockLoader.DoubleBuilder builder, NumericDocValues values) + throws IOException { + int lastDoc = -1; + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < lastDoc) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (values == null || values.advanceExact(doc) == false) { + builder.appendNull(); + } else { + double value = NumericUtils.sortableLongToDouble(values.longValue()); + lastDoc = doc; + this.docID = doc; + builder.appendDouble(value); + } + } + } + + private void copyIntValuesToBuilder(Docs docs, BlockLoader.IntBuilder builder, NumericDocValues values) + throws IOException { + int lastDoc = -1; + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < lastDoc) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (values == null || values.advanceExact(doc) == false) { + builder.appendNull(); + } else { + int value = Math.toIntExact(values.longValue()); + lastDoc = doc; + this.docID = doc; + builder.appendInt(value); + } + } + } + + @Override + public void read(int docId, StoredFields storedFields, Builder builder) throws IOException { + var blockBuilder = (AggregateMetricDoubleBuilder) builder; + this.docID = docId; + readSingleRow(docId, blockBuilder); + } + + private void readSingleRow(int docId, AggregateMetricDoubleBuilder builder) throws IOException { + if (minValues.advanceExact(docId)) { + builder.min().appendDouble(NumericUtils.sortableLongToDouble(minValues.longValue())); + } else { + builder.min().appendNull(); + } + if (maxValues.advanceExact(docId)) { + builder.max().appendDouble(NumericUtils.sortableLongToDouble(maxValues.longValue())); + } else { + builder.max().appendNull(); + } + if (sumValues.advanceExact(docId)) { + builder.sum().appendDouble(NumericUtils.sortableLongToDouble(sumValues.longValue())); + } else { + builder.sum().appendNull(); + } + if (valueCountValues.advanceExact(docId)) { + builder.count().appendInt(Math.toIntExact(valueCountValues.longValue())); + } else { + builder.count().appendNull(); + } + } + }; + } + + @Override + public Builder builder(BlockFactory factory, int expectedCount) { + return factory.aggregateMetricDoubleBuilder(expectedCount); + } + } + + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return new AggregateMetricDoubleBlockLoader(); + } + /** * If field is a time series metric field, returns its metric type * @return the metric type or null @@ -536,7 +674,7 @@ public MetricType getMetricType() { private final IndexMode indexMode; - private AggregateDoubleMetricFieldMapper( + private AggregateMetricDoubleFieldMapper( String simpleName, MappedFieldType mappedFieldType, EnumMap metricFieldMappers, @@ -564,8 +702,8 @@ Metric defaultMetric() { } @Override - public AggregateDoubleMetricFieldType fieldType() { - return (AggregateDoubleMetricFieldType) super.fieldType(); + public AggregateMetricDoubleFieldType fieldType() { + return (AggregateMetricDoubleFieldType) super.fieldType(); } @Override diff --git 
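
The new AggregateMetricDoubleBlockLoader above reads each metric out of its own per-subfield numeric doc values and reassembles them into one composite block (note the field type constructor now passes hasDocValues=true, which is what makes the loader reachable). The double metrics are stored in Lucene's sortable-long encoding, so every value goes through NumericUtils.sortableLongToDouble on the way out, while value_count stays integral and is narrowed with Math.toIntExact. A standalone sketch of just that decode step, using the real Lucene NumericUtils API on toy values (no index involved):

import org.apache.lucene.util.NumericUtils;

public class SortableLongDecodeDemo {
    public static void main(String[] args) {
        // min/max/sum subfields hold doubles encoded as sortable longs in doc values
        double[] metricValues = { -3.5, 0.0, 42.25 };
        for (double d : metricValues) {
            long stored = NumericUtils.doubleToSortableLong(d);          // what the subfield's doc values hold
            double decoded = NumericUtils.sortableLongToDouble(stored);  // what the block loader appends
            System.out.printf("%s -> %d -> %s%n", d, stored, decoded);
        }
        // value_count is a plain long narrowed to int, no decode needed
        System.out.println(Math.toIntExact(7L));
    }
}
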
a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java index 8378f99b2d7b2..fade3f68376d0 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedAvgAggregatorTests extends AggregatorTestCase { @@ -116,8 +116,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java index 9cbafff116b4c..33e9151773fc2 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import 
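
A point these aggregator tests exercise but never state: an avg aggregation over an aggregate_metric_double field never sees raw samples. It divides the pre-aggregated sum subfield by the value_count subfield, which is why the Avg test above registers exactly those two metrics. A back-of-envelope illustration in plain Java, with hypothetical numbers:

public class AggregateMetricAvgDemo {
    public static void main(String[] args) {
        // Two pre-aggregated documents, e.g. produced by downsampling: {sum, value_count}
        double[] sums = { 90.0, 30.0 };
        long[] valueCounts = { 9, 3 };

        double totalSum = 0;
        long totalCount = 0;
        for (int i = 0; i < sums.length; i++) {
            totalSum += sums[i];
            totalCount += valueCounts[i];
        }
        // avg over the original (never indexed) samples: 120 / 12 = 10.0
        System.out.println(totalSum / totalCount);
    }
}
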
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedMaxAggregatorTests extends AggregatorTestCase { @@ -116,8 +116,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.min, Metric.max)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java index fb4ea5785fbce..0f655b90a2358 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedMinAggregatorTests extends AggregatorTestCase { @@ -116,8 +116,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType 
createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.min, Metric.max)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java index 91a34b4643456..e0e421189497c 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedSumAggregatorTests extends AggregatorTestCase { @@ -116,8 +116,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java index faff3c2d7cb30..dbae604b8f725 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java @@ -27,15 +27,15 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import 
org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; import org.elasticsearch.xpack.aggregatemetric.aggregations.support.AggregateMetricsValuesSourceType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.List; import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; public class AggregateMetricBackedValueCountAggregatorTests extends AggregatorTestCase { @@ -115,8 +115,8 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private AggregateMetricDoubleFieldType createDefaultFieldType(String fieldName) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(fieldName); for (Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapperTests.java similarity index 93% rename from x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java rename to x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapperTests.java index 72c2beeed3ba4..3674043a72766 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapperTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import org.hamcrest.Matchers; import org.junit.AssumptionViolatedException; @@ -36,19 +36,20 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.IGNORE_MALFORMED; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.METRICS; +import static 
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Names.IGNORE_MALFORMED; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Names.METRICS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.IsInstanceOf.instanceOf; -public class AggregateDoubleMetricFieldMapperTests extends MapperTestCase { +public class AggregateMetricDoubleFieldMapperTests extends MapperTestCase { public static final String METRICS_FIELD = METRICS; - public static final String CONTENT_TYPE = AggregateDoubleMetricFieldMapper.CONTENT_TYPE; - public static final String DEFAULT_METRIC = AggregateDoubleMetricFieldMapper.Names.DEFAULT_METRIC; + public static final String CONTENT_TYPE = AggregateMetricDoubleFieldMapper.CONTENT_TYPE; + public static final String DEFAULT_METRIC = AggregateMetricDoubleFieldMapper.Names.DEFAULT_METRIC; @Override protected Collection getPlugins() { @@ -108,7 +109,7 @@ public void testParseValue() throws Exception { assertEquals("DoubleField ", doc.rootDoc().getField("field.min").toString()); Mapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); } /** @@ -324,8 +325,8 @@ public void testExplicitDefaultMetric() throws Exception { ); Mapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); - assertEquals(Metric.sum, ((AggregateDoubleMetricFieldMapper) fieldMapper).defaultMetric()); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); + assertEquals(Metric.sum, ((AggregateMetricDoubleFieldMapper) fieldMapper).defaultMetric()); } /** @@ -337,8 +338,8 @@ public void testImplicitDefaultMetricSingleMetric() throws Exception { ); Mapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); - assertEquals(Metric.value_count, ((AggregateDoubleMetricFieldMapper) fieldMapper).defaultMetric); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); + assertEquals(Metric.value_count, ((AggregateMetricDoubleFieldMapper) fieldMapper).defaultMetric); } /** @@ -347,8 +348,8 @@ public void testImplicitDefaultMetricSingleMetric() throws Exception { public void testImplicitDefaultMetric() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); - assertEquals(Metric.max, ((AggregateDoubleMetricFieldMapper) fieldMapper).defaultMetric); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); + assertEquals(Metric.max, ((AggregateMetricDoubleFieldMapper) fieldMapper).defaultMetric); } /** @@ -417,7 +418,7 @@ public void testParseNestedValue() throws Exception { ); Mapper fieldMapper = mapper.mappers().getMapper("field.subfield"); - assertThat(fieldMapper, instanceOf(AggregateDoubleMetricFieldMapper.class)); + assertThat(fieldMapper, instanceOf(AggregateMetricDoubleFieldMapper.class)); ParsedDocument doc = mapper.parse( source( b -> b.startObject("field") @@ -461,7 +462,7 @@ public void testFieldCaps() throws IOException { protected void 
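
The three default-metric tests above pin down the resolution rule as I read it: an explicit default_metric wins; with a single configured metric that metric is implied; otherwise max is implied (the minimalMapping used here evidently configures several metrics including max). A compact restatement of that rule, hedged as an inference from these tests rather than the mapper's actual code:

import java.util.List;
import java.util.Optional;

public class DefaultMetricRuleDemo {
    enum Metric { min, max, sum, value_count }

    // Resolution rule as implied by the tests: explicit wins, a lone metric is
    // implied, otherwise fall back to max (assumed present in the mapping).
    static Metric resolveDefault(Optional<Metric> explicit, List<Metric> configured) {
        if (explicit.isPresent()) return explicit.get();
        if (configured.size() == 1) return configured.get(0);
        return Metric.max;
    }

    public static void main(String[] args) {
        System.out.println(resolveDefault(Optional.of(Metric.sum), List.of(Metric.min, Metric.sum)));  // sum
        System.out.println(resolveDefault(Optional.empty(), List.of(Metric.value_count)));             // value_count
        System.out.println(resolveDefault(Optional.empty(), List.of(Metric.min, Metric.max)));         // max
    }
}
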
assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { assertThat(query, Matchers.instanceOf(FieldExistsQuery.class)); FieldExistsQuery fieldExistsQuery = (FieldExistsQuery) query; - String defaultMetric = ((AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType) fieldType).getDefaultMetric().name(); + String defaultMetric = ((AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType) fieldType).getDefaultMetric().name(); assertEquals("field." + defaultMetric, fieldExistsQuery.getField()); assertNoFieldNamesField(fields); } @@ -487,10 +488,10 @@ public void testCannotBeUsedInMultifields() { public void testMetricType() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType ft = - (AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType) mapperService.fieldType("field"); + AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType ft = + (AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType) mapperService.fieldType("field"); assertNull(ft.getMetricType()); - assertMetricType("gauge", AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType::getMetricType); + assertMetricType("gauge", AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType::getMetricType); { // Test invalid metric type for this field type @@ -518,7 +519,7 @@ public void testMetricType() throws IOException { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - return new AggregateDoubleMetricSyntheticSourceSupport(ignoreMalformed); + return new AggregateMetricDoubleSyntheticSourceSupport(ignoreMalformed); } @Override @@ -563,11 +564,11 @@ public void testArrayValueSyntheticSource() throws Exception { assertEquals(Strings.toString(expected), syntheticSource); } - protected final class AggregateDoubleMetricSyntheticSourceSupport implements SyntheticSourceSupport { + protected final class AggregateMetricDoubleSyntheticSourceSupport implements SyntheticSourceSupport { private final boolean malformedExample; private final EnumSet<Metric> storedMetrics; - public AggregateDoubleMetricSyntheticSourceSupport(boolean malformedExample) { + public AggregateMetricDoubleSyntheticSourceSupport(boolean malformedExample) { this.malformedExample = malformedExample; this.storedMetrics = EnumSet.copyOf(randomNonEmptySubsetOf(Arrays.asList(Metric.values()))); } @@ -618,4 +619,15 @@ public void testSyntheticSourceKeepArrays() { protected boolean supportsCopyTo() { return false; } + + @Override + protected Function<Object, Object> loadBlockExpected() { + return n -> ((Number) n); + } + + @Override + protected Function<Object, Object> loadBlockExpected(BlockReaderSupport blockReaderSupport, boolean columnReader) { + assumeTrue("Not supporting", false); + return null; + } } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldTypeTests.java similarity index 91% rename from x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java rename to x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldTypeTests.java index 89c2799d8327d..55ecfc13b1f3e 100644 ---
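
A recurring invariant in these tests: every metric lives in its own hidden subfield named from the parent field plus "." (SUBFIELD_SEPARATOR) plus the metric name, and an exists query is rewritten against the default metric's subfield (hence the "field." + defaultMetric assertion above). A minimal sketch of that naming scheme with a stand-in enum, not the mapper's own types:

import java.util.EnumMap;
import java.util.Map;

public class SubfieldNamingDemo {
    enum Metric { min, max, sum, value_count }

    static String subfieldName(String fieldName, Metric metric) {
        return fieldName + "." + metric.name(); // mirrors AggregateMetricDoubleFieldMapper.subfieldName
    }

    public static void main(String[] args) {
        Map<Metric, String> subfields = new EnumMap<>(Metric.class);
        for (Metric m : Metric.values()) {
            subfields.put(m, subfieldName("latency", m));
        }
        System.out.println(subfields);
        // exists(field) is answered by the default metric's subfield, e.g. latency.max
        Metric defaultMetric = Metric.max;
        System.out.println("FieldExistsQuery on: " + subfields.get(defaultMetric));
    }
}
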
a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldTypeTests.java @@ -27,8 +27,8 @@ import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.AggregateMetricDoubleFieldType; +import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.Metric; import java.io.IOException; import java.util.Collections; @@ -36,20 +36,20 @@ import java.util.Map; import static java.util.Arrays.asList; -import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.subfieldName; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateMetricDoubleFieldMapper.subfieldName; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class AggregateDoubleMetricFieldTypeTests extends FieldTypeTestCase { +public class AggregateMetricDoubleFieldTypeTests extends FieldTypeTestCase { - protected AggregateDoubleMetricFieldType createDefaultFieldType(String name, Map meta, Metric defaultMetric) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(name, meta, null); - for (AggregateDoubleMetricFieldMapper.Metric m : List.of( - AggregateDoubleMetricFieldMapper.Metric.min, - AggregateDoubleMetricFieldMapper.Metric.max + protected AggregateMetricDoubleFieldType createDefaultFieldType(String name, Map meta, Metric defaultMetric) { + AggregateMetricDoubleFieldType fieldType = new AggregateMetricDoubleFieldType(name, meta, null); + for (AggregateMetricDoubleFieldMapper.Metric m : List.of( + AggregateMetricDoubleFieldMapper.Metric.min, + AggregateMetricDoubleFieldMapper.Metric.max )) { String subfieldName = subfieldName(fieldType.name(), m); NumberFieldMapper.NumberFieldType subfield = new NumberFieldMapper.NumberFieldType( diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index 76ea7cab59ffc..f41dbff6dfc1c 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.countedkeyword; -import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; @@ -19,6 +18,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.bytes.BytesArray; import 
org.elasticsearch.common.io.stream.ByteArrayStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.plain.AbstractIndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.plain.AbstractLeafOrdinalsFieldData; import org.elasticsearch.index.mapper.BinaryFieldMapper; +import org.elasticsearch.index.mapper.CustomDocValuesField; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -434,15 +435,17 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio return; } - int i = 0; - int[] counts = new int[values.size()]; - for (Map.Entry<String, Integer> value : values.entrySet()) { - context.doc().add(new KeywordFieldMapper.KeywordField(fullPath(), new BytesRef(value.getKey()), fieldType)); - counts[i++] = value.getValue(); + for (String value : values.keySet()) { + context.doc().add(new KeywordFieldMapper.KeywordField(fullPath(), new BytesRef(value), fieldType)); + } + CountsBinaryDocValuesField field = (CountsBinaryDocValuesField) context.doc().getByKey(countFieldMapper.fieldType().name()); + if (field == null) { + field = new CountsBinaryDocValuesField(countFieldMapper.fieldType().name()); + field.add(values); + context.doc().addWithKey(countFieldMapper.fieldType().name(), field); + } else { + field.add(values); } - BytesStreamOutput streamOutput = new BytesStreamOutput(); - streamOutput.writeVIntArray(counts); - context.doc().add(new BinaryDocValuesField(countFieldMapper.fullPath(), streamOutput.bytes().toBytesRef())); } private void parseArray(DocumentParserContext context, SortedMap<String, Integer> values) throws IOException { @@ -509,4 +512,37 @@ protected SyntheticSourceSupport syntheticSourceSupport() { ); } + private class CountsBinaryDocValuesField extends CustomDocValuesField { + private final SortedMap<String, Integer> counts; + + CountsBinaryDocValuesField(String name) { + super(name); + counts = new TreeMap<>(); + } + + public void add(SortedMap<String, Integer> newCounts) { + for (Map.Entry<String, Integer> currCount : newCounts.entrySet()) { + this.counts.put(currCount.getKey(), this.counts.getOrDefault(currCount.getKey(), 0) + currCount.getValue()); + } + } + + @Override + public BytesRef binaryValue() { + try { + int maxBytesPerVInt = 5; + int bytesSize = (counts.size() + 1) * maxBytesPerVInt; + BytesStreamOutput out = new BytesStreamOutput(bytesSize); + int[] countsArr = new int[counts.size()]; + int i = 0; + for (Integer currCount : counts.values()) { + countsArr[i++] = currCount; + } + out.writeVIntArray(countsArr); + return out.bytes().toBytesRef(); + } catch (IOException e) { + throw new ElasticsearchException("Failed to get binary value", e); + } + } + } + } diff --git a/x-pack/plugin/migrate/build.gradle b/x-pack/plugin/migrate/build.gradle index 283362a637e78..f179a311e0fea 100644 --- a/x-pack/plugin/migrate/build.gradle +++ b/x-pack/plugin/migrate/build.gradle @@ -19,6 +19,7 @@ dependencies { testImplementation project(xpackModule('ccr')) testImplementation project(':modules:data-streams') testImplementation project(path: ':modules:reindex') + testImplementation project(path: ':modules:ingest-common') } addQaCheckDependencies(project) diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java index
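
The CountedKeywordFieldMapper change above fixes multi-occurrence parsing: a Lucene document can carry only one binary doc-values entry per field, so instead of writing a fresh BinaryDocValuesField on every parse pass, the counts are accumulated in a keyed CustomDocValuesField and serialized once, as a vInt array sized at worst five bytes per value. A self-contained sketch of both halves, with a plain-Java stand-in for BytesStreamOutput.writeVInt (assumed to be the usual 7-bits-per-byte varint; toy data, not the mapper's code):

import java.io.ByteArrayOutputStream;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class CountsAccumulationDemo {
    // Merge counts the way CountsBinaryDocValuesField.add(...) does when the same
    // counted_keyword field appears more than once in a document.
    static void merge(SortedMap<String, Integer> into, SortedMap<String, Integer> newCounts) {
        for (Map.Entry<String, Integer> e : newCounts.entrySet()) {
            into.merge(e.getKey(), e.getValue(), Integer::sum);
        }
    }

    // LEB128-style vInt: 7 bits per byte, high bit = continuation. Worst case is
    // 5 bytes per int, which is why binaryValue() sizes its buffer at (size + 1) * 5.
    static void writeVInt(ByteArrayOutputStream out, int v) {
        while ((v & ~0x7F) != 0) {
            out.write((v & 0x7F) | 0x80);
            v >>>= 7;
        }
        out.write(v);
    }

    public static void main(String[] args) {
        SortedMap<String, Integer> counts = new TreeMap<>();
        merge(counts, new TreeMap<>(Map.of("a", 2, "b", 1)));
        merge(counts, new TreeMap<>(Map.of("b", 3)));   // second array in the same doc
        System.out.println(counts);                     // {a=2, b=4}

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeVInt(out, counts.size());                  // writeVIntArray writes the length first
        for (int c : counts.values()) writeVInt(out, c);
        System.out.println(out.size() + " bytes");
    }
}
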
5220e17618a34..fb491132147a7 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java @@ -32,6 +32,8 @@ import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.not; public class CreateIndexFromSourceActionIT extends ESIntegTestCase { @@ -206,7 +208,7 @@ public void testSettingsNullOverride() throws Exception { assertTrue(destSettings.getAsBoolean(IndexMetadata.SETTING_BLOCKS_READ, false)); // override null removed - assertNull(destSettings.get(IndexMetadata.SETTING_BLOCKS_WRITE)); + assertThat(destSettings.keySet(), not(hasItem(IndexMetadata.SETTING_BLOCKS_WRITE))); } public void testRemoveIndexBlocksByDefault() throws Exception { @@ -236,9 +238,9 @@ public void testRemoveIndexBlocksByDefault() throws Exception { var destSettings = settingsResponse.getIndexToSettings().get(destIndex); // remove block settings override both source settings and override settings - assertNull(destSettings.get(IndexMetadata.SETTING_BLOCKS_WRITE)); - assertNull(destSettings.get(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE)); - assertNull(destSettings.get(IndexMetadata.SETTING_BLOCKS_READ)); + assertThat(destSettings.keySet(), not(hasItem(IndexMetadata.SETTING_BLOCKS_WRITE))); + assertThat(destSettings.keySet(), not(hasItem(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE))); + assertThat(destSettings.keySet(), not(hasItem(IndexMetadata.SETTING_BLOCKS_READ))); } public void testMappingsOverridden() { diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java index 0ad7dc45d4df8..e3b73d0aaa5cb 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.migrate.action; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; @@ -15,6 +16,7 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; @@ -22,11 +24,15 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import 
org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; +import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; @@ -35,12 +41,15 @@ import org.elasticsearch.datastreams.DataStreamsPlugin; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.migrate.MigratePlugin; +import org.elasticsearch.xpack.migrate.MigrateTemplateRegistry; +import org.junit.Before; import java.io.IOException; import java.time.Instant; @@ -49,22 +58,29 @@ import java.util.Locale; import java.util.Map; +import static java.lang.Boolean.parseBoolean; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { + @Before + private void setup() throws Exception { + deletePipeline(MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME); + assertBusy(() -> { assertTrue(getPipelines(MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME).isFound()); }); + } + private static final String MAPPING = """ { "_doc":{ "dynamic":"strict", "properties":{ - "foo1":{ - "type":"text" - } + "foo1": {"type":"text"}, + "@timestamp": {"type":"date"} } } } """; @@ -72,23 +88,142 @@ public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return List.of(MigratePlugin.class, ReindexPlugin.class, MockTransportService.TestPlugin.class, DataStreamsPlugin.class); + return List.of( + MigratePlugin.class, + ReindexPlugin.class, + MockTransportService.TestPlugin.class, + DataStreamsPlugin.class, + IngestCommonPlugin.class + ); + } + + private static String DATA_STREAM_MAPPING = """ + { + "dynamic": true, + "_data_stream_timestamp": { + "enabled": true + }, + "properties": { + "@timestamp": {"type":"date"} + } + } + """; + + public void testTimestamp0AddedIfMissing() { + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); + + // add doc without timestamp + addDoc(sourceIndex, "{\"foo\":\"baz\"}"); + + // wait until doc is written to all shards before adding mapping + ensureHealth(sourceIndex); + + // add timestamp to source mapping + indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING,
XContentType.JSON).get(); + + // call reindex + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); + + assertResponse(prepareSearch(destIndex), response -> { + Map sourceAsMap = response.getHits().getAt(0).getSourceAsMap(); + assertEquals(Integer.valueOf(0), sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD)); + }); + } + + public void testTimestampNotAddedIfExists() { + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); + + // add doc with timestamp + String time = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis()); + var doc = String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, time); + addDoc(sourceIndex, doc); + + // wait until doc is written to all shards before adding mapping + ensureHealth(sourceIndex); + + // add timestamp to source mapping + indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING, XContentType.JSON).get(); + + // call reindex + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); + + assertResponse(prepareSearch(destIndex), response -> { + Map sourceAsMap = response.getHits().getAt(0).getSourceAsMap(); + assertEquals(time, sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD)); + }); + } + + public void testCustomReindexPipeline() { + String customPipeline = """ + { + "processors": [ + { + "set": { + "field": "cheese", + "value": "gorgonzola" + } + } + ], + "version": 1000 + } + """; + + PutPipelineRequest putRequest = new PutPipelineRequest( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + MigrateTemplateRegistry.REINDEX_DATA_STREAM_PIPELINE_NAME, + new BytesArray(customPipeline), + XContentType.JSON + ); + + safeGet(clusterAdmin().execute(PutPipelineTransportAction.TYPE, putRequest)); + + var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); + + // add doc with timestamp + String time = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis()); + var doc = String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, time); + addDoc(sourceIndex, doc); + + // wait until doc is written to all shards before adding mapping + ensureHealth(sourceIndex); + + // add timestamp to source mapping + indicesAdmin().preparePutMapping(sourceIndex).setSource(DATA_STREAM_MAPPING, XContentType.JSON).get(); + + String destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); + + assertResponse(prepareSearch(destIndex), response -> { + Map sourceAsMap = response.getHits().getAt(0).getSourceAsMap(); + assertEquals("gorgonzola", sourceAsMap.get("cheese")); + assertEquals(time, sourceAsMap.get(DEFAULT_TIMESTAMP_FIELD)); + }); } public void testDestIndexDeletedIfExists() throws Exception { // empty source index var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // dest index with docs var destIndex = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); - indicesAdmin().create(new CreateIndexRequest(destIndex)).actionGet(); + 
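
The three tests above define the contract for the new default reindex pipeline: a document reindexed into a data stream must end up with an @timestamp, so a missing value is filled with 0 (epoch millis, asserted as Integer 0) while an existing value passes through untouched, and a user-installed pipeline under the same name (the gorgonzola one) replaces the default wholesale. Schematically, with a plain map standing in for the ingest document (hypothetical helper, not the plugin's code):

import java.util.HashMap;
import java.util.Map;

public class ReindexTimestampFixupDemo {
    // Mimics the behavior the tests pin down for the default pipeline:
    // fill a missing @timestamp with 0, never overwrite an existing one.
    static Map<String, Object> applyDefaultPipeline(Map<String, Object> source) {
        Map<String, Object> out = new HashMap<>(source);
        out.putIfAbsent("@timestamp", 0);
        return out;
    }

    public static void main(String[] args) {
        System.out.println(applyDefaultPipeline(new HashMap<>(Map.of("foo", "baz"))));
        // {@timestamp=0, foo=baz}
        System.out.println(applyDefaultPipeline(new HashMap<>(Map.of("@timestamp", "2025-01-01T00:00:00Z"))));
        // {@timestamp=2025-01-01T00:00:00Z}
    }
}
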
safeGet(indicesAdmin().create(new CreateIndexRequest(destIndex))); indexDocs(destIndex, 10); - indicesAdmin().refresh(new RefreshRequest(destIndex)).actionGet(); + safeGet(indicesAdmin().refresh(new RefreshRequest(destIndex))); assertHitCount(prepareSearch(destIndex).setSize(0), 10); // call reindex - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); + safeGet(client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))); // verify that dest still exists, but is now empty assertTrue(indexExists(destIndex)); @@ -97,11 +232,12 @@ public void testDestIndexDeletedIfExists() throws Exception { public void testDestIndexNameSet_noDotPrefix() throws Exception { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // call reindex - var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); var expectedDestIndexName = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); assertEquals(expectedDestIndexName, response.getDestIndex()); @@ -110,11 +246,12 @@ public void testDestIndexNameSet_noDotPrefix() throws Exception { public void testDestIndexNameSet_withDotPrefix() throws Exception { var sourceIndex = "." + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // call reindex - var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); var expectedDestIndexName = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); assertEquals(expectedDestIndexName, response.getDestIndex()); @@ -124,13 +261,14 @@ public void testDestIndexContainsDocs() throws Exception { // source index with docs var numDocs = randomIntBetween(1, 100); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); indexDocs(sourceIndex, numDocs); // call reindex - var response = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet(); - indicesAdmin().refresh(new RefreshRequest(response.getDestIndex())).actionGet(); + var response = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + safeGet(indicesAdmin().refresh(new RefreshRequest(response.getDestIndex()))); // verify that dest contains docs assertHitCount(prepareSearch(response.getDestIndex()).setSize(0), numDocs); @@ -141,23 +279,38 @@ public void testSetSourceToBlockWrites() throws Exception { // empty source index var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).get(); + safeGet(indicesAdmin().create(new 
CreateIndexRequest(sourceIndex, settings))); // call reindex - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); + safeGet(client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))); + + // Assert that source index is now read-only but not verified read-only + GetSettingsResponse getSettingsResponse = safeGet(admin().indices().getSettings(new GetSettingsRequest().indices(sourceIndex))); + assertTrue(parseBoolean(getSettingsResponse.getSetting(sourceIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); + assertFalse( + parseBoolean(getSettingsResponse.getSetting(sourceIndex, MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey())) + ); // assert that write to source fails var indexReq = new IndexRequest(sourceIndex).source(jsonBuilder().startObject().field("field", "1").endObject()); - assertThrows(ClusterBlockException.class, () -> client().index(indexReq).actionGet()); + expectThrows(ClusterBlockException.class, client().index(indexReq)); assertHitCount(prepareSearch(sourceIndex).setSize(0), 0); } - public void testSettingsAddedBeforeReindex() throws Exception { + public void testMissingSourceIndex() { + var nonExistentSourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + expectThrows( + ResourceNotFoundException.class, + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(nonExistentSourceIndex)) + ); + } + + public void testSettingsAddedBeforeReindex() { // start with a static setting var numShards = randomIntBetween(1, 10); var staticSettings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards).build(); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, staticSettings)).get(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, staticSettings))); // update with a dynamic setting var numReplicas = randomIntBetween(0, 10); @@ -166,31 +319,32 @@ public void testSettingsAddedBeforeReindex() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) .build(); - indicesAdmin().updateSettings(new UpdateSettingsRequest(dynamicSettings, sourceIndex)).actionGet(); + safeGet(indicesAdmin().updateSettings(new UpdateSettingsRequest(dynamicSettings, sourceIndex))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); // assert both static and dynamic settings set on dest index - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); assertEquals(numReplicas, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS))); assertEquals(numShards, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS))); assertEquals(refreshInterval, settingsResponse.getSetting(destIndex, IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey())); } - public void testMappingsAddedToDestIndex() throws Exception { + 
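
A mechanical but worthwhile cleanup runs through this whole file: `.actionGet()`/`.get()` calls become `safeGet(...)`, and `assertThrows(..., () -> future.actionGet())` becomes `expectThrows(..., future)`, so a failed future surfaces as a clean test assertion instead of a wrapped checked exception threaded through every test signature. A simplified analogue of the helper over a plain java.util.concurrent.Future (the real one lives in ESTestCase; this is a sketch of the idea, not its code):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

public class SafeGetDemo {
    // Block on the future and convert checked failures into AssertionErrors.
    static <T> T safeGet(Future<T> future) {
        try {
            return future.get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new AssertionError("interrupted while waiting", e);
        } catch (ExecutionException e) {
            throw new AssertionError("unexpected failure", e.getCause());
        }
    }

    public static void main(String[] args) {
        System.out.println(safeGet(CompletableFuture.completedFuture("created [my-index]")));
    }
}
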
public void testMappingsAddedToDestIndex() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(MAPPING)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(MAPPING))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) - .actionGet(); + var mappingsResponse = safeGet( + indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + ); Map mappings = mappingsResponse.mappings(); var destMappings = mappings.get(destIndex).sourceAsMap(); var sourceMappings = mappings.get(sourceIndex).sourceAsMap(); @@ -203,13 +357,13 @@ public void testMappingsAddedToDestIndex() throws Exception { public void testFailIfMetadataBlockSet() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_METADATA, true).build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); - try { - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); - } catch (ElasticsearchException e) { - assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); - } + ElasticsearchException e = expectThrows( + ElasticsearchException.class, + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); cleanupMetadataBlocks(sourceIndex); } @@ -217,13 +371,13 @@ public void testFailIfMetadataBlockSet() { public void testFailIfReadBlockSet() { var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_READ, true).build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); - try { - client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)).actionGet(); - } catch (ElasticsearchException e) { - assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); - } + ElasticsearchException e = expectThrows( + ElasticsearchException.class, + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ); + assertTrue(e.getMessage().contains("Cannot reindex index") || e.getCause().getMessage().equals("Cannot reindex index")); cleanupMetadataBlocks(sourceIndex); } @@ -235,17 +389,17 @@ public void testReadOnlyBlocksNotAddedBack() { .put(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, randomBoolean()) .put(IndexMetadata.SETTING_BLOCKS_WRITE, randomBoolean()) .build(); - indicesAdmin().create(new CreateIndexRequest(sourceIndex, 
settings)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); - assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY))); - assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE))); - assertFalse(Boolean.parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); + assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY))); + assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE))); + assertFalse(parseBoolean(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_BLOCKS_WRITE))); cleanupMetadataBlocks(sourceIndex); cleanupMetadataBlocks(destIndex); @@ -264,11 +418,11 @@ public void testUpdateSettingsDefaultsRestored() { assertAcked(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(sourceIndex, destIndex)).actionGet(); + var settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(sourceIndex, destIndex))); var destSettings = settingsResponse.getIndexToSettings().get(destIndex); assertEquals( @@ -297,33 +451,34 @@ public void testSettingsAndMappingsFromTemplate() throws IOException { .build(); var request = new TransportPutComposableIndexTemplateAction.Request("logs-template"); request.indexTemplate(template); - client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); + safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request)); var sourceIndex = "logs-" + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - indicesAdmin().create(new CreateIndexRequest(sourceIndex)).actionGet(); + safeGet(indicesAdmin().create(new CreateIndexRequest(sourceIndex))); { var indexRequest = new IndexRequest(sourceIndex); indexRequest.source("{ \"foo1\": \"cheese\" }", XContentType.JSON); - client().index(indexRequest).actionGet(); + safeGet(client().index(indexRequest)); } // call reindex - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) + ).getDestIndex(); // verify settings from templates copied to dest index { - var settingsResponse = indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex)).actionGet(); + var 
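
testReadOnlyBlocksNotAddedBack above (and the null-override assertions in CreateIndexFromSourceActionIT earlier) encode the same policy: index block settings on the source are deliberately not carried over when the destination index is created, because the migration itself is what set them. A sketch of that filtering step using the real index.blocks.* setting keys behind the IndexMetadata constants (the filter itself is illustrative, not the action's code):

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class BlockSettingsFilterDemo {
    // Keys behind IndexMetadata.SETTING_READ_ONLY, SETTING_READ_ONLY_ALLOW_DELETE,
    // SETTING_BLOCKS_WRITE, SETTING_BLOCKS_READ and SETTING_BLOCKS_METADATA.
    static final Set<String> BLOCKS = Set.of(
        "index.blocks.read_only",
        "index.blocks.read_only_allow_delete",
        "index.blocks.write",
        "index.blocks.read",
        "index.blocks.metadata"
    );

    static Map<String, String> destSettings(Map<String, String> sourceSettings) {
        Map<String, String> dest = new HashMap<>(sourceSettings);
        dest.keySet().removeAll(BLOCKS); // blocks are not carried over to the new index
        return dest;
    }

    public static void main(String[] args) {
        Map<String, String> source = new HashMap<>(Map.of(
            "index.number_of_shards", "3",
            "index.blocks.write", "true",
            "index.blocks.read_only_allow_delete", "true"
        ));
        System.out.println(destSettings(source)); // only index.number_of_shards survives
    }
}
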
settingsResponse = safeGet(indicesAdmin().getSettings(new GetSettingsRequest().indices(destIndex))); assertEquals(numReplicas, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS))); assertEquals(numShards, Integer.parseInt(settingsResponse.getSetting(destIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS))); } // verify mappings from templates copied to dest index { - var mappingsResponse = indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) - .actionGet(); + var mappingsResponse = safeGet( + indicesAdmin().getMappings(new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sourceIndex, destIndex)) + ); var destMappings = mappingsResponse.mappings().get(destIndex).sourceAsMap(); var sourceMappings = mappingsResponse.mappings().get(sourceIndex).sourceAsMap(); assertEquals(sourceMappings, destMappings); @@ -384,7 +539,7 @@ public void testTsdbStartEndSet() throws Exception { .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) .build() ); - client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); + safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request)); // index doc Instant time = Instant.now(); @@ -392,12 +547,11 @@ public void testTsdbStartEndSet() throws Exception { { var indexRequest = new IndexRequest("k8s").opType(DocWriteRequest.OpType.CREATE); indexRequest.source(TSDB_DOC.replace("$time", formatInstant(time)), XContentType.JSON); - var indexResponse = client().index(indexRequest).actionGet(); + var indexResponse = safeGet(client().index(indexRequest)); backingIndexName = indexResponse.getIndex(); } - var sourceSettings = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndexName)) - .actionGet() + var sourceSettings = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(backingIndexName))) .getSettings() .get(backingIndexName); Instant startTime = IndexSettings.TIME_SERIES_START_TIME.get(sourceSettings); @@ -410,17 +564,15 @@ public void testTsdbStartEndSet() throws Exception { // force a rollover so can call reindex and delete var rolloverRequest = new RolloverRequest("k8s", null); - var rolloverResponse = indicesAdmin().rolloverIndex(rolloverRequest).actionGet(); + var rolloverResponse = safeGet(indicesAdmin().rolloverIndex(rolloverRequest)); rolloverResponse.getNewIndex(); // call reindex on the original backing index - var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(backingIndexName)) - .actionGet() - .getDestIndex(); + var destIndex = safeGet( + client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(backingIndexName)) + ).getDestIndex(); - var destSettings = indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(destIndex)) - .actionGet() - .getSettings() + var destSettings = safeGet(indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(destIndex))).getSettings() .get(destIndex); var destStart = IndexSettings.TIME_SERIES_START_TIME.get(destSettings); var destEnd = IndexSettings.TIME_SERIES_END_TIME.get(destSettings); @@ -438,7 +590,7 @@ private static void cleanupMetadataBlocks(String index) { .putNull(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE) .putNull(IndexMetadata.SETTING_BLOCKS_METADATA) .build(); - assertAcked(indicesAdmin().updateSettings(new UpdateSettingsRequest(settings, 
index)).actionGet()); + safeGet(indicesAdmin().updateSettings(new UpdateSettingsRequest(settings, index))); } private static void indexDocs(String index, int numDocs) { @@ -451,7 +603,7 @@ private static void indexDocs(String index, int numDocs) { .source(String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, value), XContentType.JSON) ); } - BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); + BulkResponse bulkResponse = safeGet(client().bulk(bulkRequest)); assertThat(bulkResponse.getItems().length, equalTo(numDocs)); } @@ -459,12 +611,17 @@ private static String formatInstant(Instant instant) { return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); } - private static String getIndexUUID(String index) { - return indicesAdmin().getIndex(new GetIndexRequest(TEST_REQUEST_TIMEOUT).indices(index)) - .actionGet() - .getSettings() - .get(index) - .get(IndexMetadata.SETTING_INDEX_UUID); + void addDoc(String index, String doc) { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(index).opType(DocWriteRequest.OpType.CREATE).source(doc, XContentType.JSON)); + safeGet(client().bulk(bulkRequest)); } + private void ensureHealth(String index) { + if (cluster().numDataNodes() > 1) { + ensureGreen(index); + } else { + ensureYellow(index); + } + } } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java index f5f8beba26d8f..7811e84ac9f53 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java @@ -55,6 +55,7 @@ import org.elasticsearch.xpack.migrate.task.ReindexDataStreamTaskParams; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.function.Predicate; import java.util.function.Supplier; @@ -64,6 +65,18 @@ import static org.elasticsearch.xpack.migrate.task.ReindexDataStreamPersistentTaskExecutor.MAX_CONCURRENT_INDICES_REINDEXED_PER_DATA_STREAM_SETTING; public class MigratePlugin extends Plugin implements ActionPlugin, PersistentTaskPlugin { + @Override + public Collection createComponents(PluginServices services) { + var registry = new MigrateTemplateRegistry( + services.environment().settings(), + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() + ); + registry.initialize(); + return List.of(registry); + } @Override public List getRestHandlers( diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java new file mode 100644 index 0000000000000..2a9dc97e16352 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigrateTemplateRegistry.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.migrate; + +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; +import org.elasticsearch.xpack.core.template.IngestPipelineConfig; +import org.elasticsearch.xpack.core.template.JsonIngestPipelineConfig; + +import java.util.List; + +public class MigrateTemplateRegistry extends IndexTemplateRegistry { + + // This number must be incremented when we make changes to built-in pipeline. + // If a specific user pipeline is needed instead, its version should be set to a value higher than the REGISTRY_VERSION. + static final int REGISTRY_VERSION = 1; + public static final String REINDEX_DATA_STREAM_PIPELINE_NAME = "reindex-data-stream-pipeline"; + private static final String TEMPLATE_VERSION_VARIABLE = "xpack.migrate.reindex.pipeline.version"; + + public MigrateTemplateRegistry( + Settings nodeSettings, + ClusterService clusterService, + ThreadPool threadPool, + Client client, + NamedXContentRegistry xContentRegistry + ) { + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); + } + + @Override + protected List getIngestPipelines() { + return List.of( + new JsonIngestPipelineConfig( + REINDEX_DATA_STREAM_PIPELINE_NAME, + "/" + REINDEX_DATA_STREAM_PIPELINE_NAME + ".json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ) + ); + } + + @Override + protected String getOrigin() { + return ClientHelper.STACK_ORIGIN; + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java index 14e5e8cccd910..5ab009decd381 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceAction.java @@ -15,6 +15,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -191,5 +194,15 @@ public String[] indices() { public IndicesOptions indicesOptions() { return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return "creating index " + destIndex + " from " + sourceIndex; + } } } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java index 81edb0e716f51..a12316129a4b5 100644 --- 
a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceTransportAction.java @@ -46,13 +46,13 @@ public class CreateIndexFromSourceTransportAction extends HandledTransportAction private final ClusterService clusterService; private final Client client; private final IndexScopedSettings indexScopedSettings; - private static final Settings REMOVE_INDEX_BLOCKS_SETTING_OVERRIDE = Settings.builder() - .putNull(IndexMetadata.SETTING_READ_ONLY) - .putNull(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE) - .putNull(IndexMetadata.SETTING_BLOCKS_WRITE) - .putNull(IndexMetadata.SETTING_BLOCKS_METADATA) - .putNull(IndexMetadata.SETTING_BLOCKS_READ) - .build(); + private static final Set INDEX_BLOCK_SETTINGS = Set.of( + IndexMetadata.SETTING_READ_ONLY, + IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, + IndexMetadata.SETTING_BLOCKS_WRITE, + IndexMetadata.SETTING_BLOCKS_METADATA, + IndexMetadata.SETTING_BLOCKS_READ + ); @Inject public CreateIndexFromSourceTransportAction( @@ -89,12 +89,15 @@ protected void doExecute(Task task, CreateIndexFromSourceAction.Request request, Settings.Builder settings = Settings.builder() // first settings from source index - .put(filterSettings(sourceIndex)) - // then override with request settings - .put(request.settingsOverride()); + .put(filterSettings(sourceIndex)); + + if (request.settingsOverride().isEmpty() == false) { + applyOverrides(settings, request.settingsOverride()); + } + if (request.removeIndexBlocks()) { // lastly, override with settings to remove index blocks if requested - settings.put(REMOVE_INDEX_BLOCKS_SETTING_OVERRIDE); + INDEX_BLOCK_SETTINGS.forEach(settings::remove); } Map mergeMappings; @@ -114,6 +117,16 @@ protected void doExecute(Task task, CreateIndexFromSourceAction.Request request, client.admin().indices().create(createIndexRequest, listener.map(response -> response)); } + private void applyOverrides(Settings.Builder settings, Settings overrides) { + overrides.keySet().forEach(key -> { + if (overrides.get(key) != null) { + settings.put(key, overrides.get(key)); + } else { + settings.remove(key); + } + }); + } + private static Map toMap(@Nullable MappingMetadata sourceMapping) { return Optional.ofNullable(sourceMapping) .map(MappingMetadata::source) @@ -122,10 +135,14 @@ private static Map toMap(@Nullable MappingMetadata sourceMapping .orElse(Map.of()); } + @SuppressWarnings("unchecked") private static Map mergeMappings(@Nullable MappingMetadata sourceMapping, Map mappingAddition) throws IOException { Map combinedMappingMap = new HashMap<>(toMap(sourceMapping)); XContentHelper.update(combinedMappingMap, mappingAddition, true); + if (sourceMapping != null && combinedMappingMap.size() == 1 && combinedMappingMap.containsKey(sourceMapping.type())) { + combinedMappingMap = (Map) combinedMappingMap.get(sourceMapping.type()); + } return combinedMappingMap; } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java index faf8982b79bf0..5ebd2040fbcb1 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java @@ -16,6 +16,9 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -24,6 +27,7 @@ import java.io.IOException; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.function.Predicate; @@ -144,5 +148,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return "reindexing data stream " + sourceDataStream; + } } } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java index 2e3fd1b76ed32..dec3cf2901fcc 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexAction.java @@ -14,8 +14,12 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; +import java.util.Map; import java.util.Objects; public class ReindexDataStreamIndexAction extends ActionType { @@ -78,6 +82,16 @@ public String[] indices() { public IndicesOptions indicesOptions() { return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return "reindexing data stream index " + sourceIndex; + } } public static class Response extends ActionResponse { diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index d3fe27006e82e..792ec4ec2b6f9 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -10,14 +10,22 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; +import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; +import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import 
org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.open.OpenIndexAction; +import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -37,6 +45,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequest; +import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; @@ -44,9 +53,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate; +import org.elasticsearch.xpack.migrate.MigrateTemplateRegistry; import java.util.Locale; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; @@ -117,6 +128,11 @@ protected void doExecute( var destIndexName = generateDestIndexName(sourceIndexName); TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); IndexMetadata sourceIndex = clusterService.state().getMetadata().index(sourceIndexName); + if (sourceIndex == null) { + listener.onFailure(new ResourceNotFoundException("source index [{}] does not exist", sourceIndexName)); + return; + } + Settings settingsBefore = sourceIndex.getSettings(); var hasOldVersion = DeprecatedIndexPredicate.getReindexRequiredPredicate(clusterService.state().metadata(), false); @@ -139,18 +155,51 @@ protected void doExecute( listener.onFailure(new ElasticsearchException(errorMessage)); return; } - + final boolean wasClosed = isClosed(sourceIndex); SubscribableListener.newForked(l -> setBlockWrites(sourceIndexName, l, taskId)) + .andThen(l -> openIndexIfClosed(sourceIndexName, wasClosed, l, taskId)) .andThen(l -> refresh(sourceIndexName, l, taskId)) .andThen(l -> deleteDestIfExists(destIndexName, l, taskId)) .andThen(l -> createIndex(sourceIndex, destIndexName, l, taskId)) .andThen(l -> reindex(sourceIndexName, destIndexName, l, taskId)) .andThen(l -> copyOldSourceSettingsToDest(settingsBefore, destIndexName, l, taskId)) .andThen(l -> sanityCheck(sourceIndexName, destIndexName, l, taskId)) + .andThen(l -> closeIndexIfWasClosed(destIndexName, wasClosed, l, taskId)) .andThenApply(ignored -> new ReindexDataStreamIndexAction.Response(destIndexName)) .addListener(listener); } + private void openIndexIfClosed(String indexName, boolean isClosed, ActionListener listener, TaskId parentTaskId) { + if (isClosed) { + logger.debug("Opening index [{}]", indexName); + var request = new OpenIndexRequest(indexName); + request.setParentTask(parentTaskId); + 
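For orientation, a condensed, hedged sketch of the listener chain this method participates in; it mirrors the newForked()/andThen() usage visible in doExecute() above, with some intermediate steps elided:

```java
// Condensed from doExecute() above: each andThen() step runs only after the
// previous step succeeds, so a failure anywhere short-circuits straight to
// the final listener, and the open/close steps transparently bracket the copy.
SubscribableListener.newForked(l -> setBlockWrites(sourceIndexName, l, taskId))
    .andThen(l -> openIndexIfClosed(sourceIndexName, wasClosed, l, taskId)) // no-op when the source was open
    .andThen(l -> reindex(sourceIndexName, destIndexName, l, taskId))
    .andThen(l -> closeIndexIfWasClosed(destIndexName, wasClosed, l, taskId)) // restore the original state
    .andThenApply(ignored -> new ReindexDataStreamIndexAction.Response(destIndexName))
    .addListener(listener); // gets the response, or the first failure in the chain
```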
client.execute(OpenIndexAction.INSTANCE, request, listener); + } else { + listener.onResponse(null); + } + } + + private void closeIndexIfWasClosed( + String indexName, + boolean wasClosed, + ActionListener listener, + TaskId parentTaskId + ) { + if (wasClosed) { + logger.debug("Closing index [{}]", indexName); + var request = new CloseIndexRequest(indexName); + request.setParentTask(parentTaskId); + client.execute(TransportCloseIndexAction.TYPE, request, listener); + } else { + listener.onResponse(null); + } + } + + private static boolean isClosed(IndexMetadata indexMetadata) { + return indexMetadata.getState().equals(IndexMetadata.State.CLOSE); + } + private void setBlockWrites(String sourceIndexName, ActionListener listener, TaskId parentTaskId) { logger.debug("Setting write block on source index [{}]", sourceIndexName); addBlockToIndex(WRITE, sourceIndexName, new ActionListener<>() { @@ -223,13 +272,41 @@ void reindex(String sourceIndexName, String destIndexName, ActionListener checkForFailuresListener = ActionListener.wrap(bulkByScrollResponse -> { + if (bulkByScrollResponse.getSearchFailures().isEmpty() == false) { + ScrollableHitSource.SearchFailure firstSearchFailure = bulkByScrollResponse.getSearchFailures().get(0); + listener.onFailure( + new ElasticsearchException( + "Failure reading data from {} caused by {}", + firstSearchFailure.getReason(), + sourceIndexName, + firstSearchFailure.getReason().getMessage() + ) + ); + } else if (bulkByScrollResponse.getBulkFailures().isEmpty() == false) { + BulkItemResponse.Failure firstBulkFailure = bulkByScrollResponse.getBulkFailures().get(0); + listener.onFailure( + new ElasticsearchException( + "Failure loading data from {} into {} caused by {}", + firstBulkFailure.getCause(), + sourceIndexName, + destIndexName, + firstBulkFailure.getCause().getMessage() + ) + ); + } else { + listener.onResponse(bulkByScrollResponse); + } + }, listener::onFailure); + client.execute(ReindexAction.INSTANCE, reindexRequest, checkForFailuresListener); } private void updateSettings( @@ -296,6 +373,7 @@ private void addBlockToIndex( TaskId parentTaskId ) { AddIndexBlockRequest addIndexBlockRequest = new AddIndexBlockRequest(block, index); + addIndexBlockRequest.markVerified(false); addIndexBlockRequest.setParentTask(parentTaskId); client.admin().indices().execute(TransportAddIndexBlockAction.TYPE, addIndexBlockRequest, listener); } @@ -321,26 +399,24 @@ private void sanityCheck( ) { if (Assertions.ENABLED) { logger.debug("Comparing source [{}] and dest [{}] doc counts", sourceIndexName, destIndexName); - client.execute( - RefreshAction.INSTANCE, - new RefreshRequest(destIndexName), - listener.delegateFailureAndWrap((delegate, ignored) -> { - getIndexDocCount(sourceIndexName, parentTaskId, delegate.delegateFailureAndWrap((delegate1, sourceCount) -> { - getIndexDocCount(destIndexName, parentTaskId, delegate1.delegateFailureAndWrap((delegate2, destCount) -> { - assert sourceCount == destCount - : String.format( - Locale.ROOT, - "source index [%s] has %d docs and dest [%s] has %d docs", - sourceIndexName, - sourceCount, - destIndexName, - destCount - ); - delegate2.onResponse(null); - })); + RefreshRequest refreshRequest = new RefreshRequest(destIndexName); + refreshRequest.setParentTask(parentTaskId); + client.execute(RefreshAction.INSTANCE, refreshRequest, listener.delegateFailureAndWrap((delegate, ignored) -> { + getIndexDocCount(sourceIndexName, parentTaskId, delegate.delegateFailureAndWrap((delegate1, sourceCount) -> { + getIndexDocCount(destIndexName, 
parentTaskId, delegate1.delegateFailureAndWrap((delegate2, destCount) -> { + assert Objects.equals(sourceCount, destCount) + : String.format( + Locale.ROOT, + "source index [%s] has %d docs and dest [%s] has %d docs", + sourceIndexName, + sourceCount, + destIndexName, + destCount + ); + delegate2.onResponse(null); })); - }) - ); + })); + })); } else { listener.onResponse(null); } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java index 1f6a871388350..38ab0275f62c1 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java @@ -226,6 +226,7 @@ private void maybeProcessNextIndex( }, e -> { reindexDataStreamTask.reindexFailed(index.getName(), e); listener.onResponse(null); + maybeProcessNextIndex(indicesRemaining, reindexDataStreamTask, sourceDataStream, listener, parentTaskId); })); } diff --git a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportActionTests.java b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportActionTests.java index 99e1031dec3a2..55e4da30cdf11 100644 --- a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportActionTests.java +++ b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportActionTests.java @@ -30,6 +30,7 @@ import java.util.Collections; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.when; @@ -111,7 +112,7 @@ public void testReindexIncludesRateLimit() { ) ); - doNothing().when(client).execute(eq(ReindexAction.INSTANCE), request.capture(), eq(listener)); + doNothing().when(client).execute(eq(ReindexAction.INSTANCE), request.capture(), any()); action.reindex(sourceIndex, destIndex, listener, taskId); @@ -136,7 +137,7 @@ public void testReindexIncludesInfiniteRateLimit() { Collections.singleton(ReindexDataStreamIndexTransportAction.REINDEX_MAX_REQUESTS_PER_SECOND_SETTING) ) ); - doNothing().when(client).execute(eq(ReindexAction.INSTANCE), request.capture(), eq(listener)); + doNothing().when(client).execute(eq(ReindexAction.INSTANCE), request.capture(), any()); action.reindex(sourceIndex, destIndex, listener, taskId); diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index a63d911e9d40d..e33fe677179d8 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -109,7 +109,7 @@ public List getBootstrapChecks() { @Override public BootstrapCheckResult check(BootstrapContext context) { try { - validateModelRepository(MODEL_REPOSITORY.get(context.settings()), context.environment().configFile()); + 
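The hunk that follows is a one-token rename; as a hedged illustration (assuming Environment keeps its usual shape, where the accessor returns the config directory Path):

```java
// configFile() was renamed to configDir(); the returned value -- the Path of
// the node's configuration directory -- is unchanged, so call sites only swap
// the method name, exactly as the next line does.
Path configDir = context.environment().configDir();
```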
validateModelRepository(MODEL_REPOSITORY.get(context.settings()), context.environment().configDir()); } catch (Exception e) { return BootstrapCheckResult.failure( "Found an invalid configuration for xpack.ml.model_repository. " diff --git a/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java b/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java index 76b6dd991a41b..04dcb76a31882 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java +++ b/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java @@ -41,6 +41,15 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { ) .build(); + private static final RequestOptions FLUSH_OPTIONS = RequestOptions.DEFAULT.toBuilder() + .setWarningsHandler( + warnings -> Collections.singletonList( + "Forcing any buffered data to be processed is deprecated, " + + "in a future major version it will be compulsory to use a datafeed" + ).equals(warnings) == false + ) + .build(); + public void testMachineLearningInstalled() throws Exception { Response response = client().performRequest(new Request("GET", "/_xpack")); Map<String, Object> features = (Map<String, Object>) entityAsMap(response).get("features"); @@ -93,7 +102,9 @@ public void testMiniFarequote() throws Exception { assertEquals(1403481600000L, responseBody.get("earliest_record_timestamp")); assertEquals(1403481700000L, responseBody.get("latest_record_timestamp")); - Response flushResponse = client().performRequest(new Request("POST", BASE_PATH + "anomaly_detectors/" + jobId + "/_flush")); + Request flushRequest = new Request("POST", BASE_PATH + "anomaly_detectors/" + jobId + "/_flush"); + flushRequest.setOptions(FLUSH_OPTIONS); + Response flushResponse = client().performRequest(flushRequest); assertFlushResponse(flushResponse, true, 1403481600000L); Request closeRequest = new Request("POST", BASE_PATH + "anomaly_detectors/" + jobId + "/_close"); @@ -191,7 +202,9 @@ public void testMiniFarequoteReopen() throws Exception { assertEquals(1403481600000L, responseBody.get("earliest_record_timestamp")); assertEquals(1403482000000L, responseBody.get("latest_record_timestamp")); - Response flushResponse = client().performRequest(new Request("POST", BASE_PATH + "anomaly_detectors/" + jobId + "/_flush")); + Request flushRequest = new Request("POST", BASE_PATH + "anomaly_detectors/" + jobId + "/_flush"); + flushRequest.setOptions(FLUSH_OPTIONS); + Response flushResponse = client().performRequest(flushRequest); assertFlushResponse(flushResponse, true, 1403481600000L); Request closeRequest = new Request("POST", BASE_PATH + "anomaly_detectors/" + jobId + "/_close"); diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 0869ae394d3de..d18f6da13cad2 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -258,4 +258,5 @@ testClusters.configureEach { user username: "no_ml", password: "x-pack-test-password", role: "minimal" setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' + systemProperty 'es.queryable_built_in_roles_enabled', 'false' } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 804033ef531b9..52958f40aa268 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -1297,19 +1297,28 @@ public void testLookbackOnlyGivenAggregationsWithHistogramAndRollupIndex() throw } ] }"""); + createRollupRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); client().performRequest(createRollupRequest); - client().performRequest(new Request("POST", "/_rollup/job/" + rollupJobId + "/_start")); + var startRequest = new Request("POST", "/_rollup/job/" + rollupJobId + "/_start"); + startRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); + client().performRequest(startRequest); assertBusy(() -> { - Response getRollup = client().performRequest(new Request("GET", "/_rollup/job/" + rollupJobId)); + var getRequest = new Request("GET", "/_rollup/job/" + rollupJobId); + getRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); + Response getRollup = client().performRequest(getRequest); String body = EntityUtils.toString(getRollup.getEntity()); assertThat(body, containsString("\"job_state\":\"started\"")); assertThat(body, containsString("\"rollups_indexed\":4")); }, 60, TimeUnit.SECONDS); - client().performRequest(new Request("POST", "/_rollup/job/" + rollupJobId + "/_stop")); + var stopRequest = new Request("POST", "/_rollup/job/" + rollupJobId + "/_stop"); + stopRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); + client().performRequest(stopRequest); assertBusy(() -> { - Response getRollup = client().performRequest(new Request("GET", "/_rollup/job/" + rollupJobId)); + var getRequest = new Request("GET", "/_rollup/job/" + rollupJobId); + getRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); + Response getRollup = client().performRequest(getRequest); assertThat(EntityUtils.toString(getRollup.getEntity()), containsString("\"job_state\":\"stopped\"")); }, 60, TimeUnit.SECONDS); @@ -1826,6 +1835,7 @@ private Response createJobAndDataFeed(String jobId, String datafeedId) throws IO String rollupJobId = "rollup-" + jobId; Request createRollupRequest = new Request("PUT", "/_rollup/job/" + rollupJobId); + createRollupRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); createRollupRequest.setJsonEntity(""" { "index_pattern": "airline-data-aggs", diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index d981a60adbdb5..cebcb6631c9bf 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -58,6 +58,14 @@ public class MlJobIT extends ESRestTestCase { ).equals(warnings) == false ) .build(); + private static final RequestOptions FLUSH_OPTIONS = RequestOptions.DEFAULT.toBuilder() + .setWarningsHandler( + warnings -> Collections.singletonList( + "Forcing any buffered data to be processed is deprecated, " + + "in a future major version it will be compulsory to use a datafeed" + ).equals(warnings) == false + ) + .build(); @Override protected Settings restClientSettings() { @@ -534,9 +542,9 @@ public void 
testOutOfOrderData() throws Exception { postDataRequest.setJsonEntity("{ \"airline\":\"LOT\", \"responsetime\":100, \"time\":\"2019-07-01 00:10:00Z\" }"); client().performRequest(postDataRequest); - Response flushResponse = client().performRequest( - new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush") - ); + Request flushRequest = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush"); + flushRequest.setOptions(FLUSH_OPTIONS); + Response flushResponse = client().performRequest(flushRequest); assertThat(entityAsMap(flushResponse), hasEntry("flushed", true)); closeJob(jobId); @@ -574,9 +582,9 @@ public void testDeleteJob_TimingStatsDocumentIsDeleted() throws Exception { { "airline":"LOT", "response_time":100, "time":"2019-07-01 02:00:00Z" }"""); client().performRequest(postDataRequest); - Response flushResponse = client().performRequest( - new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush") - ); + Request flushRequest = new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush"); + flushRequest.setOptions(FLUSH_OPTIONS); + Response flushResponse = client().performRequest(flushRequest); assertThat(entityAsMap(flushResponse), hasEntry("flushed", true)); closeJob(jobId); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java index c90c461fe8b1a..231d165c599c0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java @@ -124,7 +124,7 @@ protected StopDataFrameAnalyticsAction.Response forceStopAnalytics(String id) { } protected void waitUntilAnalyticsIsStopped(String id) throws Exception { - waitUntilAnalyticsIsStopped(id, TimeValue.timeValueSeconds(60)); + waitUntilAnalyticsIsStopped(id, TimeValue.timeValueSeconds(90)); } protected void waitUntilAnalyticsIsStopped(String id, TimeValue waitTime) throws Exception { diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java index b163036e94760..f7cfdc7502e5e 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.SetResetModeActionRequest; @@ -363,7 +364,12 @@ private void createReindexedIndex(String reindexedIndexName) { } private void createNotification(boolean includeNodeInfo) { - AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor(client(), 
getInstanceFromNode(ClusterService.class), includeNodeInfo); + AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor( + client(), + getInstanceFromNode(ClusterService.class), + TestIndexNameExpressionResolver.newInstance(), + includeNodeInfo + ); auditor.info("whatever", "blah"); } } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 68e17ae15c549..5d06cfe0cd951 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -148,7 +148,12 @@ public void createComponents() throws Exception { .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(1)); // We can't change the signature of createComponents to e.g. pass differing values of includeNodeInfo to pass to the // AnomalyDetectionAuditor constructor. Instead we generate a random boolean value for that purpose. - AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor(client(), getInstanceFromNode(ClusterService.class), randomBoolean()); + AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor( + client(), + getInstanceFromNode(ClusterService.class), + TestIndexNameExpressionResolver.newInstance(), + randomBoolean() + ); jobResultsProvider = new JobResultsProvider(client(), builder.build(), TestIndexNameExpressionResolver.newInstance()); renormalizer = mock(Renormalizer.class); process = mock(AutodetectProcess.class); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java index 8c9c527382106..35bc424f67aff 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Tuple; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.license.License; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.MlConfigVersion; @@ -95,7 +96,12 @@ public void testStoreModelViaChunkedPersisterWithNodeInfo() throws IOException { ChunkedTrainedModelPersister persister = new ChunkedTrainedModelPersister( trainedModelProvider, analyticsConfig, - new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class), true), + new DataFrameAnalyticsAuditor( + client(), + getInstanceFromNode(ClusterService.class), + TestIndexNameExpressionResolver.newInstance(), + true + ), (ex) -> { throw new ElasticsearchException(ex); }, @@ -167,7 +173,12 @@ public void testStoreModelViaChunkedPersisterWithoutNodeInfo() throws IOExceptio ChunkedTrainedModelPersister persister = new ChunkedTrainedModelPersister( trainedModelProvider, analyticsConfig, - new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class), false), + new DataFrameAnalyticsAuditor( + 
client(), + getInstanceFromNode(ClusterService.class), + TestIndexNameExpressionResolver.newInstance(), + false + ), (ex) -> { throw new ElasticsearchException(ex); }, diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java index a8e97263647ea..f194d77d3836d 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.action.DeleteDataFrameAnalyticsAction; @@ -41,7 +42,12 @@ public void createComponents() throws Exception { configProvider = new DataFrameAnalyticsConfigProvider( client(), xContentRegistry(), - new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class), randomBoolean()), + new DataFrameAnalyticsAuditor( + client(), + getInstanceFromNode(ClusterService.class), + TestIndexNameExpressionResolver.newInstance(), + randomBoolean() + ), getInstanceFromNode(ClusterService.class) ); waitForMlTemplates(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java index e29cd4545846c..ff92e06385252 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MlConfigVersion; @@ -58,7 +59,12 @@ public void createComponents() throws Exception { xContentRegistry(), // We can't change the signature of createComponents to e.g. pass differing values of includeNodeInfo to pass to the // DataFrameAnalyticsAuditor constructor. Instead we generate a random boolean value for that purpose. 
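The same constructor change repeats across these integration tests; a hedged recap of the new shape (the extra IndexNameExpressionResolver argument presumably lets the auditors resolve their write alias to a concrete index after rollover -- an inference from this PR, not a statement in it):

```java
// Pattern now used by every auditor construction in these tests:
DataFrameAnalyticsAuditor auditor = new DataFrameAnalyticsAuditor(
    client(),                                      // same as before
    getInstanceFromNode(ClusterService.class),     // same as before
    TestIndexNameExpressionResolver.newInstance(), // newly required resolver
    randomBoolean()                                // includeNodeInfo, unchanged
);
```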
- new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class), randomBoolean()), + new DataFrameAnalyticsAuditor( + client(), + getInstanceFromNode(ClusterService.class), + TestIndexNameExpressionResolver.newInstance(), + randomBoolean() + ), getInstanceFromNode(ClusterService.class) ); dummyAuthenticationHeader = Authentication.newRealmAuthentication( diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index 7ce0fbe760644..a650556b0501e 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -140,7 +140,7 @@ public void createComponents() throws Exception { // We can't change the signature of createComponents to e.g. pass differing values of includeNodeInfo to pass to the // AnomalyDetectionAuditor constructor. Instead we generate a random boolean value for that purpose. boolean includeNodeInfo = randomBoolean(); - auditor = new AnomalyDetectionAuditor(client(), clusterService, includeNodeInfo); + auditor = new AnomalyDetectionAuditor(client(), clusterService, TestIndexNameExpressionResolver.newInstance(), includeNodeInfo); waitForMlTemplates(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NotificationsIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NotificationsIndexIT.java new file mode 100644 index 0000000000000..9887e69dafde1 --- /dev/null +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NotificationsIndexIT.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.metadata.AliasMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; +import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; +import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.is; + +public class NotificationsIndexIT extends MlSingleNodeTestCase { + + @Override + protected Settings nodeSettings() { + Settings.Builder newSettings = Settings.builder(); + newSettings.put(super.nodeSettings()); + newSettings.put(XPackSettings.SECURITY_ENABLED.getKey(), false); + newSettings.put(XPackSettings.WATCHER_ENABLED.getKey(), false); + return newSettings.build(); + } + + public void testAliasCreated() throws Exception { + // Auditing a notification should create the .ml-notifications-000002 index + // and write alias + createNotification(true); + + assertBusy(() -> { + assertNotificationsIndexExists(); + assertNotificationsWriteAliasCreated(); + }); + } + + private void assertNotificationsIndexExists() { + GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .setIndices(NotificationsIndex.NOTIFICATIONS_INDEX) + .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) + .get(); + assertThat(Arrays.asList(getIndexResponse.getIndices()), contains(NotificationsIndex.NOTIFICATIONS_INDEX)); + } + + private void assertNotificationsWriteAliasCreated() { + Map<String, List<AliasMetadata>> aliases = indicesAdmin().prepareGetAliases( + TimeValue.timeValueSeconds(10L), + NotificationsIndex.NOTIFICATIONS_INDEX_WRITE_ALIAS + ).setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get().getAliases(); + assertThat(aliases.size(), is(1)); + List<AliasMetadata> indexAliases = aliases.get(NotificationsIndex.NOTIFICATIONS_INDEX); + assertNotNull(aliases.toString(), indexAliases); + assertThat(indexAliases.size(), is(1)); + var writeAlias = indexAliases.get(0); + assertThat(writeAlias.alias(), is(NotificationsIndex.NOTIFICATIONS_INDEX_WRITE_ALIAS)); + assertThat("notification write alias should be hidden but is not: " + aliases, writeAlias.isHidden(), is(true)); + } + + private void createNotification(boolean includeNodeInfo) { + AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor( + client(), + getInstanceFromNode(ClusterService.class), + TestIndexNameExpressionResolver.newInstance(), + includeNodeInfo + ); + auditor.info("whatever", "blah"); + } +} diff --git a/x-pack/plugin/ml/src/main/java/module-info.java b/x-pack/plugin/ml/src/main/java/module-info.java index 4984fa8912e28..1013b8e052e4c 100644 --- a/x-pack/plugin/ml/src/main/java/module-info.java +++ b/x-pack/plugin/ml/src/main/java/module-info.java @@ -37,8 +37,11 @@ exports org.elasticsearch.xpack.ml; exports org.elasticsearch.xpack.ml.action; + exports org.elasticsearch.xpack.ml.aggs; exports org.elasticsearch.xpack.ml.aggs.categorization; + exports org.elasticsearch.xpack.ml.aggs.changepoint; exports org.elasticsearch.xpack.ml.autoscaling; exports
org.elasticsearch.xpack.ml.job.categorization; exports org.elasticsearch.xpack.ml.notifications; + } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 043a27b7cd147..4220c52b374f2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -568,7 +568,7 @@ public Map getProcessors(Processor.Parameters paramet parameters.client, parameters.ingestService.getClusterService(), this.settings, - machineLearningExtension.get().includeNodeInfo() + inferenceAuditor ); parameters.ingestService.addIngestClusterStateListener(inferenceFactory); return Map.of(InferenceProcessor.TYPE, inferenceFactory); @@ -765,6 +765,8 @@ public void loadExtensions(ExtensionLoader loader) { private final SetOnce datafeedRunner = new SetOnce<>(); private final SetOnce dataFrameAnalyticsManager = new SetOnce<>(); private final SetOnce dataFrameAnalyticsAuditor = new SetOnce<>(); + private final SetOnce anomalyDetectionAuditor = new SetOnce<>(); + private final SetOnce inferenceAuditor = new SetOnce<>(); private final SetOnce memoryTracker = new SetOnce<>(); private final SetOnce mlUpgradeModeActionFilter = new SetOnce<>(); private final SetOnce mlLifeCycleService = new SetOnce<>(); @@ -944,15 +946,24 @@ public Collection createComponents(PluginServices services) { AnomalyDetectionAuditor anomalyDetectionAuditor = new AnomalyDetectionAuditor( client, clusterService, + indexNameExpressionResolver, machineLearningExtension.get().includeNodeInfo() ); + this.anomalyDetectionAuditor.set(anomalyDetectionAuditor); DataFrameAnalyticsAuditor dataFrameAnalyticsAuditor = new DataFrameAnalyticsAuditor( client, clusterService, + indexNameExpressionResolver, + machineLearningExtension.get().includeNodeInfo() + ); + InferenceAuditor inferenceAuditor = new InferenceAuditor( + client, + clusterService, + indexNameExpressionResolver, machineLearningExtension.get().includeNodeInfo() ); - InferenceAuditor inferenceAuditor = new InferenceAuditor(client, clusterService, machineLearningExtension.get().includeNodeInfo()); - SystemAuditor systemAuditor = new SystemAuditor(client, clusterService); + this.inferenceAuditor.set(inferenceAuditor); + SystemAuditor systemAuditor = new SystemAuditor(client, clusterService, indexNameExpressionResolver); this.dataFrameAnalyticsAuditor.set(dataFrameAnalyticsAuditor); OriginSettingClient originSettingClient = new OriginSettingClient(client, ML_ORIGIN); @@ -1233,14 +1244,11 @@ public Collection createComponents(PluginServices services) { ), new MlIndexRollover.IndexPatternAndAlias(MlStatsIndex.indexPattern(), MlStatsIndex.writeAlias()), new MlIndexRollover.IndexPatternAndAlias(AnnotationIndex.INDEX_PATTERN, AnnotationIndex.WRITE_ALIAS_NAME) - // TODO notifications = https://github.com/elastic/elasticsearch/pull/120064 - // TODO anomaly results - // TODO .ml-inference-XXXXXX - requires alias - // TODO .ml-inference-native-XXXXXX - requires alias (index added in 8.0) ), indexNameExpressionResolver, client - ) + ), + new MlAnomaliesIndexUpdate(indexNameExpressionResolver, client) ) ); clusterService.addListener(mlAutoUpdateService); @@ -1372,7 +1380,7 @@ public List> getPersistentTasksExecutor( client, expressionResolver, getLicenseState(), - machineLearningExtension.get().includeNodeInfo() + anomalyDetectionAuditor.get() ), new 
TransportStartDatafeedAction.StartDatafeedPersistentTasksExecutor(datafeedRunner.get(), expressionResolver, threadPool), new TransportStartDataFrameAnalyticsAction.TaskExecutor( @@ -1393,7 +1401,7 @@ public List> getPersistentTasksExecutor( expressionResolver, client, getLicenseState(), - machineLearningExtension.get().includeNodeInfo() + anomalyDetectionAuditor.get() ) ); } @@ -2104,35 +2112,33 @@ public void cleanUpFeature( final Map results = new ConcurrentHashMap<>(); - ActionListener unsetResetModeListener = ActionListener.wrap( - success -> client.execute( + ActionListener unsetResetModeListener = ActionListener.wrap(success -> { + // reset the auditors as aliases used may be removed + resetAuditors(); + + client.execute(SetResetModeAction.INSTANCE, SetResetModeActionRequest.disabled(true), ActionListener.wrap(resetSuccess -> { + finalListener.onResponse(success); + logger.info("Finished machine learning feature reset"); + }, resetFailure -> { + logger.error("failed to disable reset mode after state otherwise successful machine learning reset", resetFailure); + finalListener.onFailure( + ExceptionsHelper.serverError( + "failed to disable reset mode after state otherwise successful machine learning reset", + resetFailure + ) + ); + })); + }, failure -> { + logger.error("failed to reset machine learning", failure); + client.execute( SetResetModeAction.INSTANCE, - SetResetModeActionRequest.disabled(true), - ActionListener.wrap(resetSuccess -> { - finalListener.onResponse(success); - logger.info("Finished machine learning feature reset"); - }, resetFailure -> { - logger.error("failed to disable reset mode after state otherwise successful machine learning reset", resetFailure); - finalListener.onFailure( - ExceptionsHelper.serverError( - "failed to disable reset mode after state otherwise successful machine learning reset", - resetFailure - ) - ); + SetResetModeActionRequest.disabled(false), + ActionListener.wrap(resetSuccess -> finalListener.onFailure(failure), resetFailure -> { + logger.error("failed to disable reset mode after state clean up failure", resetFailure); + finalListener.onFailure(failure); }) - ), - failure -> { - logger.error("failed to reset machine learning", failure); - client.execute( - SetResetModeAction.INSTANCE, - SetResetModeActionRequest.disabled(false), - ActionListener.wrap(resetSuccess -> finalListener.onFailure(failure), resetFailure -> { - logger.error("failed to disable reset mode after state clean up failure", resetFailure); - finalListener.onFailure(failure); - }) - ); - } - ); + ); + }); // Stop all model deployments ActionListener pipelineValidation = unsetResetModeListener.delegateFailureAndWrap( @@ -2285,6 +2291,18 @@ public void cleanUpFeature( client.execute(SetResetModeAction.INSTANCE, SetResetModeActionRequest.enabled(), afterResetModeSet); } + private void resetAuditors() { + if (anomalyDetectionAuditor.get() != null) { + anomalyDetectionAuditor.get().reset(); + } + if (dataFrameAnalyticsAuditor.get() != null) { + dataFrameAnalyticsAuditor.get().reset(); + } + if (inferenceAuditor.get() != null) { + inferenceAuditor.get().reset(); + } + } + @Override public BreakerSettings getCircuitBreaker(Settings settingsToUse) { return BreakerSettings.updateFromSettings( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java new file mode 100644 index 0000000000000..27bce6747b32f --- /dev/null +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java @@ -0,0 +1,235 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; +import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; +import org.elasticsearch.xpack.core.ml.utils.MlStrings; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; + +/** + * Rollover the various .ml-anomalies result indices + * updating the read and write aliases + */ +public class MlAnomaliesIndexUpdate implements MlAutoUpdateService.UpdateAction { + + private static final Logger logger = LogManager.getLogger(MlAnomaliesIndexUpdate.class); + + private final IndexNameExpressionResolver expressionResolver; + private final OriginSettingClient client; + + public MlAnomaliesIndexUpdate(IndexNameExpressionResolver expressionResolver, Client client) { + this.expressionResolver = expressionResolver; + this.client = new OriginSettingClient(client, ML_ORIGIN); + } + + @Override + public boolean isMinTransportVersionSupported(TransportVersion minTransportVersion) { + // Automatic rollover does not require any new features + // but wait for all nodes to be upgraded anyway + return minTransportVersion.onOrAfter(TransportVersions.ML_ROLLOVER_LEGACY_INDICES); + } + + @Override + public boolean isAbleToRun(ClusterState latestState) { + // Find the .ml-anomalies-shared and all custom results indices + String[] indices = expressionResolver.concreteIndexNames( + latestState, + IndicesOptions.lenientExpandOpenHidden(), + AnomalyDetectorsIndex.jobResultsIndexPattern() + ); + + for (String index : indices) { + IndexRoutingTable routingTable = latestState.getRoutingTable().index(index); + if (routingTable == null || 
routingTable.allPrimaryShardsActive() == false) { + return false; + } + } + return true; + } + + @Override + public String getName() { + return "ml_anomalies_index_update"; + } + + @Override + public void runUpdate(ClusterState latestState) { + List failures = new ArrayList<>(); + + // list all indices starting with .ml-anomalies- + // this includes the shared index and all custom results indices + String[] indices = expressionResolver.concreteIndexNames( + latestState, + IndicesOptions.lenientExpandOpenHidden(), + AnomalyDetectorsIndex.jobResultsIndexPattern() + ); + + for (String index : indices) { + boolean isCompatibleIndexVersion = MlIndexAndAlias.indexIsReadWriteCompatibleInV9( + latestState.metadata().index(index).getCreationVersion() + ); + + if (isCompatibleIndexVersion) { + continue; + } + + PlainActionFuture updated = new PlainActionFuture<>(); + rollAndUpdateAliases(latestState, index, updated); + try { + updated.actionGet(); + } catch (Exception ex) { + var message = "failed rolling over legacy ml anomalies index [" + index + "]"; + logger.warn(message, ex); + if (ex instanceof ElasticsearchException elasticsearchException) { + failures.add(new ElasticsearchStatusException(message, elasticsearchException.status(), elasticsearchException)); + } else { + failures.add(new ElasticsearchStatusException(message, RestStatus.REQUEST_TIMEOUT, ex)); + } + + break; + } + } + + if (failures.isEmpty()) { + logger.info("legacy ml anomalies indices rolled over and aliases updated"); + return; + } + + var exception = new ElasticsearchStatusException("failed to roll over legacy ml anomalies", RestStatus.CONFLICT); + failures.forEach(exception::addSuppressed); + throw exception; + } + + private void rollAndUpdateAliases(ClusterState clusterState, String index, ActionListener listener) { + // Create an alias specifically for rolling over. + // The ml-anomalies index has aliases for each job, any one + // of which could be used, but that would mean treating one + // alias differently. + // Using a `.` in the alias name avoids any conflicts, + // as AD job Ids cannot start with `.` + String rolloverAlias = index + ".rollover_alias"; + + // If the index does not end in a digit then rollover does not know + // what to name the new index, so it must be specified in the request. + // Otherwise leave it null and rollover will calculate the new name.
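+ // For example (illustration only): rolling over ".ml-anomalies-custom-foo" needs the new name + // ".ml-anomalies-custom-foo-000001" spelled out, whereas ".ml-anomalies-custom-foo-000001" already + // ends in a six digit suffix and rollover derives ".ml-anomalies-custom-foo-000002" by itself.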
String newIndexName = MlIndexAndAlias.has6DigitSuffix(index) ? null : index + MlIndexAndAlias.FIRST_INDEX_SIX_DIGIT_SUFFIX; + IndicesAliasesRequestBuilder aliasRequestBuilder = client.admin().indices().prepareAliases(); + + SubscribableListener.newForked( + l -> { createAliasForRollover(index, rolloverAlias, l.map(AcknowledgedResponse::isAcknowledged)); } + ).andThen((l, success) -> { + rollover(rolloverAlias, newIndexName, l); + }).andThen((l, newIndexNameResponse) -> { + addIndexAliasesRequests(aliasRequestBuilder, index, newIndexNameResponse, clusterState); + // Delete the new alias created for the rollover action + aliasRequestBuilder.removeAlias(newIndexNameResponse, rolloverAlias); + updateAliases(aliasRequestBuilder, l); + }).addListener(listener); + } + + private void rollover(String alias, @Nullable String newIndexName, ActionListener listener) { + client.admin().indices().rolloverIndex(new RolloverRequest(alias, newIndexName), listener.delegateFailure((l, response) -> { + l.onResponse(response.getNewIndex()); + })); + } + + private void createAliasForRollover(String indexName, String aliasName, ActionListener listener) { + logger.info("creating alias for rollover [{}]", aliasName); + client.admin() + .indices() + .prepareAliases() + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index(indexName).alias(aliasName).isHidden(true)) + .execute(listener); + } + + private void updateAliases(IndicesAliasesRequestBuilder request, ActionListener listener) { + request.execute(listener.delegateFailure((l, response) -> l.onResponse(Boolean.TRUE))); + } + + IndicesAliasesRequestBuilder addIndexAliasesRequests( + IndicesAliasesRequestBuilder aliasRequestBuilder, + String oldIndex, + String newIndex, + ClusterState clusterState + ) { + // Multiple jobs can share the same index; each job + // has a read and a write alias that need updating + // after the rollover. + var meta = clusterState.metadata().index(oldIndex); + assert meta != null; + if (meta == null) { + return aliasRequestBuilder; + } + + for (var alias : meta.getAliases().values()) { + if (isAnomaliesWriteAlias(alias.alias())) { + aliasRequestBuilder.addAliasAction( + IndicesAliasesRequest.AliasActions.add().index(newIndex).alias(alias.alias()).isHidden(true).writeIndex(true) + ); + aliasRequestBuilder.addAliasAction(IndicesAliasesRequest.AliasActions.remove().index(oldIndex).alias(alias.alias())); + } else if (isAnomaliesReadAlias(alias.alias())) { + String jobId = AnomalyDetectorsIndex.jobIdFromAlias(alias.alias()); + aliasRequestBuilder.addAliasAction( + IndicesAliasesRequest.AliasActions.add() + .index(newIndex) + .alias(alias.alias()) + .isHidden(true) + .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) + ); + } + } + + return aliasRequestBuilder; + } + + static boolean isAnomaliesWriteAlias(String aliasName) { + return aliasName.startsWith(AnomalyDetectorsIndexFields.RESULTS_INDEX_WRITE_PREFIX); + } + + static boolean isAnomaliesReadAlias(String aliasName) { + if (aliasName.startsWith(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX) == false) { + return false; + } + + // See {@link AnomalyDetectorsIndex#jobResultsAliasedName} + String jobIdPart = aliasName.substring(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX.length()); + // If this is a write alias it will start with a `.` character, + // which is not a valid job id.
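+ // For example, assuming the standard alias naming: for job "foo" the read alias + // ".ml-anomalies-foo" leaves jobIdPart "foo", a valid job id, while the write alias + // ".ml-anomalies-.write-foo" leaves jobIdPart ".write-foo", which is not.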
+ return MlStrings.isValidId(jobIdPart); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java index d1137069fea41..b2af9bb872705 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java @@ -65,6 +65,6 @@ static boolean mlConfigIndexIsAllocated(ClusterState clusterState) { } IndexRoutingTable routingTable = clusterState.getRoutingTable().index(configIndexOrAlias.getWriteIndex()); - return routingTable != null && routingTable.allPrimaryShardsActive() && routingTable.readyForSearch(clusterState); + return routingTable != null && routingTable.allPrimaryShardsActive() && routingTable.readyForSearch(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java index 7dbafdc2676ba..c079e5dfde737 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java @@ -117,7 +117,7 @@ public void runUpdate(ClusterState latestState) { } if (failures.isEmpty()) { - logger.info("ML legacy indies rolled over"); + logger.info("ML legacy indices rolled over"); return; } @@ -136,7 +136,10 @@ private void rolloverLegacyIndices(ClusterState clusterState, String indexPatter } String latestIndex = MlIndexAndAlias.latestIndex(concreteIndices); - boolean isCompatibleIndexVersion = isCompatibleIndexVersion(clusterState.metadata().index(latestIndex).getCreationVersion()); + // Indices created before 8.0 are read only in 9 + boolean isCompatibleIndexVersion = MlIndexAndAlias.indexIsReadWriteCompatibleInV9( + clusterState.metadata().index(latestIndex).getCreationVersion() + ); boolean hasAlias = clusterState.getMetadata().hasAlias(alias); if (isCompatibleIndexVersion && hasAlias) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java index 91e738bf2183b..02fcc2b4465f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java @@ -43,7 +43,8 @@ public class MlIndexTemplateRegistry extends IndexTemplateRegistry { * 10000001: TODO - reason */ public static final int ML_INDEX_TEMPLATE_VERSION = 10000000 + AnomalyDetectorsIndex.RESULTS_INDEX_MAPPINGS_VERSION - + NotificationsIndex.NOTIFICATIONS_INDEX_MAPPINGS_VERSION + MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION; + + NotificationsIndex.NOTIFICATIONS_INDEX_MAPPINGS_VERSION + MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION + + NotificationsIndex.NOTIFICATIONS_INDEX_TEMPLATE_VERSION; private static final String ROOT_RESOURCE_PATH = "/ml/"; private static final String ANOMALY_DETECTION_PATH = ROOT_RESOURCE_PATH + "anomalydetection/"; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index 919f0a526b8ae..e49901ea9976b 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -134,10 +134,9 @@ protected void doExecute( TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); BooleanSupplier isTimedOutSupplier = () -> Instant.now(clock).isAfter(timeoutTime); - AnomalyDetectionAuditor anomalyDetectionAuditor = new AnomalyDetectionAuditor(client, clusterService, auditor.includeNodeInfo()); if (Strings.isNullOrEmpty(request.getJobId()) || Strings.isAllOrWildcard(request.getJobId())) { - List dataRemovers = createDataRemovers(client, taskId, anomalyDetectionAuditor); + List dataRemovers = createDataRemovers(client, taskId, auditor); threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) .execute(ActionRunnable.wrap(listener, l -> deleteExpiredData(request, dataRemovers, l, isTimedOutSupplier))); } else { @@ -152,7 +151,7 @@ protected void doExecute( List jobs = jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()); String[] jobIds = jobs.stream().map(Job::getId).toArray(String[]::new); request.setExpandedJobIds(jobIds); - List dataRemovers = createDataRemovers(jobs, taskId, anomalyDetectionAuditor); + List dataRemovers = createDataRemovers(jobs, taskId, auditor); deleteExpiredData(request, dataRemovers, l, isTimedOutSupplier); })) ) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeType.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeType.java index 7df542b59107b..580209b2d821d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeType.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeType.java @@ -121,6 +121,10 @@ public String getName() { return NAME; } + public String getReason() { + return reason; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java index 9fe9a5226f286..ec459ab81dcfa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java @@ -62,9 +62,7 @@ public boolean isAbleToRun(ClusterState latestState) { continue; } IndexRoutingTable routingTable = latestState.getRoutingTable().index(index); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(latestState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { return false; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java index 31add7b37ac5f..787142bac94a7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java @@ -206,9 +206,7 @@ private AssignmentFailure verifyIndicesActive() { for (String concreteIndex : concreteIndices) { IndexRoutingTable routingTable = 
clusterState.getRoutingTable().index(concreteIndex); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(clusterState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { return new AssignmentFailure( "cannot start datafeed [" + datafeedId diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java index 5e24393be0a22..6e377770ed0ef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcessFactory.java @@ -149,7 +149,7 @@ private void createNativeProcess( ProcessPipes processPipes ) { AnalyticsBuilder analyticsBuilder = new AnalyticsBuilder( - env::tmpFile, + env::tmpDir, nativeController, processPipes, analyticsProcessConfig, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeMemoryUsageEstimationProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeMemoryUsageEstimationProcessFactory.java index 8d106be13882d..99adf6b6d506d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeMemoryUsageEstimationProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeMemoryUsageEstimationProcessFactory.java @@ -116,7 +116,7 @@ private void createNativeProcess( ProcessPipes processPipes ) { AnalyticsBuilder analyticsBuilder = new AnalyticsBuilder( - env::tmpFile, + env::tmpDir, nativeController, processPipes, analyticsProcessConfig, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java index 67f2ea74464d0..ba9c5cd7267b5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java @@ -240,9 +240,7 @@ static boolean verifyIndicesExistAndPrimaryShardsAreActive(ClusterState clusterS return false; } IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(clusterState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { return false; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java index 6b14e60c00247..e50d67e068756 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; @@ -386,15 +387,15 @@ public static final class Factory implements Processor.Factory, Consumer auditor; private volatile ClusterState clusterState = ClusterState.EMPTY_STATE; private volatile int maxIngestProcessors; private volatile MlConfigVersion minNodeVersion = MlConfigVersion.CURRENT; - public Factory(Client client, ClusterService clusterService, Settings settings, boolean includeNodeInfo) { + public Factory(Client client, ClusterService clusterService, Settings settings, SetOnce auditor) { this.client = client; this.maxIngestProcessors = MAX_INFERENCE_PROCESSORS.get(settings); - this.auditor = new InferenceAuditor(client, clusterService, includeNodeInfo); + this.auditor = auditor; clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_INFERENCE_PROCESSORS, this::setMaxIngestProcessors); } @@ -481,7 +482,7 @@ public InferenceProcessor create( return fromInputFieldConfiguration( client, - auditor, + auditor.get(), tag, description, modelId, @@ -509,7 +510,7 @@ public InferenceProcessor create( } return fromTargetFieldConfiguration( client, - auditor, + auditor.get(), tag, description, targetField, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java index 08c141c0858ca..26d5125c94c32 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java @@ -51,7 +51,7 @@ public void setNextReader(LeafReaderContext segmentContext) throws IOException { } Scorer scorer = weight.scorer(segmentContext); if (scorer != null) { - disiPriorityQueue.add(new DisiWrapper(scorer)); + disiPriorityQueue.add(new DisiWrapper(scorer, false)); } scorers.add(scorer); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java index 2d4ea308a6693..9c6d953cee5fb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java @@ -209,9 +209,9 @@ public static Path writeNormalizerInitState(String jobId, String state, Environm // createTempFile has a race condition where it may return the same // temporary file name to different threads if called simultaneously // from multiple threads, hence add the thread ID to avoid this - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); Path stateFile = Files.createTempFile( - env.tmpFile(), + env.tmpDir(), jobId + "_quantiles_" + Thread.currentThread().getId(), QUANTILES_FILE_EXTENSION ); @@ -227,8 +227,8 @@ private void buildScheduledEventsConfig(List command) throws IOException if (scheduledEvents.isEmpty()) { return; } - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); - Path eventsConfigFile = Files.createTempFile(env.tmpFile(), "eventsConfig", JSON_EXTENSION); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); + Path eventsConfigFile = Files.createTempFile(env.tmpDir(), "eventsConfig", JSON_EXTENSION); filesToDelete.add(eventsConfigFile); List 
scheduledEventToRuleWriters = scheduledEvents.stream() @@ -252,8 +252,8 @@ private void buildScheduledEventsConfig(List command) throws IOException } private void buildJobConfig(List command) throws IOException { - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); - Path configFile = Files.createTempFile(env.tmpFile(), "config", JSON_EXTENSION); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); + Path configFile = Files.createTempFile(env.tmpDir(), "config", JSON_EXTENSION); filesToDelete.add(configFile); try ( OutputStreamWriter osw = new OutputStreamWriter(Files.newOutputStream(configFile), StandardCharsets.UTF_8); @@ -271,8 +271,8 @@ private void buildFiltersConfig(List command) throws IOException { if (referencedFilters.isEmpty()) { return; } - FileUtils.recreateTempDirectoryIfNeeded(env.tmpFile()); - Path filtersConfigFile = Files.createTempFile(env.tmpFile(), "filtersConfig", JSON_EXTENSION); + FileUtils.recreateTempDirectoryIfNeeded(env.tmpDir()); + Path filtersConfigFile = Files.createTempFile(env.tmpDir(), "filtersConfig", JSON_EXTENSION); filesToDelete.add(filtersConfigFile); try ( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java index d69acab30451a..d42eb8f748b51 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java @@ -12,19 +12,27 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskState; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -44,9 +52,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.concurrent.ExecutionException; import 
java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; import java.util.concurrent.TimeoutException; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -153,6 +159,55 @@ synchronized void start() { executor.execute(); } + private void removeDuplicateModelSnapshotDoc(Consumer runAfter) { + String snapshotDocId = jobId + "_model_snapshot_" + snapshotId; + client.prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPattern()) + .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(snapshotDocId))) + .setSize(2) + .addSort(ModelSnapshot.MIN_VERSION.getPreferredName(), org.elasticsearch.search.sort.SortOrder.ASC) + .execute(ActionListener.wrap(searchResponse -> { + if (searchResponse.getHits().getTotalHits().value() > 1) { + deleteOlderSnapshotDoc(searchResponse, runAfter); + } else { + onFinish.accept(null); + } + }, e -> { + logger.warn(() -> format("[%s] [%s] error during search for model snapshot documents", jobId, snapshotId), e); + onFinish.accept(null); + })); + } + + private void deleteOlderSnapshotDoc(SearchResponse searchResponse, Consumer runAfter) { + SearchHit firstHit = searchResponse.getHits().getAt(0); + logger.debug(() -> format("[%s] deleting duplicate model snapshot doc [%s]", jobId, firstHit.getId())); + client.prepareDelete() + .setIndex(firstHit.getIndex()) + .setId(firstHit.getId()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .execute(ActionListener.runAfter(ActionListener.wrap(deleteResponse -> { + if ((deleteResponse.getResult() == DocWriteResponse.Result.DELETED) == false) { + logger.warn( + () -> format( + "[%s] [%s] failed to delete old snapshot [%s] result document, document not found", + jobId, + snapshotId, + ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName() + ) + ); + } + }, e -> { + logger.warn( + () -> format( + "[%s] [%s] failed to delete old snapshot [%s] result document", + jobId, + snapshotId, + ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName() + ), + e + ); + }), () -> runAfter.accept(null))); + } + void setTaskToFailed(String reason, ActionListener> listener) { SnapshotUpgradeTaskState taskState = new SnapshotUpgradeTaskState(SnapshotUpgradeState.FAILED, task.getAllocationId(), reason); task.updatePersistentTaskState(taskState, ActionListener.wrap(listener::onResponse, f -> { @@ -259,7 +314,7 @@ void restoreState() { logger.error(() -> format("[%s] [%s] failed to write old state", jobId, snapshotId), e); setTaskToFailed( "Failed to write old state due to: " + e.getMessage(), - ActionListener.wrap(t -> shutdown(e), f -> shutdown(e)) + ActionListener.running(() -> shutdownWithFailure(e)) ); return; } @@ -273,7 +328,7 @@ void restoreState() { logger.error(() -> format("[%s] [%s] failed to flush after writing old state", jobId, snapshotId), e); nextStep = () -> setTaskToFailed( "Failed to flush after writing old state due to: " + e.getMessage(), - ActionListener.wrap(t -> shutdown(e), f -> shutdown(e)) + ActionListener.running(() -> shutdownWithFailure(e)) ); } else { logger.debug( @@ -295,7 +350,7 @@ private void requestStateWrite() { new SnapshotUpgradeTaskState(SnapshotUpgradeState.SAVING_NEW_STATE, task.getAllocationId(), ""), ActionListener.wrap(readingNewState -> { if (continueRunning.get() == false) { - shutdown(null); + shutdownWithFailure(null); return; } submitOperation(() -> { @@ -310,12 +365,12 @@ private void requestStateWrite() { // Execute callback in the UTILITY thread pool, as the current thread in the callback will be one in the // autodetectWorkerExecutor. 
Trying to run the callback in that executor will cause a dead lock as that // executor has a single processing queue. - (aVoid, e) -> threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> shutdown(e)) + (aVoid, e) -> threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> handlePersistingState(e)) ); logger.debug("[{}] [{}] asked for state to be persisted", jobId, snapshotId); }, f -> { logger.error(() -> format("[%s] [%s] failed to update snapshot upgrader task to started", jobId, snapshotId), f); - shutdown( + shutdownWithFailure( new ElasticsearchStatusException( "Failed to start snapshot upgrade [{}] for job [{}]", RestStatus.INTERNAL_SERVER_ERROR, @@ -378,17 +433,45 @@ private void checkResultsProcessorIsAlive() { } } - void shutdown(Exception e) { + private void handlePersistingState(@Nullable Exception exception) { + assert Thread.currentThread().getName().contains(UTILITY_THREAD_POOL_NAME); + + if (exception != null) { + shutdownWithFailure(exception); + } else { + stopProcess((aVoid, e) -> { + threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> { + autodetectWorkerExecutor.shutdownNow(); + // If there are two snapshot documents in the results indices with the same snapshot id, + // remove the old one. This can happen when the result index has been rolled over and + // the write alias is pointing to the new index. + removeDuplicateModelSnapshotDoc(onFinish); + }); + + }); + } + } + + void shutdownWithFailure(Exception e) { + stopProcess((aVoid, ignored) -> { + threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(() -> { + onFinish.accept(e); + autodetectWorkerExecutor.shutdownNow(); + }); + }); + } + + private void stopProcess(BiConsumer, Exception> runNext) { logger.debug("[{}] [{}] shutdown initiated", jobId, snapshotId); // No point in sending an action to the executor if the process has died if (process.isProcessAlive() == false) { logger.debug("[{}] [{}] process is dead, no need to shutdown", jobId, snapshotId); - onFinish.accept(e); - autodetectWorkerExecutor.shutdownNow(); stateStreamer.cancel(); + runNext.accept(null, null); return; } - Future future = autodetectWorkerExecutor.submit(() -> { + + submitOperation(() -> { try { logger.debug("[{}] [{}] shutdown is now occurring", jobId, snapshotId); if (process.isReady()) { @@ -401,24 +484,10 @@ void shutdown(Exception e) { processor.awaitCompletion(); } catch (IOException | TimeoutException exc) { logger.warn(() -> format("[%s] [%s] failed to shutdown process", jobId, snapshotId), exc); - } finally { - onFinish.accept(e); } logger.debug("[{}] [{}] connection for upgrade has been closed, process is shutdown", jobId, snapshotId); - }); - try { - future.get(); - autodetectWorkerExecutor.shutdownNow(); - } catch (InterruptedException interrupt) { - Thread.currentThread().interrupt(); - } catch (ExecutionException executionException) { - if (processor.isProcessKilled()) { - // In this case the original exception is spurious and highly misleading - throw ExceptionsHelper.conflictStatusException("close snapshot upgrade interrupted by kill request"); - } else { - throw FutureUtils.rethrowExecutionException(executionException); - } - } + return Void.TYPE; + }, runNext); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java index cc3f8f0dd1e67..42f722e330a19 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java @@ -69,7 +69,7 @@ public SnapshotUpgradeTaskExecutor( IndexNameExpressionResolver expressionResolver, Client client, XPackLicenseState licenseState, - boolean includeNodeInfo + AnomalyDetectionAuditor auditor ) { super( MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME, @@ -80,7 +80,7 @@ public SnapshotUpgradeTaskExecutor( expressionResolver ); this.autodetectProcessManager = autodetectProcessManager; - this.auditor = new AnomalyDetectionAuditor(client, clusterService, includeNodeInfo); + this.auditor = auditor; this.jobResultsProvider = new JobResultsProvider(client, settings, expressionResolver); this.client = client; this.licenseState = licenseState; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index 9c37ebc0abfd8..b2acff5d1b199 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -115,14 +115,14 @@ public OpenJobPersistentTasksExecutor( Client client, IndexNameExpressionResolver expressionResolver, XPackLicenseState licenseState, - boolean includeNodeInfo + AnomalyDetectionAuditor auditor ) { super(MlTasks.JOB_TASK_NAME, MachineLearning.UTILITY_THREAD_POOL_NAME, settings, clusterService, memoryTracker, expressionResolver); this.autodetectProcessManager = Objects.requireNonNull(autodetectProcessManager); this.datafeedConfigProvider = Objects.requireNonNull(datafeedConfigProvider); this.client = Objects.requireNonNull(client); this.jobResultsProvider = new JobResultsProvider(client, settings, expressionResolver); - this.auditor = new AnomalyDetectionAuditor(client, clusterService, includeNodeInfo); + this.auditor = auditor; this.licenseState = licenseState; clusterService.addListener(event -> clusterState = event.state()); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java index 31e33bcd3f62e..99b03c2725411 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java @@ -9,10 +9,16 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage; import 
org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessageFactory; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; @@ -20,6 +26,8 @@ import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.ml.MlIndexTemplateRegistry; +import java.io.IOException; + import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; abstract class AbstractMlAuditor extends AbstractAuditor { @@ -27,14 +35,20 @@ abstract class AbstractMlAuditor extends Abstrac private static final Logger logger = LogManager.getLogger(AbstractMlAuditor.class); private volatile boolean isResetMode; - protected AbstractMlAuditor(Client client, AbstractAuditMessageFactory messageFactory, ClusterService clusterService) { + protected AbstractMlAuditor( + Client client, + AbstractAuditMessageFactory messageFactory, + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver + ) { super( new OriginSettingClient(client, ML_ORIGIN), - NotificationsIndex.NOTIFICATIONS_INDEX, - MlIndexTemplateRegistry.NOTIFICATIONS_TEMPLATE, + NotificationsIndex.NOTIFICATIONS_INDEX_WRITE_ALIAS, clusterService.getNodeName(), messageFactory, - clusterService + clusterService, + indexNameExpressionResolver, + clusterService.threadPool().generic() ); clusterService.addListener(event -> { if (event.metadataChanged()) { @@ -65,4 +79,29 @@ protected void writeBacklog() { super.writeBacklog(); } } + + @Override + protected TransportPutComposableIndexTemplateAction.Request putTemplateRequest() { + var templateConfig = MlIndexTemplateRegistry.NOTIFICATIONS_TEMPLATE; + try ( + var parser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + MlIndexTemplateRegistry.NOTIFICATIONS_TEMPLATE.loadBytes() + ) + ) { + return new TransportPutComposableIndexTemplateAction.Request(templateConfig.getTemplateName()).indexTemplate( + ComposableIndexTemplate.parse(parser) + ).masterNodeTimeout(MASTER_TIMEOUT); + } catch (IOException e) { + throw new ElasticsearchParseException("unable to parse composable template " + templateConfig.getTemplateName(), e); + } + } + + protected int templateVersion() { + return MlIndexTemplateRegistry.NOTIFICATIONS_TEMPLATE.getVersion(); + } + + protected IndexDetails indexDetails() { + return new IndexDetails(NotificationsIndex.NOTIFICATIONS_INDEX_PREFIX, NotificationsIndex.NOTIFICATIONS_INDEX_VERSION); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java index 5c47f92c3df11..87b6ee9444e05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.notifications; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.xpack.core.ml.notifications.AnomalyDetectionAuditMessage; @@ -14,8 +15,13 @@ public class AnomalyDetectionAuditor extends AbstractMlAuditor { private final boolean includeNodeInfo; - public InferenceAuditor(Client client, ClusterService clusterService, boolean includeNodeInfo) { - super(client, InferenceAuditMessage::new, clusterService); + public InferenceAuditor( + Client 
client, + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver, + boolean includeNodeInfo + ) { + super(client, InferenceAuditMessage::new, clusterService, indexNameExpressionResolver); this.includeNodeInfo = includeNodeInfo; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/SystemAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/SystemAuditor.java index 8cfc445e592a0..4618a03af26c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/SystemAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/SystemAuditor.java @@ -8,16 +8,18 @@ package org.elasticsearch.xpack.ml.notifications; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.xpack.core.ml.notifications.SystemAuditMessage; public class SystemAuditor extends AbstractMlAuditor { - public SystemAuditor(Client client, ClusterService clusterService) { + public SystemAuditor(Client client, ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver) { super( client, (resourceId, message, level, timestamp, nodeName) -> new SystemAuditMessage(message, level, timestamp, nodeName), - clusterService + clusterService, + indexNameExpressionResolver ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java index df97b39d2e397..594f72398bc9d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java @@ -52,7 +52,7 @@ public NativeStorageProvider(Environment environment, ByteSizeValue minDiskSpace */ public void cleanupLocalTmpStorageInCaseOfUncleanShutdown() { try { - for (Path p : environment.dataFiles()) { + for (Path p : environment.dataDirs()) { IOUtils.rm(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER)); } } catch (Exception e) { @@ -79,7 +79,7 @@ public Path tryGetLocalTmpStorage(String uniqueIdentifier, ByteSizeValue request } private Path tryAllocateStorage(String uniqueIdentifier, ByteSizeValue requestedSize) { - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { try { if (getUsableSpace(path) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes()) { Path tmpDirectory = path.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER).resolve(uniqueIdentifier); @@ -97,7 +97,7 @@ private Path tryAllocateStorage(String uniqueIdentifier, ByteSizeValue requested public boolean localTmpStorageHasEnoughSpace(Path path, ByteSizeValue requestedSize) { Path realPath = path.toAbsolutePath(); - for (Path p : environment.dataFiles()) { + for (Path p : environment.dataDirs()) { try { if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) { return getUsableSpace(p) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes(); @@ -122,7 +122,7 @@ public void cleanupLocalTmpStorage(String uniqueIdentifier) throws IOException { if (path != null) { // do not allow to breakout from the tmp storage provided Path realPath = path.toAbsolutePath(); - for (Path p : environment.dataFiles()) { + for (Path p : 
environment.dataDirs()) { if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) { IOUtils.rm(path); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java index 6b09e38b02ea6..9f8378a5b0087 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/ProcessPipes.java @@ -94,7 +94,7 @@ public ProcessPipes( ) { this.namedPipeHelper = namedPipeHelper; this.jobId = jobId; - this.tempDir = env.tmpFile(); + this.tempDir = env.tmpDir(); this.timeout = timeout; // The way the pipe names are formed MUST match what is done in the controller main() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java index 650c5d92e7589..e2bed63e18b32 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java @@ -34,7 +34,9 @@ public class RestFlushJobAction extends BaseRestHandler { @Override public List routes() { - return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/_flush")); + final String msg = "Forcing any buffered data to be processed is deprecated, " + + "in a future major version it will be compulsory to use a datafeed"; + return List.of(Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/_flush").deprecateAndKeep(msg).build()); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java index 0fcad773100ff..443e15a6018e3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; @@ -27,19 +26,11 @@ public class RestPostDataAction extends BaseRestHandler { private static final String DEFAULT_RESET_START = ""; private static final String DEFAULT_RESET_END = ""; - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) - // these routes were ".deprecated" in RestApiVersion.V_8 which will require use of REST API compatibility headers to access - // this API in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will - // continue to emit deprecations warnings but will not require any special headers to access the API in v9. - // Please review and update the code and tests as needed. The original code remains commented out below for reference. 
@Override public List routes() { final String msg = "Posting data directly to anomaly detection jobs is deprecated, " + "in a future major version it will be compulsory to use a datafeed"; - return List.of( - // Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecated(msg, RestApiVersion.V_8).build(), - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecateAndKeep(msg).build() - ); + return List.of(Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecateAndKeep(msg).build()); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java index 7e0ff4f029bd4..33b5bc7bf9ebb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java @@ -67,9 +67,7 @@ public static List verifyIndicesPrimaryShardsAreActive( continue; } IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(clusterState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { unavailableIndices.add(index); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java index 6a5e328d7530a..84b00aca81f71 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelper.java @@ -78,7 +78,7 @@ public String getDefaultPipeDirectoryPrefix(Environment env) { // All these factors need to align for everything to work in production. If any changes // are made here then CNamedPipeFactory::defaultPath() in the C++ code will probably // also need to be changed. - return env.tmpFile().toString() + PathUtils.getDefaultFileSystem().getSeparator(); + return env.tmpDir().toString() + PathUtils.getDefaultFileSystem().getSeparator(); } /** diff --git a/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..ff8f2a8f73eac --- /dev/null +++ b/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,2 @@ +org.elasticsearch.ml: + - manage_threads diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdateTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdateTests.java new file mode 100644 index 0000000000000..b203d756c3214 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdateTests.java @@ -0,0 +1,254 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; +import org.elasticsearch.action.admin.indices.rollover.RolloverAction; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetadata; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class MlAnomaliesIndexUpdateTests extends ESTestCase { + + public void testIsAnomaliesWriteAlias() { + assertTrue(MlAnomaliesIndexUpdate.isAnomaliesWriteAlias(AnomalyDetectorsIndex.resultsWriteAlias("foo"))); + assertFalse(MlAnomaliesIndexUpdate.isAnomaliesWriteAlias(AnomalyDetectorsIndex.jobResultsAliasedName("foo"))); + assertFalse(MlAnomaliesIndexUpdate.isAnomaliesWriteAlias("some-index")); + } + + public void testIsAnomaliesAlias() { + assertTrue(MlAnomaliesIndexUpdate.isAnomaliesReadAlias(AnomalyDetectorsIndex.jobResultsAliasedName("foo"))); + assertFalse(MlAnomaliesIndexUpdate.isAnomaliesReadAlias(AnomalyDetectorsIndex.resultsWriteAlias("foo"))); + assertFalse(MlAnomaliesIndexUpdate.isAnomaliesReadAlias("some-index")); + } + + public void testIsAbleToRun_IndicesDoNotExist() { + RoutingTable.Builder routingTable = RoutingTable.builder(); + var updater = new MlAnomaliesIndexUpdate(TestIndexNameExpressionResolver.newInstance(), mock(Client.class)); + + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.routingTable(routingTable.build()); + assertTrue(updater.isAbleToRun(csBuilder.build())); + } + + public void testIsAbleToRun_IndicesHaveNoRouting() { + IndexMetadata.Builder indexMetadata = IndexMetadata.builder(".ml-anomalies-shared"); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, 
IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.routingTable(RoutingTable.builder().build()); // no routing to index + csBuilder.metadata(metadata); + + var updater = new MlAnomaliesIndexUpdate(TestIndexNameExpressionResolver.newInstance(), mock(Client.class)); + + assertFalse(updater.isAbleToRun(csBuilder.build())); + } + + public void testBuildIndexAliasesRequest() { + var anomaliesIndex = ".ml-anomalies-sharedindex"; + var jobs = List.of("job1", "job2"); + IndexMetadata.Builder indexMetadata = createSharedResultsIndex(anomaliesIndex, IndexVersion.current(), jobs); + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var updater = new MlAnomaliesIndexUpdate( + TestIndexNameExpressionResolver.newInstance(), + new OriginSettingClient(mock(Client.class), "doesn't matter") + ); + + IndicesAliasesRequestBuilder aliasRequestBuilder = new IndicesAliasesRequestBuilder(mock(ElasticsearchClient.class)); + + var newIndex = anomaliesIndex + "-000001"; + var request = updater.addIndexAliasesRequests(aliasRequestBuilder, anomaliesIndex, newIndex, csBuilder.build()); + var actions = request.request().getAliasActions(); + assertThat(actions, hasSize(6)); + + // The order in which the alias actions are created + // is not preserved so look for the item in the list + for (var job : jobs) { + var expected = new AliasActionMatcher( + AnomalyDetectorsIndex.resultsWriteAlias(job), + newIndex, + IndicesAliasesRequest.AliasActions.Type.ADD + ); + assertThat(actions.stream().filter(expected::matches).count(), equalTo(1L)); + + expected = new AliasActionMatcher( + AnomalyDetectorsIndex.resultsWriteAlias(job), + anomaliesIndex, + IndicesAliasesRequest.AliasActions.Type.REMOVE + ); + assertThat(actions.stream().filter(expected::matches).count(), equalTo(1L)); + + expected = new AliasActionMatcher( + AnomalyDetectorsIndex.jobResultsAliasedName(job), + newIndex, + IndicesAliasesRequest.AliasActions.Type.ADD + ); + assertThat(actions.stream().filter(expected::matches).count(), equalTo(1L)); + } + } + + public void testRunUpdate_UpToDateIndices() { + String indexName = ".ml-anomalies-sharedindex"; + var jobs = List.of("job1", "job2"); + IndexMetadata.Builder indexMetadata = createSharedResultsIndex(indexName, IndexVersion.current(), jobs); + + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var client = mock(Client.class); + var updater = new MlAnomaliesIndexUpdate(TestIndexNameExpressionResolver.newInstance(), client); + updater.runUpdate(csBuilder.build()); + // everything up to date so no action for the client + verify(client).settings(); + verify(client).threadPool(); + verifyNoMoreInteractions(client); + } + + public void testRunUpdate_LegacyIndex() { + String indexName = ".ml-anomalies-sharedindex"; + var jobs = List.of("job1", "job2"); + IndexMetadata.Builder indexMetadata = createSharedResultsIndex(indexName, IndexVersions.V_7_17_0, jobs); + + Metadata.Builder metadata = Metadata.builder(); + 
metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var client = mockClientWithRolloverAndAlias(indexName); + var updater = new MlAnomaliesIndexUpdate(TestIndexNameExpressionResolver.newInstance(), client); + + updater.runUpdate(csBuilder.build()); + verify(client).settings(); + verify(client, times(7)).threadPool(); + verify(client, times(2)).execute(same(TransportIndicesAliasesAction.TYPE), any(), any()); // create rollover alias and update + verify(client).execute(same(RolloverAction.INSTANCE), any(), any()); // index rolled over + verifyNoMoreInteractions(client); + } + + private record AliasActionMatcher(String aliasName, String index, IndicesAliasesRequest.AliasActions.Type actionType) { + boolean matches(IndicesAliasesRequest.AliasActions aliasAction) { + return aliasAction.actionType() == actionType + && aliasAction.aliases()[0].equals(aliasName) + && aliasAction.indices()[0].equals(index); + } + } + + private IndexMetadata.Builder createSharedResultsIndex(String indexName, IndexVersion indexVersion, List<String> jobs) { + IndexMetadata.Builder indexMetadata = IndexMetadata.builder(indexName); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, indexVersion) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + + for (var jobId : jobs) { + indexMetadata.putAlias(AliasMetadata.builder(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).isHidden(true).build()); + indexMetadata.putAlias( + AliasMetadata.builder(AnomalyDetectorsIndex.resultsWriteAlias(jobId)).writeIndex(true).isHidden(true).build() + ); + } + + return indexMetadata; + } + + @SuppressWarnings("unchecked") + static Client mockClientWithRolloverAndAlias(String indexName) { + var client = mock(Client.class); + + var aliasRequestCount = new AtomicInteger(0); + + doAnswer(invocationOnMock -> { + ActionListener<RolloverResponse> actionListener = (ActionListener<RolloverResponse>) invocationOnMock.getArguments()[2]; + actionListener.onResponse(new RolloverResponse(indexName, indexName + "-new", Map.of(), false, true, true, true, true)); + return null; + }).when(client).execute(same(RolloverAction.INSTANCE), any(RolloverRequest.class), any(ActionListener.class)); + + doAnswer(invocationOnMock -> { + ActionListener<IndicesAliasesResponse> actionListener = (ActionListener<IndicesAliasesResponse>) invocationOnMock + .getArguments()[2]; + var request = (IndicesAliasesRequest) invocationOnMock.getArguments()[1]; + // Check the rollover alias is created and deleted + if (aliasRequestCount.getAndIncrement() == 0) { + var addAliasAction = new AliasActionMatcher( + indexName + ".rollover_alias", + indexName, + IndicesAliasesRequest.AliasActions.Type.ADD + ); + assertEquals(1L, request.getAliasActions().stream().filter(addAliasAction::matches).count()); + } else { + var removeAliasAction = new AliasActionMatcher( + indexName + ".rollover_alias", + indexName + "-new", + IndicesAliasesRequest.AliasActions.Type.REMOVE + ); + assertEquals(1L, request.getAliasActions().stream().filter(removeAliasAction::matches).count()); + } + + actionListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); + + return null; + }).when(client).execute(same(TransportIndicesAliasesAction.TYPE), any(IndicesAliasesRequest.class), any(ActionListener.class)); + + var threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); +
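+ // Note: both doAnswer stubs above complete the ActionListener argument (index 2) synchronously,
+ // so the rollover/alias chain runs inline on the test thread. The empty ThreadContext stubbed
+ // below is presumably what the origin-setting client wrapper needs to stash the calling context.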
when(client.threadPool()).thenReturn(threadPool); + + return client; + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java index aa59028a4cc0d..491b20f0a2d3e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java @@ -258,7 +258,7 @@ public void testIsCompatibleIndexVersion() { } @SuppressWarnings("unchecked") - private Client mockClientWithRolloverAndAlias() { + static Client mockClientWithRolloverAndAlias() { var client = mock(Client.class); doAnswer(invocationOnMock -> { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java index bb973bf4359e8..105911d8e4bfd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.ml.action; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.bulk.FailureStoreMetrics; @@ -31,6 +32,7 @@ import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; +import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import org.junit.Before; import java.time.Instant; @@ -95,7 +97,12 @@ public Map getProcessors(Processor.Parameters paramet when(licenseState.isAllowed(MachineLearningField.ML_API_FEATURE)).thenReturn(true); factoryMap.put( InferenceProcessor.TYPE, - new InferenceProcessor.Factory(parameters.client, parameters.ingestService.getClusterService(), Settings.EMPTY, true) + new InferenceProcessor.Factory( + parameters.client, + parameters.ingestService.getClusterService(), + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ) ); factoryMap.put("not_inference", new NotInferenceProcessor.Factory()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeDetectorTests.java index 36076bbb0ec25..9a0338b90156b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeDetectorTests.java @@ -192,7 +192,7 @@ public void testMultipleChanges() { ChangeType type = new ChangeDetector(bucketValues).detect(0.05); tp += type instanceof ChangeType.TrendChange ? 
1 : 0; } - assertThat(tp, greaterThan(90)); + assertThat(tp, greaterThan(80)); } public void testProblemDistributionChange() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java index 637a9f73cbcbb..7ffddc9721bdf 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.ml.inference.ingest; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchStatusException; @@ -59,6 +60,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdate; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import org.junit.Before; import java.io.IOException; @@ -109,279 +111,263 @@ public void setUpVariables() { } public void testCreateProcessorWithTooManyExisting() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.builder().put(InferenceProcessor.MAX_INFERENCE_PROCESSORS.getKey(), 1).build(), - includeNodeInfo - ); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.builder().put(InferenceProcessor.MAX_INFERENCE_PROCESSORS.getKey(), 1).build(), + new SetOnce<>(mock(InferenceAuditor.class)) + ); - try { - processorFactory.accept(buildClusterStateWithModelReferences("model1")); - } catch (IOException ioe) { - throw new AssertionError(ioe.getMessage()); - } + try { + processorFactory.accept(buildClusterStateWithModelReferences("model1")); + } catch (IOException ioe) { + throw new AssertionError(ioe.getMessage()); + } - ElasticsearchStatusException ex = expectThrows( - ElasticsearchStatusException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, Collections.emptyMap()) - ); + ElasticsearchStatusException ex = expectThrows( + ElasticsearchStatusException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, Collections.emptyMap()) + ); - assertThat( - ex.getMessage(), - equalTo( - "Max number of inference processors reached, total inference processors [1]. " - + "Adjust the setting [xpack.ml.max_inference_processors]: [1] if a greater number is desired." - ) - ); - }); + assertThat( + ex.getMessage(), + equalTo( + "Max number of inference processors reached, total inference processors [1]. " + + "Adjust the setting [xpack.ml.max_inference_processors]: [1] if a greater number is desired." 
+ ) + ); } public void testCreateProcessorWithInvalidInferenceConfig() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); - Map config = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put(InferenceProcessor.INFERENCE_CONFIG, Collections.singletonMap("unknown_type", Collections.emptyMap())); - } - }; + Map config = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put(InferenceProcessor.INFERENCE_CONFIG, Collections.singletonMap("unknown_type", Collections.emptyMap())); + } + }; - ElasticsearchStatusException ex = expectThrows( - ElasticsearchStatusException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config) - ); - assertThat( - ex.getMessage(), - equalTo( - "unrecognized inference configuration type [unknown_type]." - + " Supported types [classification, regression, fill_mask, ner, pass_through, " - + "question_answering, text_classification, text_embedding, text_expansion, " - + "text_similarity, zero_shot_classification]" - ) - ); + ElasticsearchStatusException ex = expectThrows( + ElasticsearchStatusException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config) + ); + assertThat( + ex.getMessage(), + equalTo( + "unrecognized inference configuration type [unknown_type]." 
+ + " Supported types [classification, regression, fill_mask, ner, pass_through, " + + "question_answering, text_classification, text_embedding, text_expansion, " + + "text_similarity, zero_shot_classification]" + ) + ); - Map config2 = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put(InferenceProcessor.INFERENCE_CONFIG, Collections.singletonMap("regression", "boom")); - } - }; - ex = expectThrows( - ElasticsearchStatusException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config2) - ); - assertThat(ex.getMessage(), equalTo("inference_config must be an object with one inference type mapped to an object.")); + Map config2 = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put(InferenceProcessor.INFERENCE_CONFIG, Collections.singletonMap("regression", "boom")); + } + }; + ex = expectThrows( + ElasticsearchStatusException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config2) + ); + assertThat(ex.getMessage(), equalTo("inference_config must be an object with one inference type mapped to an object.")); - Map config3 = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put(InferenceProcessor.INFERENCE_CONFIG, Collections.emptyMap()); - } - }; - ex = expectThrows( - ElasticsearchStatusException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config3) - ); - assertThat(ex.getMessage(), equalTo("inference_config must be an object with one inference type mapped to an object.")); - }); + Map config3 = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put(InferenceProcessor.INFERENCE_CONFIG, Collections.emptyMap()); + } + }; + ex = expectThrows( + ElasticsearchStatusException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config3) + ); + assertThat(ex.getMessage(), equalTo("inference_config must be an object with one inference type mapped to an object.")); } public void testCreateProcessorWithTooOldMinNodeVersion() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); - try { - processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); - } catch (IOException ioe) { - throw new AssertionError(ioe.getMessage()); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); + try { + processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); + } catch (IOException ioe) { + throw new AssertionError(ioe.getMessage()); + } + Map regression = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + 
put(InferenceProcessor.TARGET_FIELD, "result"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap(RegressionConfig.NAME.getPreferredName(), Collections.emptyMap()) + ); } - Map regression = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap(RegressionConfig.NAME.getPreferredName(), Collections.emptyMap()) - ); - } - }; + }; + + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) + ); + assertThat( + ex.getMessage(), + equalTo("Configuration [regression] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + ); + + Map classification = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap( + ClassificationConfig.NAME.getPreferredName(), + Collections.singletonMap(ClassificationConfig.NUM_TOP_CLASSES.getPreferredName(), 1) + ) + ); + } + }; + + ex = expectThrows( + ElasticsearchException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, classification) + ); + assertThat( + ex.getMessage(), + equalTo("Configuration [classification] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + ); + } + public void testCreateProcessorWithTooOldMinNodeVersionNlp() throws IOException { + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); + try { + processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); + } catch (IOException ioe) { + throw new AssertionError(ioe.getMessage()); + } + + for (String name : List.of( + FillMaskConfig.NAME, + NerConfig.NAME, + PassThroughConfig.NAME, + QuestionAnsweringConfig.NAME, + TextClassificationConfig.NAME, + TextEmbeddingConfig.NAME, + TextExpansionConfigUpdate.NAME, + TextSimilarityConfig.NAME, + ZeroShotClassificationConfig.NAME + )) { ElasticsearchException ex = expectThrows( ElasticsearchException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) + () -> processorFactory.inferenceConfigUpdateFromMap(Map.of(name, Map.of())) ); assertThat( ex.getMessage(), - equalTo("Configuration [regression] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + equalTo("Configuration [" + name + "] requires minimum node version [8.0.0] (current minimum node version [7.5.0]") ); + } - Map classification = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap( - ClassificationConfig.NAME.getPreferredName(), - Collections.singletonMap(ClassificationConfig.NUM_TOP_CLASSES.getPreferredName(), 1) - ) - ); - } - }; - - ex = expectThrows( + for (String name : List.of(ClassificationConfig.NAME.getPreferredName(), RegressionConfig.NAME.getPreferredName())) { + ElasticsearchException ex = expectThrows( 
ElasticsearchException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, classification) + () -> processorFactory.inferenceConfigUpdateFromMap(Map.of(name, Map.of())) ); assertThat( ex.getMessage(), - equalTo("Configuration [classification] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + equalTo("Configuration [" + name + "] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") ); - }); + } } - public void testCreateProcessorWithTooOldMinNodeVersionNlp() throws IOException { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); - try { - processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); - } catch (IOException ioe) { - throw new AssertionError(ioe.getMessage()); - } + public void testCreateProcessor() { + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); - for (String name : List.of( - FillMaskConfig.NAME, - NerConfig.NAME, - PassThroughConfig.NAME, - QuestionAnsweringConfig.NAME, - TextClassificationConfig.NAME, - TextEmbeddingConfig.NAME, - TextExpansionConfigUpdate.NAME, - TextSimilarityConfig.NAME, - ZeroShotClassificationConfig.NAME - )) { - ElasticsearchException ex = expectThrows( - ElasticsearchException.class, - () -> processorFactory.inferenceConfigUpdateFromMap(Map.of(name, Map.of())) - ); - assertThat( - ex.getMessage(), - equalTo("Configuration [" + name + "] requires minimum node version [8.0.0] (current minimum node version [7.5.0]") + Map regression = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap(RegressionConfig.NAME.getPreferredName(), Collections.emptyMap()) ); } + }; - for (String name : List.of(ClassificationConfig.NAME.getPreferredName(), RegressionConfig.NAME.getPreferredName())) { - ElasticsearchException ex = expectThrows( - ElasticsearchException.class, - () -> processorFactory.inferenceConfigUpdateFromMap(Map.of(name, Map.of())) - ); - assertThat( - ex.getMessage(), - equalTo("Configuration [" + name + "] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + var processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression); + assertFalse(processor.isConfiguredWithInputsFields()); + assertEquals("my_model", processor.getModelId()); + assertEquals("result", processor.getTargetField()); + assertThat(processor.getFieldMap().entrySet(), empty()); + assertNull(processor.getInputs()); + + Map classification = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap( + ClassificationConfig.NAME.getPreferredName(), + Collections.singletonMap(ClassificationConfig.NUM_TOP_CLASSES.getPreferredName(), 1) + ) ); } - }); - } - - public void testCreateProcessor() { - Set includeNodeInfoValues = new 
HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); - - Map regression = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap(RegressionConfig.NAME.getPreferredName(), Collections.emptyMap()) - ); - } - }; - - var processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression); - assertEquals(includeNodeInfo, processor.getAuditor().includeNodeInfo()); - assertFalse(processor.isConfiguredWithInputsFields()); - assertEquals("my_model", processor.getModelId()); - assertEquals("result", processor.getTargetField()); - assertThat(processor.getFieldMap().entrySet(), empty()); - assertNull(processor.getInputs()); - - Map classification = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap( - ClassificationConfig.NAME.getPreferredName(), - Collections.singletonMap(ClassificationConfig.NUM_TOP_CLASSES.getPreferredName(), 1) - ) - ); - } - }; + }; - processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, classification); - assertFalse(processor.isConfiguredWithInputsFields()); + processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, classification); + assertFalse(processor.isConfiguredWithInputsFields()); - Map mininmal = new HashMap<>() { - { - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - } - }; + Map mininmal = new HashMap<>() { + { + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + } + }; - processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, mininmal); - assertFalse(processor.isConfiguredWithInputsFields()); - assertEquals("my_model", processor.getModelId()); - assertEquals("result", processor.getTargetField()); - assertNull(processor.getInputs()); - }); + processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, mininmal); + assertFalse(processor.isConfiguredWithInputsFields()); + assertEquals("my_model", processor.getModelId()); + assertEquals("result", processor.getTargetField()); + assertNull(processor.getInputs()); } public void testCreateProcessorWithFieldMap() { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory(client, clusterService, Settings.EMPTY, false); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); Map config = new HashMap<>() { { @@ -406,7 +392,12 @@ public void testCreateProcessorWithFieldMap() { } public void testCreateProcessorWithInputOutputs() { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory(client, clusterService, Settings.EMPTY, false); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); Map config = 
new HashMap<>(); config.put(InferenceProcessor.MODEL_ID, "my_model"); @@ -436,101 +427,90 @@ public void testCreateProcessorWithInputOutputs() { } public void testCreateProcessorWithDuplicateFields() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); - Map regression = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "ml"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap( - RegressionConfig.NAME.getPreferredName(), - Collections.singletonMap(RegressionConfig.RESULTS_FIELD.getPreferredName(), "warning") - ) - ); - } - }; + Map regression = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "ml"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap( + RegressionConfig.NAME.getPreferredName(), + Collections.singletonMap(RegressionConfig.RESULTS_FIELD.getPreferredName(), "warning") + ) + ); + } + }; - Exception ex = expectThrows( - Exception.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) - ); - assertThat(ex.getMessage(), equalTo("Invalid inference config. " + "More than one field is configured as [warning]")); - }); + Exception ex = expectThrows( + Exception.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) + ); + assertThat(ex.getMessage(), equalTo("Invalid inference config. 
" + "More than one field is configured as [warning]")); } public void testCreateProcessorWithIgnoreMissing() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); - Map regression = new HashMap<>() { - { - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put("ignore_missing", Boolean.TRUE); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap( - RegressionConfig.NAME.getPreferredName(), - Collections.singletonMap(RegressionConfig.RESULTS_FIELD.getPreferredName(), "warning") - ) - ); - } - }; + Map regression = new HashMap<>() { + { + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put("ignore_missing", Boolean.TRUE); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap( + RegressionConfig.NAME.getPreferredName(), + Collections.singletonMap(RegressionConfig.RESULTS_FIELD.getPreferredName(), "warning") + ) + ); + } + }; - Exception ex = expectThrows( - Exception.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) - ); - assertThat(ex.getMessage(), equalTo("Invalid inference config. " + "More than one field is configured as [warning]")); - }); + Exception ex = expectThrows( + Exception.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) + ); + assertThat(ex.getMessage(), equalTo("Invalid inference config. 
" + "More than one field is configured as [warning]")); } public void testParseInferenceConfigFromMap() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo + + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); + for (var nameAndMap : List.of( + Tuple.tuple(ClassificationConfig.NAME.getPreferredName(), Map.of()), + Tuple.tuple(RegressionConfig.NAME.getPreferredName(), Map.of()), + Tuple.tuple(FillMaskConfig.NAME, Map.of()), + Tuple.tuple(NerConfig.NAME, Map.of()), + Tuple.tuple(PassThroughConfig.NAME, Map.of()), + Tuple.tuple(TextClassificationConfig.NAME, Map.of()), + Tuple.tuple(TextEmbeddingConfig.NAME, Map.of()), + Tuple.tuple(TextExpansionConfig.NAME, Map.of()), + Tuple.tuple(ZeroShotClassificationConfig.NAME, Map.of()), + Tuple.tuple(QuestionAnsweringConfig.NAME, Map.of("question", "What is the answer to life, the universe and everything?")) + )) { + assertThat( + processorFactory.inferenceConfigUpdateFromMap(Map.of(nameAndMap.v1(), nameAndMap.v2())).getName(), + equalTo(nameAndMap.v1()) ); - for (var nameAndMap : List.of( - Tuple.tuple(ClassificationConfig.NAME.getPreferredName(), Map.of()), - Tuple.tuple(RegressionConfig.NAME.getPreferredName(), Map.of()), - Tuple.tuple(FillMaskConfig.NAME, Map.of()), - Tuple.tuple(NerConfig.NAME, Map.of()), - Tuple.tuple(PassThroughConfig.NAME, Map.of()), - Tuple.tuple(TextClassificationConfig.NAME, Map.of()), - Tuple.tuple(TextEmbeddingConfig.NAME, Map.of()), - Tuple.tuple(TextExpansionConfig.NAME, Map.of()), - Tuple.tuple(ZeroShotClassificationConfig.NAME, Map.of()), - Tuple.tuple(QuestionAnsweringConfig.NAME, Map.of("question", "What is the answer to life, the universe and everything?")) - )) { - assertThat( - processorFactory.inferenceConfigUpdateFromMap(Map.of(nameAndMap.v1(), nameAndMap.v2())).getName(), - equalTo(nameAndMap.v1()) - ); - } - }); + } } public void testCreateProcessorWithIncompatibleTargetFieldSetting() { @@ -538,7 +518,7 @@ public void testCreateProcessorWithIncompatibleTargetFieldSetting() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); Map input = new HashMap<>() { @@ -574,7 +554,7 @@ public void testCreateProcessorWithIncompatibleResultFieldSetting() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); Map input = new HashMap<>() { @@ -616,7 +596,7 @@ public void testCreateProcessorWithInputFields() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); Map inputMap1 = new HashMap<>() { @@ -683,7 +663,7 @@ public void testCreateProcessorWithInputFieldSingleOrList() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); for (var isList : new boolean[] { true, false }) { @@ -727,7 +707,7 @@ public void testCreateProcessorWithInputFieldWrongType() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); { @@ -784,7 +764,7 @@ public void testParsingInputFields() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); int numInputs = randomIntBetween(1, 3); @@ 
-808,7 +788,7 @@ public void testParsingInputFieldsDuplicateFieldNames() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); int numInputs = 2; @@ -860,7 +840,7 @@ public void testParsingInputFieldsGivenNoInputs() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); var e = expectThrows(ElasticsearchParseException.class, () -> processorFactory.parseInputFields("my_processor", List.of())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java index 64251c05af7c8..d88e1235241d8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java @@ -62,6 +62,7 @@ import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.job.JobNodeSelector; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; +import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.junit.Before; @@ -309,7 +310,7 @@ private OpenJobPersistentTasksExecutor createExecutor(Settings settings) { client, TestIndexNameExpressionResolver.newInstance(), licenseState, - true + mock(AnomalyDetectionAuditor.class) ); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java index f2a4add8444bb..22a6ff630f2bc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/NativeStorageProviderTests.java @@ -123,7 +123,7 @@ public void testTmpStorageCleanupOnStart() throws IOException { private NativeStorageProvider createNativeStorageProvider(Map paths) throws IOException { Environment environment = mock(Environment.class); - when(environment.dataFiles()).thenReturn(paths.keySet().toArray(new Path[paths.size()])); + when(environment.dataDirs()).thenReturn(paths.keySet().toArray(new Path[paths.size()])); NativeStorageProvider storageProvider = spy(new NativeStorageProvider(environment, ByteSizeValue.ofGb(5))); doAnswer( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java index d09f0cbb59c1b..fc1b5abc04fbb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/NamedPipeHelperTests.java @@ -67,7 +67,7 @@ public void testOpenForInputGivenPipeIsRegularFile() throws IOException { Environment env = TestEnvironment.newEnvironment( Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() ); - Path tempFile = Files.createTempFile(env.tmpFile(), "not a named pipe", null); + Path tempFile = Files.createTempFile(env.tmpDir(), "not a named pipe", null); IOException ioe = ESTestCase.expectThrows( IOException.class, @@ -83,7 +83,7 @@ public void 
testOpenForOutputGivenPipeIsRegularFile() throws IOException { Environment env = TestEnvironment.newEnvironment( Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() ); - Path tempFile = Files.createTempFile(env.tmpFile(), "not a named pipe", null); + Path tempFile = Files.createTempFile(env.tmpDir(), "not a named pipe", null); IOException ioe = ESTestCase.expectThrows( IOException.class, diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java index cfd322d04e92f..6a76d6749489a 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @@ -77,7 +77,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring * mappings that point to the corresponding ECS fields. */ - public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 19; + public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 21; private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version"; private static final String STACK_TEMPLATE_VERSION = "8"; private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version"; diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverIT.java new file mode 100644 index 0000000000000..f98231a647470 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverIT.java @@ -0,0 +1,838 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.InnerHitBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; +import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; +import org.elasticsearch.search.retriever.KnnRetrieverBuilder; +import org.elasticsearch.search.retriever.StandardRetrieverBuilder; +import org.elasticsearch.search.retriever.TestRetrieverBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; +import org.elasticsearch.search.vectors.QueryVectorBuilder; +import org.elasticsearch.search.vectors.TestQueryVectorBuilderPlugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.rank.rrf.RRFRankPlugin; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +@ESIntegTestCase.ClusterScope(minNumDataNodes = 2) +public class LinearRetrieverIT extends ESIntegTestCase { + + protected static String INDEX = "test_index"; + protected static final String DOC_FIELD = "doc"; + protected static final String TEXT_FIELD = "text"; + protected static final String VECTOR_FIELD = "vector"; + protected static final String TOPIC_FIELD = "topic"; + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return List.of(RRFRankPlugin.class); + } + + @Before + public void setup() throws Exception { + setupIndex(); + } + + protected void setupIndex() { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 1, + "element_type": "float", + "similarity": "l2_norm", + "index": true, + "index_options": { + "type": "flat" + } + }, + "text": { + "type": "text" + }, + "doc": { + "type": "keyword" + }, + "topic": { + "type": "keyword" + }, + "views": { + "type": "nested", + "properties": { + "last30d": { + "type": "integer" + }, + "all": { + "type": "integer" + } + } + } + } + } + """; + createIndex(INDEX,
Settings.builder().put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)).build()); + admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); + indexDoc(INDEX, "doc_1", DOC_FIELD, "doc_1", TOPIC_FIELD, "technology", TEXT_FIELD, "term"); + indexDoc( + INDEX, + "doc_2", + DOC_FIELD, + "doc_2", + TOPIC_FIELD, + "astronomy", + TEXT_FIELD, + "search term term", + VECTOR_FIELD, + new float[] { 2.0f } + ); + indexDoc(INDEX, "doc_3", DOC_FIELD, "doc_3", TOPIC_FIELD, "technology", VECTOR_FIELD, new float[] { 3.0f }); + indexDoc(INDEX, "doc_4", DOC_FIELD, "doc_4", TOPIC_FIELD, "technology", TEXT_FIELD, "term term term term"); + indexDoc(INDEX, "doc_5", DOC_FIELD, "doc_5", TOPIC_FIELD, "science", TEXT_FIELD, "irrelevant stuff"); + indexDoc( + INDEX, + "doc_6", + DOC_FIELD, + "doc_6", + TEXT_FIELD, + "search term term term term term term", + VECTOR_FIELD, + new float[] { 6.0f } + ); + indexDoc( + INDEX, + "doc_7", + DOC_FIELD, + "doc_7", + TOPIC_FIELD, + "biology", + TEXT_FIELD, + "term term term term term term term", + VECTOR_FIELD, + new float[] { 7.0f } + ); + refresh(INDEX); + } + + public void testLinearRetrieverWithAggs() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + // this one retrieves docs 2 and 6 due to prefilter + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + + // all requests would have an equal weight and use the identity normalizer + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + source.size(1); + source.aggregation(AggregationBuilders.terms("topic_agg").field(TOPIC_FIELD)); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(1)); + 
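+ // A worked check on why doc_2 wins, assuming the per-retriever scores spelled out in the later
+ // tests: with unit weights and the identity normalizer the combined score is a plain sum, and
+ // doc_2 gets 9.0 (standard0) + 20.0 (standard1) + 1.0 (knn) = 30.0, the highest of the six matches.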
assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); + + assertNotNull(resp.getAggregations()); + assertNotNull(resp.getAggregations().get("topic_agg")); + Terms terms = resp.getAggregations().get("topic_agg"); + + assertThat(terms.getBucketByKey("technology").getDocCount(), equalTo(3L)); + assertThat(terms.getBucketByKey("astronomy").getDocCount(), equalTo(1L)); + assertThat(terms.getBucketByKey("biology").getDocCount(), equalTo(1L)); + }); + } + + public void testLinearWithCollapse() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + // with scores 1, 0.5, 0.05882353, 0.03846154 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + // final ranking with no-normalizer would be: doc 2, 6, 1, 4, 7, 3 + // doc 1: 10 + // doc 2: 9 + 20 + 1 = 30 + // doc 3: 0.5 + // doc 4: 8 + // doc 6: 7 + 5 + 0.05882353 = 12.05882353 + // doc 7: 6 + 0.03846154 = 6.03846154 + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + source.collapse( + new CollapseBuilder(TOPIC_FIELD).setInnerHits( + new InnerHitBuilder("a").addSort(new FieldSortBuilder(DOC_FIELD).order(SortOrder.DESC)).setSize(10) + ) + ); + source.fetchField(TOPIC_FIELD); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(4)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); + assertThat(resp.getHits().getAt(0).getScore(), equalTo(30f)); + assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); + assertThat((double) resp.getHits().getAt(1).getScore(), closeTo(12.0588f, 0.0001f)); + 
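+ // Worked check for the 12.0588 above: doc_6 sums 7.0 (standard0) + 5.0 (standard1) + ~0.0588 from
+ // knn, since l2_norm similarity scores vector [6.0] against query [2.0] as 1 / (1 + 4^2) = 1/17.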
assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(2).getScore(), equalTo(10f)); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(0).getId(), equalTo("doc_4")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(1).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_7")); + assertThat((double) resp.getHits().getAt(3).getScore(), closeTo(6.0384f, 0.0001f)); + }); + } + + public void testLinearRetrieverWithCollapseAndAggs() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + // with scores 1, 0.5, 0.05882353, 0.03846154 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + // final ranking with no-normalizer would be: doc 2, 6, 1, 4, 7, 3 + // doc 1: 10 + // doc 2: 9 + 20 + 1 = 30 + // doc 3: 0.5 + // doc 4: 8 + // doc 6: 7 + 5 + 0.05882353 = 12.05882353 + // doc 7: 6 + 0.03846154 = 6.03846154 + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + source.collapse( + new CollapseBuilder(TOPIC_FIELD).setInnerHits( + new InnerHitBuilder("a").addSort(new FieldSortBuilder(DOC_FIELD).order(SortOrder.DESC)).setSize(10) + ) + ); + source.fetchField(TOPIC_FIELD); + source.aggregation(AggregationBuilders.terms("topic_agg").field(TOPIC_FIELD)); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(4)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); + 
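+ // The collapsed hit order tracks the unweighted sums in the comments above: doc_2 (30.0),
+ // doc_6 (~12.06), doc_1 (10.0, standing in for the collapsed "technology" group), doc_7 (~6.04).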
assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); + assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(0).getId(), equalTo("doc_4")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(1).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_7")); + + assertNotNull(resp.getAggregations()); + assertNotNull(resp.getAggregations().get("topic_agg")); + Terms terms = resp.getAggregations().get("topic_agg"); + + assertThat(terms.getBucketByKey("technology").getDocCount(), equalTo(3L)); + assertThat(terms.getBucketByKey("astronomy").getDocCount(), equalTo(1L)); + assertThat(terms.getBucketByKey("biology").getDocCount(), equalTo(1L)); + }); + } + + public void testMultipleLinearRetrievers() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource( + // this one returns docs doc 2, 1, 6, 4, 7 + // with scores 38, 20, 19, 16, 12 + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null) + ), + rankWindowSize, + new float[] { 2.0f, 1.0f }, + new ScoreNormalizer[] { IdentityScoreNormalizer.INSTANCE, IdentityScoreNormalizer.INSTANCE } + ), + null + ), + // this one bring just doc 7 which should be ranked first eventually with a score of 100 + new CompoundRetrieverBuilder.RetrieverSource( + new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 7.0f }, null, 1, 100, null, null), + null + ) + ), + rankWindowSize, + new float[] { 1.0f, 100.0f }, + new ScoreNormalizer[] { IdentityScoreNormalizer.INSTANCE, IdentityScoreNormalizer.INSTANCE } + ) + ); + + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); + assertThat(resp.getHits().getTotalHits().relation(), 
equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_7")); + assertThat(resp.getHits().getAt(0).getScore(), equalTo(112f)); + assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); + assertThat(resp.getHits().getAt(1).getScore(), equalTo(38f)); + assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(2).getScore(), equalTo(20f)); + assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_6")); + assertThat(resp.getHits().getAt(3).getScore(), equalTo(19f)); + assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_4")); + assertThat(resp.getHits().getAt(4).getScore(), equalTo(16f)); + }); + } + + public void testLinearExplainWithNamedRetrievers() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + standard0.retrieverName("my_custom_retriever"); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + // with scores 1, 0.5, 0.05882353, 0.03846154 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + // final ranking with no-normalizer would be: doc 2, 6, 1, 4, 7, 3 + // doc 1: 10 + // doc 2: 9 + 20 + 1 = 30 + // doc 3: 0.5 + // doc 4: 8 + // doc 6: 7 + 5 + 0.05882353 = 12.05882353 + // doc 7: 6 + 0.03846154 = 6.03846154 + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + source.explain(true); + source.size(1); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(1)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); + assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); + 
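+        // Sketch (illustrative only) of the explanation tree that the assertions below traverse:
+        //   sum of:
+        //     weighted linear combination score: [30.0] computed for normalized scores
+        //       [9.0, 20.0, 1.0] and weights [1.0, 1.0, 1.0] as sum of (weight[i] * score[i]) ...
+        //         weighted score: [9.0] in query at index [0] [my_custom_retriever] ...
+        //         weighted score: [20.0] in query at index [1] ...
+        //         weighted score: [1.0] in query at index [2] ...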
assertThat(resp.getHits().getAt(0).getExplanation().getDescription(), containsString("sum of:"));
+        assertThat(resp.getHits().getAt(0).getExplanation().getDetails().length, equalTo(2));
+        var linearDetails = resp.getHits().getAt(0).getExplanation().getDetails()[0];
+        assertThat(linearDetails.getDetails().length, equalTo(3));
+        assertThat(
+            linearDetails.getDescription(),
+            equalTo(
+                "weighted linear combination score: [30.0] computed for normalized scores [9.0, 20.0, 1.0] "
+                    + "and weights [1.0, 1.0, 1.0] as sum of (weight[i] * score[i]) for each query."
+            )
+        );
+
+        assertThat(
+            linearDetails.getDetails()[0].getDescription(),
+            containsString(
+                "weighted score: [9.0] in query at index [0] [my_custom_retriever] computed as [1.0 * 9.0] "
+                    + "using score normalizer [none] for original matching query with score"
+            )
+        );
+        assertThat(
+            linearDetails.getDetails()[1].getDescription(),
+            containsString(
+                "weighted score: [20.0] in query at index [1] computed as [1.0 * 20.0] using score normalizer [none] "
+                    + "for original matching query with score:"
+            )
+        );
+        assertThat(
+            linearDetails.getDetails()[2].getDescription(),
+            containsString(
+                "weighted score: [1.0] in query at index [2] computed as [1.0 * 1.0] using score normalizer [none] "
+                    + "for original matching query with score"
+            )
+        );
+        });
+    }
+
+    public void testLinearExplainWithAnotherNestedLinear() {
+        final int rankWindowSize = 100;
+        SearchSourceBuilder source = new SearchSourceBuilder();
+        // this one retrieves docs 1, 2, 4, 6, and 7
+        // with scores 10, 9, 8, 7, 6
+        StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(
+            QueryBuilders.boolQuery()
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L))
+        );
+        standard0.retrieverName("my_custom_retriever");
+        // this one retrieves docs 2 and 6 due to prefilter
+        // with scores 20, 5
+        StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(
+            QueryBuilders.boolQuery()
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L))
+        );
+        standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD));
+        // this one retrieves docs 2, 3, 6, and 7
+        // with scores 1, 0.5, 0.05882353, 0.03846154
+        KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null);
+        // final ranking with no-normalizer would be: doc 2, 6, 1, 4, 7, 3
+        // doc 1: 10
+        // doc 2: 9 + 20 + 1 = 30
+        // doc 3: 0.5
+        // doc 4: 8
+        // doc 6: 7 + 5 + 0.05882353 = 12.05882353
+        // doc 7: 6 + 0.03846154 = 6.03846154
+        LinearRetrieverBuilder nestedLinear = new LinearRetrieverBuilder(
+            Arrays.asList(
+                new CompoundRetrieverBuilder.RetrieverSource(standard0, null),
+                new CompoundRetrieverBuilder.RetrieverSource(standard1, null),
+                new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null)
+            ),
+            rankWindowSize
+        );
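+        // Worked arithmetic (illustrative): with the top-level weights [1.0, 5.0] used below,
+        //   nested linear, doc 6: 7 + 5 + 0.05882353 = 12.05882353
+        //   top level,     doc 6: 1.0 * 12.05882353 + 5.0 * 20.0 = 112.05882...
+        // which matches the [112.05882] asserted in the explanation checks further down.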
+        nestedLinear.retrieverName("nested_linear");
+        // this one retrieves doc 6 with a score of 20, which the top-level weight of 5 turns into a contribution of 100
+        StandardRetrieverBuilder standard2 = new StandardRetrieverBuilder(
+            QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(20L)
+        );
+        source.retriever(
+            new LinearRetrieverBuilder(
+                Arrays.asList(
+                    new CompoundRetrieverBuilder.RetrieverSource(nestedLinear, null),
+                    new CompoundRetrieverBuilder.RetrieverSource(standard2, null)
+                ),
+                rankWindowSize,
+                new float[] { 1, 5f },
+                new ScoreNormalizer[] { IdentityScoreNormalizer.INSTANCE, IdentityScoreNormalizer.INSTANCE }
+            )
+        );
+        source.explain(true);
+        source.size(1);
+        SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source);
+        ElasticsearchAssertions.assertResponse(req, resp -> {
+            assertNull(resp.pointInTimeId());
+            assertNotNull(resp.getHits().getTotalHits());
+            assertThat(resp.getHits().getTotalHits().value(), equalTo(6L));
+            assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO));
+            assertThat(resp.getHits().getHits().length, equalTo(1));
+            assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6"));
+            assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true));
+            assertThat(resp.getHits().getAt(0).getExplanation().getDescription(), containsString("sum of:"));
+            assertThat(resp.getHits().getAt(0).getExplanation().getDetails().length, equalTo(2));
+            var linearTopLevel = resp.getHits().getAt(0).getExplanation().getDetails()[0];
+            assertThat(linearTopLevel.getDetails().length, equalTo(2));
+            assertThat(
+                linearTopLevel.getDescription(),
+                containsString(
+                    "weighted linear combination score: [112.05882] computed for normalized scores [12.058824, 20.0] "
+                        + "and weights [1.0, 5.0] as sum of (weight[i] * score[i]) for each query."
+                )
+            );
+            assertThat(linearTopLevel.getDetails()[0].getDescription(), containsString("weighted score: [12.058824]"));
+            assertThat(linearTopLevel.getDetails()[0].getDescription(), containsString("nested_linear"));
+            assertThat(linearTopLevel.getDetails()[1].getDescription(), containsString("weighted score: [100.0]"));
+
+            var linearNested = linearTopLevel.getDetails()[0];
+            assertThat(linearNested.getDetails()[0].getDetails().length, equalTo(3));
+            assertThat(linearNested.getDetails()[0].getDetails()[0].getDescription(), containsString("weighted score: [7.0]"));
+            assertThat(linearNested.getDetails()[0].getDetails()[1].getDescription(), containsString("weighted score: [5.0]"));
+            assertThat(linearNested.getDetails()[0].getDetails()[2].getDescription(), containsString("weighted score: [0.05882353]"));
+
+            // details[1] belongs to standard2, the second top-level sub-retriever
+            var standard2Details = linearTopLevel.getDetails()[1];
+            assertThat(standard2Details.getDetails()[0].getDescription(), containsString("ConstantScore"));
+        });
+    }
+
+    public void testLinearInnerRetrieverAll4xxSearchErrors() {
+        final int rankWindowSize = 100;
+        SearchSourceBuilder source = new SearchSourceBuilder();
+        // this will throw a 4xx error during evaluation (range queries are not supported on dense_vector fields)
+        StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(
+            QueryBuilders.constantScoreQuery(QueryBuilders.rangeQuery(VECTOR_FIELD).gte(10))
+        );
+        StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(
+            QueryBuilders.boolQuery()
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L))
+        );
+        standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD));
+        source.retriever(
+            new LinearRetrieverBuilder(
+                Arrays.asList(
+                    new CompoundRetrieverBuilder.RetrieverSource(standard0, null),
+                    new CompoundRetrieverBuilder.RetrieverSource(standard1, null)
+                ),
+                rankWindowSize
+            )
+        );
+        SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source);
+        Exception ex = expectThrows(ElasticsearchStatusException.class, req::get);
+        assertThat(ex, instanceOf(ElasticsearchStatusException.class));
+        assertThat(
+            ex.getMessage(),
+            containsString(
+                "[linear] search failed - retrievers '[standard]' returned errors. All failures are attached as suppressed exceptions."
+ ) + ); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.BAD_REQUEST)); + assertThat(ex.getSuppressed().length, equalTo(1)); + assertThat(ex.getSuppressed()[0].getCause().getCause(), instanceOf(IllegalArgumentException.class)); + } + + public void testLinearInnerRetrieverMultipleErrorsOne5xx() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this will throw a 4xx error during evaluation + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.constantScoreQuery(QueryBuilders.rangeQuery(VECTOR_FIELD).gte(10)) + ); + // this will throw a 5xx error + TestRetrieverBuilder testRetrieverBuilder = new TestRetrieverBuilder("val") { + @Override + public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { + searchSourceBuilder.aggregation(AggregationBuilders.avg("some_invalid_param")); + } + }; + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(testRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + Exception ex = expectThrows(ElasticsearchStatusException.class, req::get); + assertThat(ex, instanceOf(ElasticsearchStatusException.class)); + assertThat( + ex.getMessage(), + containsString( + "[linear] search failed - retrievers '[standard, test]' returned errors. " + + "All failures are attached as suppressed exceptions." + ) + ); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); + assertThat(ex.getSuppressed().length, equalTo(2)); + assertThat(ex.getSuppressed()[0].getCause().getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(ex.getSuppressed()[1].getCause().getCause(), instanceOf(IllegalStateException.class)); + } + + public void testLinearInnerRetrieverErrorWhenExtractingToSource() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + TestRetrieverBuilder failingRetriever = new TestRetrieverBuilder("some value") { + @Override + public QueryBuilder topDocsQuery() { + return QueryBuilders.matchAllQuery(); + } + + @Override + public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { + throw new UnsupportedOperationException("simulated failure"); + } + }; + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(failingRetriever, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null) + ), + rankWindowSize + ) + ); + source.size(1); + expectThrows(UnsupportedOperationException.class, () -> client().prepareSearch(INDEX).setSource(source).get()); + } + + public void testLinearInnerRetrieverErrorOnTopDocs() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + TestRetrieverBuilder failingRetriever = new 
TestRetrieverBuilder("some value") {
+            @Override
+            public QueryBuilder topDocsQuery() {
+                throw new UnsupportedOperationException("simulated failure");
+            }
+
+            @Override
+            public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) {
+                searchSourceBuilder.query(QueryBuilders.matchAllQuery());
+            }
+        };
+        StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(
+            QueryBuilders.boolQuery()
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L))
+                .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L))
+        );
+        standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD));
+        source.retriever(
+            new LinearRetrieverBuilder(
+                Arrays.asList(
+                    new CompoundRetrieverBuilder.RetrieverSource(failingRetriever, null),
+                    new CompoundRetrieverBuilder.RetrieverSource(standard1, null)
+                ),
+                rankWindowSize
+            )
+        );
+        source.size(1);
+        source.aggregation(AggregationBuilders.terms("topic_agg").field(TOPIC_FIELD));
+        expectThrows(UnsupportedOperationException.class, () -> client().prepareSearch(INDEX).setSource(source).get());
+    }
+
+    public void testLinearFiltersPropagatedToKnnQueryVectorBuilder() {
+        final int rankWindowSize = 100;
+        SearchSourceBuilder source = new SearchSourceBuilder();
+        // this would retrieve all docs, but only doc 7 makes it through the top-level filter
+        StandardRetrieverBuilder standardRetriever = new StandardRetrieverBuilder(QueryBuilders.matchAllQuery());
+        // this one also retrieves just doc 7
+        KnnRetrieverBuilder knnRetriever = new KnnRetrieverBuilder(
+            "vector",
+            null,
+            new TestQueryVectorBuilderPlugin.TestQueryVectorBuilder(new float[] { 3 }),
+            10,
+            10,
+            null,
+            null
+        );
+        source.retriever(
+            new LinearRetrieverBuilder(
+                Arrays.asList(
+                    new CompoundRetrieverBuilder.RetrieverSource(standardRetriever, null),
+                    new CompoundRetrieverBuilder.RetrieverSource(knnRetriever, null)
+                ),
+                rankWindowSize
+            )
+        );
+        source.retriever().getPreFilterQueryBuilders().add(QueryBuilders.boolQuery().must(QueryBuilders.termQuery(DOC_FIELD, "doc_7")));
+        source.size(10);
+        SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source);
+        ElasticsearchAssertions.assertResponse(req, resp -> {
+            assertNull(resp.pointInTimeId());
+            assertNotNull(resp.getHits().getTotalHits());
+            assertThat(resp.getHits().getTotalHits().value(), equalTo(1L));
+            assertThat(resp.getHits().getHits()[0].getId(), equalTo("doc_7"));
+        });
+    }
+
+    public void testRewriteOnce() {
+        final float[] vector = new float[] { 1 };
+        AtomicInteger numAsyncCalls = new AtomicInteger();
+        QueryVectorBuilder vectorBuilder = new QueryVectorBuilder() {
+            @Override
+            public void buildVector(Client client, ActionListener<float[]> listener) {
+                numAsyncCalls.incrementAndGet();
+                listener.onResponse(vector);
+            }
+
+            @Override
+            public String getWriteableName() {
+                throw new IllegalStateException("Should not be called");
+            }
+
+            @Override
+            public TransportVersion getMinimalSupportedVersion() {
+                throw new IllegalStateException("Should not be called");
+            }
+
+            @Override
+            public void writeTo(StreamOutput out) throws IOException {
+                throw new IllegalStateException("Should not be called");
+            }
+
+            @Override
+            public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+                throw new IllegalStateException("Should not be called");
+            }
+        };
+        var knn = new KnnRetrieverBuilder("vector", null, vectorBuilder, 10, 10, null, null);
+        var standard = new StandardRetrieverBuilder(new KnnVectorQueryBuilder("vector", vectorBuilder, 10, 10, null));
+        var linear = new LinearRetrieverBuilder(
+            List.of(new CompoundRetrieverBuilder.RetrieverSource(knn, null), new CompoundRetrieverBuilder.RetrieverSource(standard, null)),
+            10
+        );
+        assertResponse(
+            client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(linear)),
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(4L))
+        );
+        assertThat(numAsyncCalls.get(), equalTo(2));
+
+        // check that we use the rewritten vector to build the explain query
+        assertResponse(
+            client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(linear).explain(true)),
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(4L))
+        );
+        assertThat(numAsyncCalls.get(), equalTo(4));
+    }
+}
diff --git a/x-pack/plugin/rank-rrf/src/main/java/module-info.java b/x-pack/plugin/rank-rrf/src/main/java/module-info.java index 4fd2a7e4d54f3..fbe467fdf3eae 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/module-info.java +++ b/x-pack/plugin/rank-rrf/src/main/java/module-info.java @@ -5,7 +5,7 @@ * 2.0. */ -import org.elasticsearch.xpack.rank.rrf.RRFFeatures; +import org.elasticsearch.xpack.rank.RankRRFFeatures; module org.elasticsearch.rank.rrf { requires org.apache.lucene.core; @@ -14,7 +14,9 @@ requires org.elasticsearch.server; requires org.elasticsearch.xcore; + exports org.elasticsearch.xpack.rank; exports org.elasticsearch.xpack.rank.rrf; + exports org.elasticsearch.xpack.rank.linear; - provides org.elasticsearch.features.FeatureSpecification with RRFFeatures; + provides org.elasticsearch.features.FeatureSpecification with RankRRFFeatures; } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/RankRRFFeatures.java similarity index 65% rename from x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java rename to x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/RankRRFFeatures.java index 494eaa508c14a..5966e17f20429 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/RankRRFFeatures.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.rank.rrf; +package org.elasticsearch.xpack.rank; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -14,10 +14,14 @@ import static org.elasticsearch.search.retriever.CompoundRetrieverBuilder.INNER_RETRIEVERS_FILTER_SUPPORT; -/** - * A set of features specifically for the rrf plugin.
- */ -public class RRFFeatures implements FeatureSpecification { +public class RankRRFFeatures implements FeatureSpecification { + + public static final NodeFeature LINEAR_RETRIEVER_SUPPORTED = new NodeFeature("linear_retriever_supported"); + + @Override + public Set getFeatures() { + return Set.of(LINEAR_RETRIEVER_SUPPORTED); + } @Override public Set getTestFeatures() { diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/IdentityScoreNormalizer.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/IdentityScoreNormalizer.java new file mode 100644 index 0000000000000..15af17a1db4ef --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/IdentityScoreNormalizer.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.ScoreDoc; + +public class IdentityScoreNormalizer extends ScoreNormalizer { + + public static final IdentityScoreNormalizer INSTANCE = new IdentityScoreNormalizer(); + + public static final String NAME = "none"; + + @Override + public String getName() { + return NAME; + } + + @Override + public ScoreDoc[] normalizeScores(ScoreDoc[] docs) { + return docs; + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRankDoc.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRankDoc.java new file mode 100644 index 0000000000000..bb1c420bbd06c --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRankDoc.java @@ -0,0 +1,143 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.Explanation; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +import static org.elasticsearch.xpack.rank.linear.LinearRetrieverBuilder.DEFAULT_SCORE; +import static org.elasticsearch.xpack.rank.linear.LinearRetrieverComponent.DEFAULT_NORMALIZER; +import static org.elasticsearch.xpack.rank.linear.LinearRetrieverComponent.DEFAULT_WEIGHT; + +public class LinearRankDoc extends RankDoc { + + public static final String NAME = "linear_rank_doc"; + + final float[] weights; + final String[] normalizers; + public float[] normalizedScores; + + public LinearRankDoc(int doc, float score, int shardIndex) { + super(doc, score, shardIndex); + this.weights = null; + this.normalizers = null; + } + + public LinearRankDoc(int doc, float score, int shardIndex, float[] weights, String[] normalizers) { + super(doc, score, shardIndex); + this.weights = weights; + this.normalizers = normalizers; + } + + public LinearRankDoc(StreamInput in) throws IOException { + super(in); + weights = in.readOptionalFloatArray(); + normalizedScores = in.readOptionalFloatArray(); + normalizers = in.readOptionalStringArray(); + } + + @Override + public Explanation explain(Explanation[] sources, String[] queryNames) { + assert normalizedScores != null && weights != null && normalizers != null; + assert normalizedScores.length == sources.length; + + Explanation[] details = new Explanation[sources.length]; + for (int i = 0; i < sources.length; i++) { + final String queryAlias = queryNames[i] == null ? "" : " [" + queryNames[i] + "]"; + final String queryIdentifier = "at index [" + i + "]" + queryAlias; + final float weight = weights == null ? DEFAULT_WEIGHT : weights[i]; + final float normalizedScore = normalizedScores == null ? DEFAULT_SCORE : normalizedScores[i]; + final String normalizer = normalizers == null ? DEFAULT_NORMALIZER.getName() : normalizers[i]; + if (normalizedScore > 0) { + details[i] = Explanation.match( + weight * normalizedScore, + "weighted score: [" + + weight * normalizedScore + + "] in query " + + queryIdentifier + + " computed as [" + + weight + + " * " + + normalizedScore + + "]" + + " using score normalizer [" + + normalizer + + "]" + + " for original matching query with score:", + sources[i] + ); + } else { + final String description = "weighted score: [0], result not found in query " + queryIdentifier; + details[i] = Explanation.noMatch(description); + } + } + return Explanation.match( + score, + "weighted linear combination score: [" + + score + + "] computed for normalized scores " + + Arrays.toString(normalizedScores) + + (weights == null ? 
"" : " and weights " + Arrays.toString(weights)) + + " as sum of (weight[i] * score[i]) for each query.", + details + ); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeOptionalFloatArray(weights); + out.writeOptionalFloatArray(normalizedScores); + out.writeOptionalStringArray(normalizers); + } + + @Override + protected void doToXContent(XContentBuilder builder, Params params) throws IOException { + if (weights != null) { + builder.field("weights", weights); + } + if (normalizedScores != null) { + builder.field("normalizedScores", normalizedScores); + } + if (normalizers != null) { + builder.field("normalizers", normalizers); + } + } + + @Override + public boolean doEquals(RankDoc rd) { + LinearRankDoc lrd = (LinearRankDoc) rd; + return Arrays.equals(weights, lrd.weights) + && Arrays.equals(normalizedScores, lrd.normalizedScores) + && Arrays.equals(normalizers, lrd.normalizers); + } + + @Override + public int doHashCode() { + int result = Objects.hash(Arrays.hashCode(weights), Arrays.hashCode(normalizedScores), Arrays.hashCode(normalizers)); + return 31 * result; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.LINEAR_RETRIEVER_SUPPORT; + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilder.java new file mode 100644 index 0000000000000..66bbbf95bc9d6 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilder.java @@ -0,0 +1,208 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.ScoreDoc; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.rank.rrf.RRFRankPlugin; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.rank.RankRRFFeatures.LINEAR_RETRIEVER_SUPPORTED; +import static org.elasticsearch.xpack.rank.linear.LinearRetrieverComponent.DEFAULT_WEIGHT; + +/** + * The {@code LinearRetrieverBuilder} supports the combination of different retrievers through a weighted linear combination. + * For example, assume that we have retrievers r1 and r2, the final score of the {@code LinearRetrieverBuilder} is defined as + * {@code score(r)=w1*score(r1) + w2*score(r2)}. + * Each sub-retriever score can be normalized before being considered for the weighted linear sum, by setting the appropriate + * normalizer parameter. + * + */ +public final class LinearRetrieverBuilder extends CompoundRetrieverBuilder { + + public static final String NAME = "linear"; + + public static final ParseField RETRIEVERS_FIELD = new ParseField("retrievers"); + + public static final float DEFAULT_SCORE = 0f; + + private final float[] weights; + private final ScoreNormalizer[] normalizers; + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + false, + args -> { + List retrieverComponents = (List) args[0]; + int rankWindowSize = args[1] == null ? 
RankBuilder.DEFAULT_RANK_WINDOW_SIZE : (int) args[1]; + List innerRetrievers = new ArrayList<>(); + float[] weights = new float[retrieverComponents.size()]; + ScoreNormalizer[] normalizers = new ScoreNormalizer[retrieverComponents.size()]; + int index = 0; + for (LinearRetrieverComponent component : retrieverComponents) { + innerRetrievers.add(new RetrieverSource(component.retriever, null)); + weights[index] = component.weight; + normalizers[index] = component.normalizer; + index++; + } + return new LinearRetrieverBuilder(innerRetrievers, rankWindowSize, weights, normalizers); + } + ); + + static { + PARSER.declareObjectArray(constructorArg(), LinearRetrieverComponent::fromXContent, RETRIEVERS_FIELD); + PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); + RetrieverBuilder.declareBaseParserFields(NAME, PARSER); + } + + private static float[] getDefaultWeight(int size) { + float[] weights = new float[size]; + Arrays.fill(weights, DEFAULT_WEIGHT); + return weights; + } + + private static ScoreNormalizer[] getDefaultNormalizers(int size) { + ScoreNormalizer[] normalizers = new ScoreNormalizer[size]; + Arrays.fill(normalizers, IdentityScoreNormalizer.INSTANCE); + return normalizers; + } + + public static LinearRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException { + if (context.clusterSupportsFeature(LINEAR_RETRIEVER_SUPPORTED) == false) { + throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + NAME + "]"); + } + if (RRFRankPlugin.LINEAR_RETRIEVER_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { + throw LicenseUtils.newComplianceException("linear retriever"); + } + return PARSER.apply(parser, context); + } + + LinearRetrieverBuilder(List innerRetrievers, int rankWindowSize) { + this(innerRetrievers, rankWindowSize, getDefaultWeight(innerRetrievers.size()), getDefaultNormalizers(innerRetrievers.size())); + } + + public LinearRetrieverBuilder( + List innerRetrievers, + int rankWindowSize, + float[] weights, + ScoreNormalizer[] normalizers + ) { + super(innerRetrievers, rankWindowSize); + if (weights.length != innerRetrievers.size()) { + throw new IllegalArgumentException("The number of weights must match the number of inner retrievers"); + } + if (normalizers.length != innerRetrievers.size()) { + throw new IllegalArgumentException("The number of normalizers must match the number of inner retrievers"); + } + this.weights = weights; + this.normalizers = normalizers; + } + + @Override + protected LinearRetrieverBuilder clone(List newChildRetrievers, List newPreFilterQueryBuilders) { + LinearRetrieverBuilder clone = new LinearRetrieverBuilder(newChildRetrievers, rankWindowSize, weights, normalizers); + clone.preFilterQueryBuilders = newPreFilterQueryBuilders; + clone.retrieverName = retrieverName; + return clone; + } + + @Override + protected SearchSourceBuilder finalizeSourceBuilder(SearchSourceBuilder sourceBuilder) { + sourceBuilder.trackScores(true); + return sourceBuilder; + } + + @Override + protected RankDoc[] combineInnerRetrieverResults(List rankResults, boolean isExplain) { + Map docsToRankResults = Maps.newMapWithExpectedSize(rankWindowSize); + final String[] normalizerNames = Arrays.stream(normalizers).map(ScoreNormalizer::getName).toArray(String[]::new); + for (int result = 0; result < rankResults.size(); result++) { + final ScoreNormalizer normalizer = normalizers[result] == null ? 
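+            /*
+             * Illustrative sketch of this combination step (a hypothetical docA, not tied to any
+             * specific request): each sub-retriever's top docs are normalized and folded into a
+             * single LinearRankDoc per (doc, shardIndex) key. With weights [1.0, 2.0] and identity
+             * normalizers, if query 0 scores docA at 0.5 and query 1 scores docA at 3.0, then
+             *   docA.score = 1.0 * 0.5 + 2.0 * 3.0 = 6.5
+             * A NaN score (the doc was not returned by that query) contributes DEFAULT_SCORE (0),
+             * and a NaN weight falls back to DEFAULT_WEIGHT.
+             */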
IdentityScoreNormalizer.INSTANCE : normalizers[result]; + ScoreDoc[] originalScoreDocs = rankResults.get(result); + ScoreDoc[] normalizedScoreDocs = normalizer.normalizeScores(originalScoreDocs); + for (int scoreDocIndex = 0; scoreDocIndex < normalizedScoreDocs.length; scoreDocIndex++) { + LinearRankDoc rankDoc = docsToRankResults.computeIfAbsent( + new RankDoc.RankKey(originalScoreDocs[scoreDocIndex].doc, originalScoreDocs[scoreDocIndex].shardIndex), + key -> { + if (isExplain) { + LinearRankDoc doc = new LinearRankDoc(key.doc(), 0f, key.shardIndex(), weights, normalizerNames); + doc.normalizedScores = new float[rankResults.size()]; + return doc; + } else { + return new LinearRankDoc(key.doc(), 0f, key.shardIndex()); + } + } + ); + if (isExplain) { + rankDoc.normalizedScores[result] = normalizedScoreDocs[scoreDocIndex].score; + } + // if we do not have scores associated with this result set, just ignore its contribution to the final + // score computation by setting its score to 0. + final float docScore = false == Float.isNaN(normalizedScoreDocs[scoreDocIndex].score) + ? normalizedScoreDocs[scoreDocIndex].score + : DEFAULT_SCORE; + final float weight = Float.isNaN(weights[result]) ? DEFAULT_WEIGHT : weights[result]; + rankDoc.score += weight * docScore; + } + } + // sort the results based on the final score, tiebreaker based on smaller doc id + LinearRankDoc[] sortedResults = docsToRankResults.values().toArray(LinearRankDoc[]::new); + Arrays.sort(sortedResults); + // trim the results if needed, otherwise each shard will always return `rank_window_size` results. + LinearRankDoc[] topResults = new LinearRankDoc[Math.min(rankWindowSize, sortedResults.length)]; + for (int rank = 0; rank < topResults.length; ++rank) { + topResults[rank] = sortedResults[rank]; + topResults[rank].rank = rank + 1; + } + return topResults; + } + + @Override + public String getName() { + return NAME; + } + + public void doToXContent(XContentBuilder builder, Params params) throws IOException { + int index = 0; + if (innerRetrievers.isEmpty() == false) { + builder.startArray(RETRIEVERS_FIELD.getPreferredName()); + for (var entry : innerRetrievers) { + builder.startObject(); + builder.field(LinearRetrieverComponent.RETRIEVER_FIELD.getPreferredName(), entry.retriever()); + builder.field(LinearRetrieverComponent.WEIGHT_FIELD.getPreferredName(), weights[index]); + builder.field(LinearRetrieverComponent.NORMALIZER_FIELD.getPreferredName(), normalizers[index].getName()); + builder.endObject(); + index++; + } + builder.endArray(); + } + builder.field(RANK_WINDOW_SIZE_FIELD.getPreferredName(), rankWindowSize); + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverComponent.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverComponent.java new file mode 100644 index 0000000000000..bb0d79d3fe488 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverComponent.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.rank.linear; + +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class LinearRetrieverComponent implements ToXContentObject { + + public static final ParseField RETRIEVER_FIELD = new ParseField("retriever"); + public static final ParseField WEIGHT_FIELD = new ParseField("weight"); + public static final ParseField NORMALIZER_FIELD = new ParseField("normalizer"); + + static final float DEFAULT_WEIGHT = 1f; + static final ScoreNormalizer DEFAULT_NORMALIZER = IdentityScoreNormalizer.INSTANCE; + + RetrieverBuilder retriever; + float weight; + ScoreNormalizer normalizer; + + public LinearRetrieverComponent(RetrieverBuilder retrieverBuilder, Float weight, ScoreNormalizer normalizer) { + assert retrieverBuilder != null; + this.retriever = retrieverBuilder; + this.weight = weight == null ? DEFAULT_WEIGHT : weight; + this.normalizer = normalizer == null ? DEFAULT_NORMALIZER : normalizer; + if (this.weight < 0) { + throw new IllegalArgumentException("[weight] must be non-negative"); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(RETRIEVER_FIELD.getPreferredName(), retriever); + builder.field(WEIGHT_FIELD.getPreferredName(), weight); + builder.field(NORMALIZER_FIELD.getPreferredName(), normalizer.getName()); + return builder; + } + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "retriever-component", + false, + args -> { + RetrieverBuilder retrieverBuilder = (RetrieverBuilder) args[0]; + Float weight = (Float) args[1]; + ScoreNormalizer normalizer = (ScoreNormalizer) args[2]; + return new LinearRetrieverComponent(retrieverBuilder, weight, normalizer); + } + ); + + static { + PARSER.declareNamedObject(constructorArg(), (p, c, n) -> { + RetrieverBuilder innerRetriever = p.namedObject(RetrieverBuilder.class, n, c); + c.trackRetrieverUsage(innerRetriever.getName()); + return innerRetriever; + }, RETRIEVER_FIELD); + PARSER.declareFloat(optionalConstructorArg(), WEIGHT_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ScoreNormalizer.valueOf(p.text()), + NORMALIZER_FIELD, + ObjectParser.ValueType.STRING + ); + } + + public static LinearRetrieverComponent fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException { + return PARSER.apply(parser, context); + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/MinMaxScoreNormalizer.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/MinMaxScoreNormalizer.java new file mode 100644 index 0000000000000..56b42b48a5d47 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/MinMaxScoreNormalizer.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.rank.linear;
+
+import org.apache.lucene.search.ScoreDoc;
+
+public class MinMaxScoreNormalizer extends ScoreNormalizer {
+
+    public static final MinMaxScoreNormalizer INSTANCE = new MinMaxScoreNormalizer();
+
+    public static final String NAME = "minmax";
+
+    private static final float EPSILON = 1e-6f;
+
+    public MinMaxScoreNormalizer() {}
+
+    @Override
+    public String getName() {
+        return NAME;
+    }
+
+    @Override
+    public ScoreDoc[] normalizeScores(ScoreDoc[] docs) {
+        if (docs.length == 0) {
+            return docs;
+        }
+        // create a new array to avoid changing ScoreDocs in place
+        ScoreDoc[] scoreDocs = new ScoreDoc[docs.length];
+        float min = Float.MAX_VALUE;
+        // Lucene scores are non-negative, so the smallest positive float is a safe initial max
+        float max = Float.MIN_VALUE;
+        boolean atLeastOneValidScore = false;
+        for (ScoreDoc rd : docs) {
+            if (false == atLeastOneValidScore && false == Float.isNaN(rd.score)) {
+                atLeastOneValidScore = true;
+            }
+            if (rd.score > max) {
+                max = rd.score;
+            }
+            if (rd.score < min) {
+                min = rd.score;
+            }
+        }
+        if (false == atLeastOneValidScore) {
+            // we do not have any scores to normalize, so we just return the original array
+            return docs;
+        }
+
+        boolean minEqualsMax = Math.abs(min - max) < EPSILON;
+        for (int i = 0; i < docs.length; i++) {
+            float score;
+            if (minEqualsMax) {
+                // degenerate case: all scores are (nearly) identical, so keep them as-is
+                // rather than dividing by a value close to zero
+                score = min;
+            } else {
+                score = (docs[i].score - min) / (max - min);
+            }
+            scoreDocs[i] = new ScoreDoc(docs[i].doc, score, docs[i].shardIndex);
+        }
+        return scoreDocs;
+    }
+}
diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/ScoreNormalizer.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/ScoreNormalizer.java new file mode 100644 index 0000000000000..48334b9adf957 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/ScoreNormalizer.java @@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.rank.linear;
+
+import org.apache.lucene.search.ScoreDoc;
+
+/**
+ * Defines how the scores of a sub-retriever's top documents are transformed before
+ * they enter the weighted linear combination.
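+ * <p>Illustrative example (assuming scores [10.0, 9.0, 5.0]): the "minmax" normalizer rescales
+ * each score to (score - min) / (max - min), yielding [1.0, 0.8, 0.0], while the "none"
+ * (identity) normalizer returns the scores unchanged.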
+ */ +public abstract class ScoreNormalizer { + + public static ScoreNormalizer valueOf(String normalizer) { + if (MinMaxScoreNormalizer.NAME.equalsIgnoreCase(normalizer)) { + return MinMaxScoreNormalizer.INSTANCE; + } else if (IdentityScoreNormalizer.NAME.equalsIgnoreCase(normalizer)) { + return IdentityScoreNormalizer.INSTANCE; + + } else { + throw new IllegalArgumentException("Unknown normalizer [" + normalizer + "]"); + } + } + + public abstract String getName(); + + public abstract ScoreDoc[] normalizeScores(ScoreDoc[] docs); +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java index 9404d863f1d28..251015b21ff50 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java @@ -17,6 +17,8 @@ import org.elasticsearch.search.rank.RankShardResult; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.rank.linear.LinearRankDoc; +import org.elasticsearch.xpack.rank.linear.LinearRetrieverBuilder; import java.util.List; @@ -28,6 +30,12 @@ public class RRFRankPlugin extends Plugin implements SearchPlugin { License.OperationMode.ENTERPRISE ); + public static final LicensedFeature.Momentary LINEAR_RETRIEVER_FEATURE = LicensedFeature.momentary( + null, + "linear-retriever", + License.OperationMode.ENTERPRISE + ); + public static final String NAME = "rrf"; @Override @@ -35,7 +43,8 @@ public List getNamedWriteables() { return List.of( new NamedWriteableRegistry.Entry(RankBuilder.class, NAME, RRFRankBuilder::new), new NamedWriteableRegistry.Entry(RankShardResult.class, NAME, RRFRankShardResult::new), - new NamedWriteableRegistry.Entry(RankDoc.class, RRFRankDoc.NAME, RRFRankDoc::new) + new NamedWriteableRegistry.Entry(RankDoc.class, RRFRankDoc.NAME, RRFRankDoc::new), + new NamedWriteableRegistry.Entry(RankDoc.class, LinearRankDoc.NAME, LinearRankDoc::new) ); } @@ -46,6 +55,9 @@ public List getNamedXContent() { @Override public List> getRetrievers() { - return List.of(new RetrieverSpec<>(new ParseField(NAME), RRFRetrieverBuilder::fromXContent)); + return List.of( + new RetrieverSpec<>(new ParseField(NAME), RRFRetrieverBuilder::fromXContent), + new RetrieverSpec<>(new ParseField(LinearRetrieverBuilder.NAME), LinearRetrieverBuilder::fromXContent) + ); } } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index 93445a9ce5ac9..a32f7ba1f923d 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -101,6 +101,7 @@ public String getName() { protected RRFRetrieverBuilder clone(List newRetrievers, List newPreFilterQueryBuilders) { RRFRetrieverBuilder clone = new RRFRetrieverBuilder(newRetrievers, this.rankWindowSize, this.rankConstant); clone.preFilterQueryBuilders = newPreFilterQueryBuilders; + clone.retrieverName = retrieverName; return clone; } diff --git a/x-pack/plugin/rank-rrf/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification 
b/x-pack/plugin/rank-rrf/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 605e999b66c66..528b7e35bee65 100644 --- a/x-pack/plugin/rank-rrf/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/x-pack/plugin/rank-rrf/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -5,4 +5,4 @@ # 2.0. # -org.elasticsearch.xpack.rank.rrf.RRFFeatures +org.elasticsearch.xpack.rank.RankRRFFeatures diff --git a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRankDocTests.java b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRankDocTests.java new file mode 100644 index 0000000000000..051aa6bddb4d7 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRankDocTests.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.linear; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.search.rank.AbstractRankDocWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.rank.rrf.RRFRankPlugin; + +import java.io.IOException; +import java.util.List; + +public class LinearRankDocTests extends AbstractRankDocWireSerializingTestCase { + + protected LinearRankDoc createTestRankDoc() { + int queries = randomIntBetween(2, 20); + float[] weights = new float[queries]; + String[] normalizers = new String[queries]; + float[] normalizedScores = new float[queries]; + for (int i = 0; i < queries; i++) { + weights[i] = randomFloat(); + normalizers[i] = randomAlphaOfLengthBetween(1, 10); + normalizedScores[i] = randomFloat(); + } + LinearRankDoc rankDoc = new LinearRankDoc(randomNonNegativeInt(), randomFloat(), randomIntBetween(0, 1), weights, normalizers); + rankDoc.rank = randomNonNegativeInt(); + rankDoc.normalizedScores = normalizedScores; + return rankDoc; + } + + @Override + protected List getAdditionalNamedWriteables() { + try (RRFRankPlugin rrfRankPlugin = new RRFRankPlugin()) { + return rrfRankPlugin.getNamedWriteables(); + } catch (IOException ex) { + throw new AssertionError("Failed to create RRFRankPlugin", ex); + } + } + + @Override + protected Writeable.Reader instanceReader() { + return LinearRankDoc::new; + } + + @Override + protected LinearRankDoc mutateInstance(LinearRankDoc instance) throws IOException { + LinearRankDoc mutated = new LinearRankDoc( + instance.doc, + instance.score, + instance.shardIndex, + instance.weights, + instance.normalizers + ); + mutated.normalizedScores = instance.normalizedScores; + mutated.rank = instance.rank; + if (frequently()) { + mutated.doc = randomValueOtherThan(instance.doc, ESTestCase::randomNonNegativeInt); + } + if (frequently()) { + mutated.score = randomValueOtherThan(instance.score, ESTestCase::randomFloat); + } + if (frequently()) { + mutated.shardIndex = randomValueOtherThan(instance.shardIndex, ESTestCase::randomNonNegativeInt); + } + if (frequently()) { + mutated.rank = randomValueOtherThan(instance.rank, ESTestCase::randomNonNegativeInt); + } + if (frequently()) { + for (int i = 0; i < mutated.normalizedScores.length; i++) { + if (frequently()) { + 
mutated.normalizedScores[i] = randomFloat(); + } + } + } + if (frequently()) { + for (int i = 0; i < mutated.weights.length; i++) { + if (frequently()) { + mutated.weights[i] = randomFloat(); + } + } + } + if (frequently()) { + for (int i = 0; i < mutated.normalizers.length; i++) { + if (frequently()) { + mutated.normalizers[i] = randomValueOtherThan(instance.normalizers[i], () -> randomAlphaOfLengthBetween(1, 10)); + } + } + } + return mutated; + } +} diff --git a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilderParsingTests.java b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilderParsingTests.java new file mode 100644 index 0000000000000..5cc66c6f50d3c --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilderParsingTests.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.linear; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.search.retriever.TestRetrieverBuilder; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.usage.SearchUsage; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static java.util.Collections.emptyList; + +public class LinearRetrieverBuilderParsingTests extends AbstractXContentTestCase { + private static List xContentRegistryEntries; + + @BeforeClass + public static void init() { + xContentRegistryEntries = new SearchModule(Settings.EMPTY, emptyList()).getNamedXContents(); + } + + @AfterClass + public static void afterClass() throws Exception { + xContentRegistryEntries = null; + } + + @Override + protected LinearRetrieverBuilder createTestInstance() { + int rankWindowSize = randomInt(100); + int num = randomIntBetween(1, 3); + List innerRetrievers = new ArrayList<>(); + float[] weights = new float[num]; + ScoreNormalizer[] normalizers = new ScoreNormalizer[num]; + for (int i = 0; i < num; i++) { + innerRetrievers.add( + new CompoundRetrieverBuilder.RetrieverSource(TestRetrieverBuilder.createRandomTestRetrieverBuilder(), null) + ); + weights[i] = randomFloat(); + normalizers[i] = randomScoreNormalizer(); + } + return new LinearRetrieverBuilder(innerRetrievers, rankWindowSize, weights, normalizers); + } + + @Override + protected LinearRetrieverBuilder doParseInstance(XContentParser parser) throws IOException { + return (LinearRetrieverBuilder) RetrieverBuilder.parseTopLevelRetrieverBuilder( + parser, + new RetrieverParserContext(new SearchUsage(), n -> true) + ); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + List entries = new ArrayList<>(xContentRegistryEntries); + entries.add( + new 
NamedXContentRegistry.Entry( + RetrieverBuilder.class, + TestRetrieverBuilder.TEST_SPEC.getName(), + (p, c) -> TestRetrieverBuilder.TEST_SPEC.getParser().fromXContent(p, (RetrieverParserContext) c), + TestRetrieverBuilder.TEST_SPEC.getName().getForRestApiVersion() + ) + ); + entries.add( + new NamedXContentRegistry.Entry( + RetrieverBuilder.class, + new ParseField(LinearRetrieverBuilder.NAME), + (p, c) -> LinearRetrieverBuilder.PARSER.apply(p, (RetrieverParserContext) c) + ) + ); + return new NamedXContentRegistry(entries); + } + + private static ScoreNormalizer randomScoreNormalizer() { + if (randomBoolean()) { + return MinMaxScoreNormalizer.INSTANCE; + } else { + return IdentityScoreNormalizer.INSTANCE; + } + } +} diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/LinearRankClientYamlTestSuiteIT.java b/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/LinearRankClientYamlTestSuiteIT.java new file mode 100644 index 0000000000000..8af4ae307a51a --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/LinearRankClientYamlTestSuiteIT.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.rrf; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; + +/** Runs yaml rest tests. 
*/ +public class LinearRankClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(2) + .module("mapper-extras") + .module("rank-rrf") + .module("lang-painless") + .module("x-pack-inference") + .setting("xpack.license.self_generated.type", "trial") + .plugin("inference-service-test") + .build(); + + public LinearRankClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(new String[] { "linear" }); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml index cd227eec4e227..42d0fa1998246 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml @@ -111,3 +111,43 @@ setup: - match: { status: 403 } - match: { error.type: security_exception } - match: { error.reason: "current license is non-compliant for [Reciprocal Rank Fusion (RRF)]" } + + +--- +"linear retriever invalid license": + - requires: + cluster_features: [ "linear_retriever_supported" ] + reason: "Support for linear retriever" + + - do: + catch: forbidden + search: + index: test + body: + track_total_hits: false + fields: [ "text" ] + retriever: + linear: + retrievers: [ + { + knn: { + field: vector, + query_vector: [ 0.0 ], + k: 3, + num_candidates: 3 + } + }, + { + standard: { + query: { + term: { + text: term + } + } + } + } + ] + + - match: { status: 403 } + - match: { error.type: security_exception } + - match: { error.reason: "current license is non-compliant for [linear retriever]" } diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/linear/10_linear_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/linear/10_linear_retriever.yml new file mode 100644 index 0000000000000..70db6c1543365 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/linear/10_linear_retriever.yml @@ -0,0 +1,1065 @@ +setup: + - requires: + cluster_features: [ "linear_retriever_supported" ] + reason: "Support for linear retriever" + test_runner_features: close_to + + - do: + indices.create: + index: test + body: + mappings: + properties: + vector: + type: dense_vector + dims: 1 + index: true + similarity: l2_norm + index_options: + type: flat + keyword: + type: keyword + other_keyword: + type: keyword + timestamp: + type: date + + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {"_id": 1 }}' + - '{"vector": [1], "keyword": "one", "other_keyword": "other", "timestamp": "2021-01-01T00:00:00"}' + - '{"index": {"_id": 2 }}' + - '{"vector": [2], "keyword": "two", "timestamp": "2022-01-01T00:00:00"}' + - '{"index": {"_id": 3 }}' + - '{"vector": [3], "keyword": "three", "timestamp": "2023-01-01T00:00:00"}' + - '{"index": {"_id": 4 }}' + - '{"vector": [4], "keyword": "four", "other_keyword": "other", "timestamp": "2024-01-01T00:00:00"}' + +--- +"basic linear weighted combination of a standard and knn retrievers": + - do: + search: + index: test + body: + retriever: + linear: + 
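+          # Illustrative arithmetic (not part of the assertions): with the default "none"
+          # normalizer, doc 1 scores 10.0 * 0.5 = 5.0 via the standard retriever, and doc 4
+          # scores 1.0 * 2.0 = 2.0 via the knn retriever (an exact vector match under l2_norm
+          # similarity yields 1.0), matching the _score checks below.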
retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 5.0 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1._score: 2.0 } + +--- +"basic linear weighted combination - interleaved results": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + # this one will return docs 1 and doc 2 with scores 20 and 10 respectively + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 2 + }, + { + # this one will return docs 3 and doc 4 with scores 15 and 12 respectively + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "four" + } + } + }, + boost: 4.0 + } + } + ] + } + } + } + }, + weight: 3 + } + ] + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 20.0 } + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.1._score: 15.0 } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2._score: 12.0 } + - match: { hits.hits.3._id: "2" } + - match: { hits.hits.3._score: 10.0 } + +--- +"should normalize initial scores": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._id: "1" } + - match: {hits.hits.0._score: 10.0} + - match: { hits.hits.1._id: "2" } + - match: {hits.hits.1._score: 8.0} + - match: { hits.hits.2._id: "4" } + - match: {hits.hits.2._score: 2.0} + - match: { hits.hits.2._score: 2.0 } + - match: { hits.hits.3._id: "3" } + - close_to: { hits.hits.3._score: { value: 0.0, error: 0.001 } } + +--- +"should throw on unknown normalizer": + - do: + catch: /Unknown normalizer \[aardvark\]/ + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 1.0, + normalizer: "aardvark" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + +--- +"should throw on negative weights": + - do: + catch: /\[weight\] must be non-negative/ + search: + index: test + body: + retriever: + 
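+          # Sub-retriever weights must be non-negative: the knn entry below uses
+          # weight -10, so the request is expected to fail with the [weight]
+          # validation error matched by the `catch` regex above.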
linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 1.0 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: -10 + } + ] + +--- +"pagination within a consistent rank_window_size": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + from: 2 + size: 1 + + - match: { hits.total.value: 4 } + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._score: 2.0 } + + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + from: 3 + size: 1 + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._id: "3" } + - close_to: { hits.hits.0._score: { value: 0.0, error: 0.001 } } + +--- +"should throw when rank_window_size less than size": + - do: + catch: "/\\[linear\\] requires \\[rank_window_size: 2\\] be greater than or equal to \\[size: 10\\]/" + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + match_all: { } + } + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + rank_window_size: 2 + size: 10 +--- +"should respect rank_window_size for normalization and returned hits": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 1.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + rank_window_size: 2 + size: 2 + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._score: 2.0 } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1._score: 1.0 } + +--- +"explain 
should provide info on weights and inner retrievers": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "four" + } + } + }, + boost: 1.0 + } + } + ] + } + }, + _name: "my_standard_retriever" + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 20.0 + } + ] + explain: true + size: 2 + + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._explanation.description: "/weighted.linear.combination.score:.\\[20.0].computed.for.normalized.scores.\\[.*,.1.0\\].and.weights.\\[10.0,.20.0\\].as.sum.of.\\(weight\\[i\\].*.score\\[i\\]\\).for.each.query./"} + - match: { hits.hits.0._explanation.details.0.value: 0.0 } + - match: { hits.hits.0._explanation.details.0.description: "/.*weighted.score.*result.not.found.in.query.at.index.\\[0\\].\\[my_standard_retriever\\]/" } + - match: { hits.hits.0._explanation.details.1.value: 20.0 } + - match: { hits.hits.0._explanation.details.1.description: "/.*weighted.score.*using.score.normalizer.\\[none\\].*/" } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1._explanation.description: "/weighted.linear.combination.score:.\\[10.0].computed.for.normalized.scores.\\[1.0,.0.0\\].and.weights.\\[10.0,.20.0\\].as.sum.of.\\(weight\\[i\\].*.score\\[i\\]\\).for.each.query./"} + - match: { hits.hits.1._explanation.details.0.value: 10.0 } + - match: { hits.hits.1._explanation.details.0.description: "/.*weighted.score.*\\[my_standard_retriever\\].*using.score.normalizer.\\[minmax\\].*/" } + - match: { hits.hits.1._explanation.details.1.value: 0.0 } + - match: { hits.hits.1._explanation.details.1.description: "/.*weighted.score.*result.not.found.in.query.at.index.\\[1\\]/" } + +--- +"collapsing results": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + collapse: + field: other_keyword + inner_hits: { + name: sub_hits, + sort: + { + keyword: { + order: desc + } + } + } + - match: { hits.hits.0._id: "1" } + - length: { hits.hits.0.inner_hits.sub_hits.hits.hits : 2 } + - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "1" } + - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.1._id: "4" } + +--- +"multiple nested linear retrievers": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + linear: { + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 20.0 + } + } + } + } + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + } + } + ] + } + }, + weight: 2.0 + } + ] + + - match: { hits.total.value: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0._score: 
40.0 } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1._score: 5.0 } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2._score: 2.0 } + +--- +"linear retriever with filters": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + filter: + term: + keyword: "four" + + + - match: { hits.total.value: 1 } + - length: {hits.hits: 1} + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._score: 2.0 } + +--- +"linear retriever with filters on nested retrievers": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + filter: { + term: { + keyword: "four" + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + + - match: { hits.total.value: 1 } + - length: {hits.hits: 1} + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._score: 2.0 } + + +--- +"linear retriever with custom sort and score for nested retrievers": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + bool: { + should: [ + { + term: { + keyword: { + value: "one" # this will give doc 1 a normalized score of 10 because min == max + } + } + }, + { + term: { + keyword: { + value: "two" # this will give doc 2 a normalized score of 10 because min == max + } + } + } ] + } + }, + boost: 10.0 + } + }, + sort: { + timestamp: { + order: "asc" + } + } + } + }, + weight: 1.0, + normalizer: "minmax" + }, + { + # because we're sorting on timestamp and use a rank window size of 3, we will only get to see + # docs 3 and 2. + # their `scores` (which are the timestamps) are: + # doc 3: 1672531200000 (2023-01-01T00:00:00) + # doc 2: 1640995200000 (2022-01-01T00:00:00) + # doc 1: 1609459200000 (2021-01-01T00:00:00) + # and their normalized scores based on the provided conf + # will be: + # normalized(doc3) = 1. 
+ # normalized(doc2) = 0.5 + # normalized(doc1) = 0 + retriever: { + standard: { + query: { + function_score: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 1.0 + } + } + ] + } + }, + functions: [ { + script_score: { + script: { + source: "doc['timestamp'].value.millis" + } + } + } ], + "boost_mode": "replace" + } + }, + sort: { + timestamp: { + order: "desc" + } + } + } + }, + weight: 1.0, + normalizer: "minmax" + } + ] + rank_window_size: 3 + size: 2 + + - match: { hits.total.value: 3 } + - length: {hits.hits: 2} + - match: { hits.hits.0._id: "2" } + - close_to: { hits.hits.0._score: { value: 10.5, error: 0.001 } } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1._score: 10 } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java index 7ede898fa0425..965554023643c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java @@ -70,6 +70,10 @@ public class Rollup extends Plugin implements ActionPlugin, PersistentTaskPlugin { + public static final String DEPRECATION_MESSAGE = + "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information."; + public static final String DEPRECATION_KEY = "rollup_removal"; + // Introduced in ES version 6.3 public static final int ROLLUP_VERSION_V1 = 1; // Introduced in ES Version 6.4 diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java index 0d5a9c86cc3b8..72036f82cdd74 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java @@ -16,6 +16,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.injection.guice.Inject; @@ -31,12 +33,17 @@ import java.util.List; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; + public class TransportDeleteRollupJobAction extends TransportTasksAction< RollupJobTask, DeleteRollupJobAction.Request, DeleteRollupJobAction.Response, DeleteRollupJobAction.Response> { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportDeleteRollupJobAction.class); + @Inject public TransportDeleteRollupJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { super( @@ -52,6 +59,7 @@ public TransportDeleteRollupJobAction(TransportService transportService, ActionF 
@Override
protected void doExecute(Task task, DeleteRollupJobAction.Request request, ActionListener<DeleteRollupJobAction.Response> listener) {
+        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
        final ClusterState state = clusterService.state();
        final DiscoveryNodes nodes = state.nodes();
@@ -93,7 +101,6 @@ protected void taskOperation(
        RollupJobTask jobTask,
        ActionListener<DeleteRollupJobAction.Response> listener
    ) {
-        assert jobTask.getConfig().getId().equals(request.getId());
        IndexerState state = ((RollupJobStatus) jobTask.getStatus()).getIndexerState();
        if (state.equals(IndexerState.STOPPED)) {
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
index b4e468ac0bffe..39ef1b6f4ea0c 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
@@ -14,6 +14,8 @@
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.logging.DeprecationCategory;
+import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.Task;
@@ -32,8 +34,13 @@
import java.util.concurrent.Executor;
import java.util.stream.Collectors;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE;
+
public class TransportGetRollupCapsAction extends HandledTransportAction<GetRollupCapsAction.Request, GetRollupCapsAction.Response> {
+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportGetRollupCapsAction.class);
+
    private final ClusterService clusterService;
    private final Executor managementExecutor;
@@ -53,6 +60,7 @@ public TransportGetRollupCapsAction(TransportService transportService, ClusterSe
    @Override
    protected void doExecute(Task task, GetRollupCapsAction.Request request, ActionListener<GetRollupCapsAction.Response> listener) {
+        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
        // Workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can
        managementExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(request.getIndexPattern(), l)));
    }
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java
index c2a81c6bb16ef..062d06a71c10a 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java
@@ -14,6 +14,8 @@
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.logging.DeprecationCategory;
+import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.Task;
@@ -32,10 +34,15 @@
import java.util.concurrent.Executor;
import java.util.stream.Collectors;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE;
+
public class TransportGetRollupIndexCapsAction extends HandledTransportAction<
    GetRollupIndexCapsAction.Request,
    GetRollupIndexCapsAction.Response> {
+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportGetRollupIndexCapsAction.class);
+
    private final ClusterService clusterService;
    private final IndexNameExpressionResolver resolver;
    private final Executor managementExecutor;
@@ -66,6 +73,7 @@ protected void doExecute(
        GetRollupIndexCapsAction.Request request,
        ActionListener<GetRollupIndexCapsAction.Response> listener
    ) {
+        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
        // Workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can
        managementExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(request, l)));
    }
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java
index 12cea1c305020..e52a595c0a1f9 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java
@@ -16,6 +16,8 @@
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.logging.DeprecationCategory;
+import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.injection.guice.Inject;
@@ -34,12 +36,17 @@
import java.util.List;
import java.util.stream.Collectors;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE;
+
public class TransportGetRollupJobAction extends TransportTasksAction<
    RollupJobTask,
    GetRollupJobsAction.Request,
    GetRollupJobsAction.Response,
    GetRollupJobsAction.Response> {
+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportGetRollupJobAction.class);
+
    @Inject
    public TransportGetRollupJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) {
        super(
@@ -55,6 +62,7 @@ public TransportGetRollupJobAction(TransportService transportService, ActionFilt
    @Override
    protected void doExecute(Task task, GetRollupJobsAction.Request request, ActionListener<GetRollupJobsAction.Response> listener) {
+        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
        final ClusterState state = clusterService.state();
        final DiscoveryNodes nodes = state.nodes();
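Each rollup transport action in this change applies the same deprecation pattern: one shared key and message (declared on the Rollup plugin class earlier in the diff), logged at the entry point of the action so every rollup API call surfaces the removal notice. A minimal, self-contained sketch of that pattern follows; the RollupDeprecations holder class is illustrative only and not part of this change. The fixed key is what lets the deprecation logger deduplicate repeated warnings.

import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;

// Illustrative holder mirroring Rollup.DEPRECATION_KEY / Rollup.DEPRECATION_MESSAGE above.
class RollupDeprecations {
    static final String DEPRECATION_KEY = "rollup_removal";
    static final String DEPRECATION_MESSAGE = "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.";

    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(RollupDeprecations.class);

    // Called at the top of each transport action's doExecute()/masterOperation(),
    // as the hunks above and below do inline.
    static void warnRollupIsDeprecated() {
        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
    }
}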
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java
index d124d5014c7e1..a399253512503 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java
@@ -63,10 +63,13 @@
import java.util.Set;
import static org.elasticsearch.xpack.core.ClientHelper.assertNoAuthorizationHeader;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE;
public class TransportPutRollupJobAction extends AcknowledgedTransportMasterNodeAction<PutRollupJobAction.Request> {
    private static final Logger LOGGER = LogManager.getLogger(TransportPutRollupJobAction.class);
+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportPutRollupJobAction.class);
    private static final XContentParserConfiguration PARSER_CONFIGURATION = XContentParserConfiguration.EMPTY.withFiltering(
        null,
        Set.of("_doc._meta._rollup"),
@@ -76,7 +79,6 @@
    private final PersistentTasksService persistentTasksService;
    private final Client client;
-    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TransportPutRollupJobAction.class);
    @Inject
    public TransportPutRollupJobAction(
@@ -109,6 +111,7 @@ protected void masterOperation(
        ClusterState clusterState,
        ActionListener<AcknowledgedResponse> listener
    ) {
+        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
        XPackPlugin.checkReadyForXPackCustomMetadata(clusterState);
        checkForDeprecatedTZ(request);
@@ -150,7 +153,7 @@ static void checkForDeprecatedTZ(PutRollupJobAction.Request request) {
        String timeZone = request.getConfig().getGroupConfig().getDateHistogram().getTimeZone();
        String modernTZ = DateUtils.DEPRECATED_LONG_TIMEZONES.get(timeZone);
        if (modernTZ != null) {
-            deprecationLogger.warn(
+            DEPRECATION_LOGGER.warn(
                DeprecationCategory.PARSING,
                "deprecated_timezone",
                "Creating Rollup job ["
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
index 34d788d5f094d..c9294c8080421 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
@@ -27,6 +27,8 @@
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.logging.DeprecationCategory;
+import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.index.query.BoolQueryBuilder;
@@ -73,9 +75,13 @@
import java.util.stream.Collectors;
import static org.elasticsearch.core.Strings.format;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE;
public class TransportRollupSearchAction extends TransportAction<SearchRequest, SearchResponse> {
+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportRollupSearchAction.class);
+
    private final Client client;
    private final NamedWriteableRegistry registry;
    private final BigArrays bigArrays;
@@ -115,6 +121,7 @@ public TransportRollupSearchAction(
    @Override
    protected void doExecute(Task task, SearchRequest request, ActionListener<SearchResponse> listener) {
+        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
        String[] indices = resolver.concreteIndexNames(clusterService.state(), request);
        RollupSearchContext rollupSearchContext = separateIndices(indices, clusterService.state().getMetadata().indices());
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java
index 24a04cac40092..aa09fb0cd9f1d 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java
@@ -13,21 +13,29 @@
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.logging.DeprecationCategory;
+import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.CancellableTask;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction;
import org.elasticsearch.xpack.rollup.job.RollupJobTask;
import java.util.List;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE;
+
public class TransportStartRollupAction extends TransportTasksAction<
    RollupJobTask,
    StartRollupJobAction.Request,
    StartRollupJobAction.Response,
    StartRollupJobAction.Response> {
+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportStartRollupAction.class);
+
    @Inject
    public TransportStartRollupAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) {
        super(
@@ -46,6 +54,12 @@ protected List<RollupJobTask> processTasks(StartRollupJobAction.Request request)
        return TransportTaskHelper.doProcessTasks(request.getId(), taskManager);
    }
+    @Override
+    protected void doExecute(Task task, StartRollupJobAction.Request request, ActionListener<StartRollupJobAction.Response> listener) {
+        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
+        super.doExecute(task, request, listener);
+    }
+
    @Override
    protected void taskOperation(
        CancellableTask actionTask,
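A hedged sketch of how the new warning can be verified in a unit test. It assumes ESTestCase's per-test capture of deprecation warnings and its assertWarnings helper behave as in other Elasticsearch unit tests; the test class itself is illustrative and not part of this change.

import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.test.ESTestCase;

public class RollupDeprecationWarningTests extends ESTestCase {

    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(RollupDeprecationWarningTests.class);

    public void testRollupRemovalWarningIsEmitted() {
        // Mirrors what each rollup transport action in this diff now does on every call.
        DEPRECATION_LOGGER.warn(
            DeprecationCategory.API,
            "rollup_removal",
            "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information."
        );
        // assertWarnings verifies (and clears) the deprecation warnings recorded for this test.
        assertWarnings("The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.");
    }
}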
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java
index e4fe926f18feb..833e2dff9485d 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java
@@ -14,6 +14,8 @@
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.logging.DeprecationCategory;
+import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.injection.guice.Inject;
@@ -29,12 +31,17 @@
import java.util.List;
import java.util.function.BooleanSupplier;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY;
+import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE;
+
public class TransportStopRollupAction extends TransportTasksAction<
    RollupJobTask,
    StopRollupJobAction.Request,
    StopRollupJobAction.Response,
    StopRollupJobAction.Response> {
+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportStopRollupAction.class);
+
    private final ThreadPool threadPool;
    @Inject
@@ -63,6 +70,7 @@ protected List<RollupJobTask> processTasks(StopRollupJobAction.Request request)
    @Override
    protected void doExecute(Task task, StopRollupJobAction.Request request, ActionListener<StopRollupJobAction.Response> listener) {
+        DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
        super.doExecute(task, request, listener);
    }
diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java
index da60f9a84c3ba..7bf87c6e9aff6 100644
--- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java
+++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java
@@ -8,9 +8,6 @@
package org.elasticsearch.xpack.searchablesnapshots;
import org.apache.lucene.search.TotalHits;
-import org.apache.lucene.store.ByteBuffersDirectory;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FilterDirectory;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequest;
import org.elasticsearch.action.admin.cluster.allocation.TransportClusterAllocationExplainAction;
@@ -72,14 +69,11 @@
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_RECOVERY_STATE_FACTORY_KEY;
-import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.oneOf;
import static org.hamcrest.Matchers.sameInstance;
@@ -258,22 +252,8 @@ public void testCreateAndRestorePartialSearchableSnapshot() throws Exception {
                // the original shard size from the snapshot
                final long originalSize = snapshotShards.get(shardRouting.getId()).getStats().getTotalSize();
-                totalExpectedSize += originalSize;
-
-                final Directory unwrappedDir = FilterDirectory.unwrap(
-                    internalCluster().getInstance(IndicesService.class, getDiscoveryNodes().resolveNode(shardRouting.currentNodeId()).getName())
-                        .indexServiceSafe(shardRouting.index())
-                        .getShard(shardRouting.getId())
-                        .store()
-                        .directory()
-                );
-                assertThat(shardRouting.toString(),
unwrappedDir, notNullValue()); - assertThat(shardRouting.toString(), unwrappedDir, instanceOf(ByteBuffersDirectory.class)); - - final ByteBuffersDirectory inMemoryDir = (ByteBuffersDirectory) unwrappedDir; - assertThat(inMemoryDir.listAll(), arrayWithSize(0)); - assertThat(shardRouting.toString(), store.totalDataSetSizeInBytes(), equalTo(originalSize)); + totalExpectedSize += originalSize; } final StoreStats store = indicesStatsResponse.getTotal().getStore(); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index 25617028fe544..c207ea1fde1ea 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -820,7 +820,7 @@ public void testSnapshotOfSearchableSnapshotIncludesNoDataButCanBeRestored() thr final String tmpRepositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createRepositoryNoVerify(tmpRepositoryName, "fs"); final Path repoPath = internalCluster().getCurrentMasterNodeInstance(Environment.class) - .resolveRepoFile( + .resolveRepoDir( clusterAdmin().prepareGetRepositories(TEST_REQUEST_TIMEOUT, tmpRepositoryName) .get() .repositories() diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 7eaf5d8f060c6..d8b0d5138a93d 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -23,8 +23,11 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.LuceneFilesExtensions; +import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.repositories.IndexId; @@ -63,9 +66,11 @@ import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_INDEX_NAME_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_SNAPSHOT_ID_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_SNAPSHOT_NAME_SETTING; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; public class 
SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests extends BaseFrozenSearchableSnapshotsIntegTestCase { @@ -194,7 +199,6 @@ public void testCleanUpAfterIndicesAreDeleted() throws Exception { } }); } - logger.info("--> deleting indices, maintenance service should clean up snapshot blob cache index"); assertAcked(indicesAdmin().prepareDelete("mounted-*")); assertBusy(() -> { @@ -311,6 +315,46 @@ public void testPeriodicMaintenance() throws Exception { } } + public void testCleanUpMigratedSystemIndexAfterIndicesAreDeleted() throws Exception { + final String repositoryName = "repository"; + createRepository(repositoryName, FsRepository.TYPE); + + final Map> mountedIndices = mountRandomIndicesWithCache(repositoryName, 3, 10); + ensureYellow(SNAPSHOT_BLOB_CACHE_INDEX); + refreshSystemIndex(true); + + final long numberOfEntriesInCache = numberOfEntriesInCache(); + logger.info("--> found [{}] entries in snapshot blob cache", numberOfEntriesInCache); + assertThat(numberOfEntriesInCache, equalTo(mountedIndices.values().stream().mapToLong(Tuple::v2).sum())); + + migrateTheSystemIndex(); + + logger.info("--> deleting indices, maintenance service should clean up snapshot blob cache index"); + assertAcked(indicesAdmin().prepareDelete("mounted-*")); + assertBusy(() -> { + refreshSystemIndex(true); + assertHitCount(systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX).setSize(0), 0L); + }); + } + + /** + * Mimics migration of the {@link SearchableSnapshots#SNAPSHOT_BLOB_CACHE_INDEX} as done in + * {@link org.elasticsearch.upgrades.SystemIndexMigrator}, where the index is re-indexed, and replaced by an alias. + */ + private void migrateTheSystemIndex() { + final var migratedSnapshotBlobCache = SNAPSHOT_BLOB_CACHE_INDEX + SystemIndices.UPGRADED_INDEX_SUFFIX; + logger.info("--> migrating {} system index to {}", SNAPSHOT_BLOB_CACHE_INDEX, migratedSnapshotBlobCache); + var reindexRequest = new ReindexRequest().setSourceIndices(SNAPSHOT_BLOB_CACHE_INDEX) + .setDestIndex(migratedSnapshotBlobCache) + .setRefresh(true); + var resp = safeGet(client().execute(ReindexAction.INSTANCE, reindexRequest)); + assertThat(resp.getBulkFailures(), is(empty())); + indicesAdmin().prepareAliases() + .removeIndex(SNAPSHOT_BLOB_CACHE_INDEX) + .addAlias(migratedSnapshotBlobCache, SNAPSHOT_BLOB_CACHE_INDEX) + .get(); + } + /** * @return a {@link Client} that can be used to query the blob store cache system index */ diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java index c955457b78d60..3534988b25ce7 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java @@ -145,7 +145,7 @@ public void testConcurrentPrewarming() throws Exception { docsPerIndex.put(indexName, nbDocs); } - final Path repositoryPath = node().getEnvironment().resolveRepoFile(randomAlphaOfLength(10)); + final Path repositoryPath = node().getEnvironment().resolveRepoDir(randomAlphaOfLength(10)); final Settings.Builder repositorySettings = Settings.builder().put("location", repositoryPath); if 
(randomBoolean()) { repositorySettings.put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index 778fd3045f7cc..f6a35fb98203d 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -337,7 +337,14 @@ public Collection createComponents(PluginServices services) { final BlobStoreCacheService blobStoreCacheService = new BlobStoreCacheService(client, SNAPSHOT_BLOB_CACHE_INDEX); this.blobStoreCacheService.set(blobStoreCacheService); clusterService.addListener( - new BlobStoreCacheMaintenanceService(settings, clusterService, threadPool, client, SNAPSHOT_BLOB_CACHE_INDEX) + new BlobStoreCacheMaintenanceService( + settings, + clusterService, + threadPool, + client, + services.systemIndices(), + SNAPSHOT_BLOB_CACHE_INDEX + ) ); components.add(blobStoreCacheService); } else { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index 21e67212f1f51..44a5bc88abc3b 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -36,7 +36,6 @@ import org.elasticsearch.cluster.metadata.RepositoriesMetadata; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; @@ -47,7 +46,6 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThrottledTaskRunner; import org.elasticsearch.core.AbstractRefCounted; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -57,6 +55,8 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -145,6 +145,7 @@ public class BlobStoreCacheMaintenanceService implements ClusterStateListener { private final Client clientWithOrigin; private final String systemIndexName; private final ThreadPool threadPool; + private final SystemIndexDescriptor systemIndexDescriptor; private volatile Scheduler.Cancellable periodicTask; private volatile TimeValue periodicTaskInterval; @@ -158,10 +159,12 @@ 
public BlobStoreCacheMaintenanceService( ClusterService clusterService, ThreadPool threadPool, Client client, + SystemIndices systemIndices, String systemIndexName ) { this.clientWithOrigin = new OriginSettingClient(Objects.requireNonNull(client), SEARCHABLE_SNAPSHOTS_ORIGIN); this.systemIndexName = Objects.requireNonNull(systemIndexName); + this.systemIndexDescriptor = Objects.requireNonNull(systemIndices.findMatchingDescriptor(systemIndexName)); this.clusterService = Objects.requireNonNull(clusterService); this.threadPool = Objects.requireNonNull(threadPool); this.periodicTaskInterval = SNAPSHOT_SNAPSHOT_CLEANUP_INTERVAL_SETTING.get(settings); @@ -181,10 +184,7 @@ public void clusterChanged(ClusterChangedEvent event) { if (state.getBlocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) { return; // state not fully recovered } - final ShardRouting primary = systemIndexPrimaryShard(state); - if (primary == null - || primary.active() == false - || Objects.equals(state.nodes().getLocalNodeId(), primary.currentNodeId()) == false) { + if (systemIndexPrimaryShardActiveAndAssignedToLocalNode(state) == false) { // system index primary shard does not exist or is not assigned to this data node stopPeriodicTask(); return; @@ -242,16 +242,20 @@ private synchronized void stopPeriodicTask() { } } - @Nullable - private ShardRouting systemIndexPrimaryShard(final ClusterState state) { - final IndexMetadata indexMetadata = state.metadata().index(systemIndexName); - if (indexMetadata != null) { - final IndexRoutingTable indexRoutingTable = state.routingTable().index(indexMetadata.getIndex()); - if (indexRoutingTable != null) { - return indexRoutingTable.shard(0).primaryShard(); + private boolean systemIndexPrimaryShardActiveAndAssignedToLocalNode(final ClusterState state) { + for (IndexMetadata indexMetadata : state.metadata()) { + if (indexMetadata.isSystem() && systemIndexDescriptor.matchesIndexPattern(indexMetadata.getIndex().getName())) { + final IndexRoutingTable indexRoutingTable = state.routingTable().index(indexMetadata.getIndex()); + if (indexRoutingTable == null || indexRoutingTable.shard(0) == null) { + continue; + } + final var primary = indexRoutingTable.shard(0).primaryShard(); + if (primary != null && primary.active() && Objects.equals(state.nodes().getLocalNodeId(), primary.currentNodeId())) { + return true; + } } } - return null; + return false; } private static boolean hasSearchableSnapshotWith(final ClusterState state, final String snapshotId, final String indexId) { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java index 2d8d78473501c..a02c32cd56d94 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java @@ -245,7 +245,11 @@ private void ensureLifecycleStarted() { final Lifecycle.State state = lifecycleState(); assert state != Lifecycle.State.INITIALIZED : state; if (state != Lifecycle.State.STARTED) { - throw new IllegalStateException("Failed to read data from cache: cache service is not started [" + state + "]"); + if (state == Lifecycle.State.STOPPED) { + throw new AlreadyClosedException("Failed to read data from cache: cache service is stopped"); + } else { + throw new 
IllegalStateException("Failed to read data from cache: cache service is not started [" + state + "]");
+            }
        }
    }
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java
index bbdf371e1ed7b..d62443e492605 100644
--- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java
@@ -23,6 +23,7 @@
import org.elasticsearch.blobcache.shared.SharedBlobCacheService;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.RecoverySource;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.store.ByteArrayIndexInput;
@@ -34,6 +35,7 @@
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot;
@@ -277,6 +279,10 @@ private BlobStoreIndexShardSnapshot.FileInfo fileInfo(final String name) throws
    @Override
    public final String[] listAll() {
        ensureOpen();
+        return listAllFiles();
+    }
+
+    private String[] listAllFiles() {
        return files().stream().map(BlobStoreIndexShardSnapshot.FileInfo::physicalName).sorted(String::compareTo).toArray(String[]::new);
    }
@@ -288,42 +294,39 @@ public final long fileLength(final String name) throws IOException {
    @Override
    public Set<String> getPendingDeletions() {
-        throw unsupportedException();
+        throw unsupportedException("getPendingDeletions");
    }
    @Override
-    public void sync(Collection<String> names) {
-        throw unsupportedException();
-    }
+    public void sync(Collection<String> names) {}
    @Override
-    public void syncMetaData() {
-        throw unsupportedException();
-    }
+    public void syncMetaData() {}
    @Override
    public void deleteFile(String name) {
-        throw unsupportedException();
+        throw unsupportedException("deleteFile(" + name + ')');
    }
    @Override
    public IndexOutput createOutput(String name, IOContext context) {
-        throw unsupportedException();
+        throw unsupportedException("createOutput(" + name + ", " + context + ')');
    }
    @Override
    public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) {
-        throw unsupportedException();
+        throw unsupportedException("createTempOutput(" + prefix + ", " + suffix + ", " + context + ')');
    }
    @Override
    public void rename(String source, String dest) {
-        throw unsupportedException();
+        throw unsupportedException("rename(" + source + ", " + dest + ')');
    }
-    private static UnsupportedOperationException unsupportedException() {
-        assert false : "this operation is not supported and should have not be called";
-        return new UnsupportedOperationException("Searchable snapshot directory does not support this operation");
+    private UnsupportedOperationException unsupportedException(String description) {
+        var message = "Searchable snapshot directory does not support the operation [" + description + ']';
+        assert false : message + ", current directory files: " +
Strings.arrayToCommaDelimitedString(this.listAllFiles()); + return new UnsupportedOperationException(message); } @Override @@ -612,24 +615,33 @@ public static Directory create( final Path cacheDir = CacheService.getShardCachePath(shardPath).resolve(snapshotId.getUUID()); Files.createDirectories(cacheDir); - return new InMemoryNoOpCommitDirectory( - new SearchableSnapshotDirectory( - blobContainerSupplier, - lazySnapshot::getOrCompute, - blobStoreCacheService, - initialRepository.getMetadata().name(), - snapshotId, - indexId, - shardPath.getShardId(), - indexSettings.getSettings(), - currentTimeNanosSupplier, - cache, - cacheDir, - shardPath, - threadPool, - sharedBlobCacheService - ) + final var dir = new SearchableSnapshotDirectory( + blobContainerSupplier, + lazySnapshot::getOrCompute, + blobStoreCacheService, + initialRepository.getMetadata().name(), + snapshotId, + indexId, + shardPath.getShardId(), + indexSettings.getSettings(), + currentTimeNanosSupplier, + cache, + cacheDir, + shardPath, + threadPool, + sharedBlobCacheService ); + + // Archives indices mounted as searchable snapshots always require a writeable Lucene directory in order to rewrite the segments + // infos file to the latest Lucene version. Similarly, searchable snapshot indices created before 9.0.0 also require a writeable + // directory because in previous versions commits were executed during recovery (to associate translogs with Lucene indices), + // creating additional files that need to be sent and written to replicas during peer-recoveries. From 9.0.0 we merged a change to + // skip commits creation during recovery for searchable snapshots (see https://github.com/elastic/elasticsearch/pull/118606). + var version = IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY.get(indexSettings.getSettings()); + if (version.before(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) || indexSettings.getIndexVersionCreated().isLegacyIndexVersion()) { + return new InMemoryNoOpCommitDirectory(dir); + } + return dir; } public static SearchableSnapshotDirectory unwrapDirectory(Directory dir) { diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java index 53ea908ad8801..3d4d7f768c1b3 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInputTests.java @@ -98,7 +98,7 @@ public void testRandomReads() throws IOException { .put("path.home", createTempDir()) .build(); final Environment environment = TestEnvironment.newEnvironment(settings); - for (Path path : environment.dataFiles()) { + for (Path path : environment.dataDirs()) { Files.createDirectories(path); } SnapshotId snapshotId = new SnapshotId("_name", "_uuid"); diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java index 3994fb50c7fc6..dbe0e0b0e9577 100644 --- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java @@ -163,7 +163,7 @@ public void 
execute(Terminal terminal, OptionSet options, Environment env, Proce final boolean inEnrollmentMode = options.has(enrollmentTokenParam); // skipping security auto-configuration because node considered as restarting. - for (Path dataPath : env.dataFiles()) { + for (Path dataPath : env.dataDirs()) { if (Files.isDirectory(dataPath) && false == isDirEmpty(dataPath)) { final String msg = "Skipping security auto configuration because it appears that the node is not starting up for the " + "first time. The node might already be part of a cluster and this auto setup utility is designed to configure " @@ -173,7 +173,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } // pre-flight checks for the files that are going to be changed - final Path ymlPath = env.configFile().resolve("elasticsearch.yml"); + final Path ymlPath = env.configDir().resolve("elasticsearch.yml"); // it is odd for the `elasticsearch.yml` file to be missing or not be a regular (the node won't start) // but auto configuration should not be concerned with fixing it (by creating the file) and let the node startup fail if (false == Files.exists(ymlPath) || false == Files.isRegularFile(ymlPath, LinkOption.NOFOLLOW_LINKS)) { @@ -194,7 +194,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce ); notifyOfFailure(inEnrollmentMode, terminal, Terminal.Verbosity.NORMAL, ExitCodes.NOOP, msg); } - final Path keystorePath = KeyStoreWrapper.keystorePath(env.configFile()); + final Path keystorePath = KeyStoreWrapper.keystorePath(env.configDir()); // Inform that auto-configuration will not run if keystore cannot be read. if (Files.exists(keystorePath) && (false == Files.isRegularFile(keystorePath, LinkOption.NOFOLLOW_LINKS) || false == Files.isReadable(keystorePath))) { @@ -218,7 +218,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce checkExistingConfiguration(env.settings(), inEnrollmentMode, terminal); final ZonedDateTime autoConfigDate = ZonedDateTime.now(ZoneOffset.UTC); - final Path tempGeneratedTlsCertsDir = env.configFile() + final Path tempGeneratedTlsCertsDir = env.configDir() .resolve(String.format(Locale.ROOT, TLS_GENERATED_CERTS_DIR_NAME + ".%d.tmp", autoConfigDate.toInstant().getEpochSecond())); try { // it is useful to pre-create the sub-config dir in order to check that the config dir is writable and that file owners match @@ -247,12 +247,12 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce // If the node process works OK given the owner of the config dir, it should also tolerate the auto-created config dir, // provided that they both have the same owner and permissions. final UserPrincipal newFileOwner = Files.getOwner(tempGeneratedTlsCertsDir, LinkOption.NOFOLLOW_LINKS); - if (false == newFileOwner.equals(Files.getOwner(env.configFile(), LinkOption.NOFOLLOW_LINKS))) { + if (false == newFileOwner.equals(Files.getOwner(env.configDir(), LinkOption.NOFOLLOW_LINKS))) { // the following is only printed once, if the node starts successfully UserException userException = new UserException( ExitCodes.CONFIG, "Aborting auto configuration because of config dir ownership mismatch. 
Config dir is owned by " - + Files.getOwner(env.configFile(), LinkOption.NOFOLLOW_LINKS).getName() + + Files.getOwner(env.configDir(), LinkOption.NOFOLLOW_LINKS).getName() + " but auto-configuration directory would be owned by " + newFileOwner.getName() ); @@ -496,7 +496,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } // save the existing keystore before replacing - final Path keystoreBackupPath = env.configFile() + final Path keystoreBackupPath = env.configDir() .resolve( String.format(Locale.ROOT, KeyStoreWrapper.KEYSTORE_FILENAME + ".%d.orig", autoConfigDate.toInstant().getEpochSecond()) ); @@ -514,7 +514,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } final SetOnce nodeKeystorePassword = new SetOnce<>(); - try (KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configFile(), () -> { + try (KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configDir(), () -> { nodeKeystorePassword.set(new SecureString(terminal.readSecret(""))); return nodeKeystorePassword.get().clone(); })) { @@ -581,7 +581,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce nodeKeystore.setString("xpack.security.http.ssl.keystore.secure_password", httpKeystorePassword.getChars()); } // finally overwrites the node keystore (if the keystores have been successfully written) - nodeKeystore.save(env.configFile(), nodeKeystorePassword.get() == null ? new char[0] : nodeKeystorePassword.get().getChars()); + nodeKeystore.save(env.configDir(), nodeKeystorePassword.get() == null ? new char[0] : nodeKeystorePassword.get().getChars()); } catch (Throwable t) { // restore keystore to revert possible keystore bootstrap try { @@ -614,10 +614,10 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce try { // all certs and keys have been generated in the temp certs dir, therefore: // 1. backup (move) any previously existing tls certs dir (this backup is NOT removed when auto-conf finishes) - if (Files.exists(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { + if (Files.exists(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { moveDirectory( - env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME), - env.configFile() + env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME), + env.configDir() .resolve( String.format( Locale.ROOT, @@ -628,7 +628,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce ); } // 2. 
move the newly populated temp certs dir to its permanent static dir name - moveDirectory(tempGeneratedTlsCertsDir, env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)); + moveDirectory(tempGeneratedTlsCertsDir, env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)); } catch (Throwable t) { // restore keystore to revert possible keystore bootstrap try { @@ -649,7 +649,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce // revert any previously existing TLS certs try { if (Files.exists( - env.configFile() + env.configDir() .resolve( String.format( Locale.ROOT, @@ -659,7 +659,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce ) )) { moveDirectory( - env.configFile() + env.configDir() .resolve( String.format( Locale.ROOT, @@ -667,7 +667,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce autoConfigDate.toInstant().getEpochSecond() ) ), - env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME) + env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME) ); } } catch (Exception ex) { @@ -686,7 +686,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce final Environment localFinalEnv = env; final DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss", Locale.ROOT); List existingConfigLines = Files.readAllLines(ymlPath, StandardCharsets.UTF_8); - fullyWriteFile(env.configFile(), "elasticsearch.yml", true, stream -> { + fullyWriteFile(env.configDir(), "elasticsearch.yml", true, stream -> { try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8))) { // start with the existing config lines for (String line : existingConfigLines) { @@ -827,16 +827,16 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } try { // this removes a statically named directory, so it is potentially dangerous - deleteDirectory(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)); + deleteDirectory(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)); } catch (Exception ex) { t.addSuppressed(ex); } - Path backupCertsDir = env.configFile() + Path backupCertsDir = env.configDir() .resolve( String.format(Locale.ROOT, TLS_GENERATED_CERTS_DIR_NAME + ".%d.orig", autoConfigDate.toInstant().getEpochSecond()) ); if (Files.exists(backupCertsDir)) { - moveDirectory(backupCertsDir, env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)); + moveDirectory(backupCertsDir, env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)); } throw t; } @@ -887,14 +887,14 @@ private Environment possiblyReconfigureNode(Environment env, Terminal terminal, // with --enrolment-token token, in the first place. final List existingConfigLines; try { - existingConfigLines = Files.readAllLines(env.configFile().resolve("elasticsearch.yml"), StandardCharsets.UTF_8); + existingConfigLines = Files.readAllLines(env.configDir().resolve("elasticsearch.yml"), StandardCharsets.UTF_8); } catch (IOException e) { // This shouldn't happen, we would have failed earlier but we need to catch the exception throw new UserException(ExitCodes.IO_ERROR, "Aborting enrolling to cluster. 
Unable to read elasticsearch.yml.", e); } final List existingConfigWithoutAutoconfiguration = removePreviousAutoconfiguration(existingConfigLines); if (false == existingConfigLines.equals(existingConfigWithoutAutoconfiguration) - && Files.exists(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { + && Files.exists(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME))) { terminal.println(""); terminal.println("This node will be reconfigured to join an existing cluster, using the enrollment token that you provided."); terminal.println("This operation will overwrite the existing configuration. Specifically: "); @@ -907,7 +907,7 @@ private Environment possiblyReconfigureNode(Environment env, Terminal terminal, } removeAutoConfigurationFromKeystore(env, terminal); try { - fullyWriteFile(env.configFile(), "elasticsearch.yml", true, stream -> { + fullyWriteFile(env.configDir(), "elasticsearch.yml", true, stream -> { try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8))) { for (String l : existingConfigWithoutAutoconfiguration) { bw.write(l); @@ -915,7 +915,7 @@ private Environment possiblyReconfigureNode(Environment env, Terminal terminal, } } }); - deleteDirectory(env.configFile().resolve(TLS_GENERATED_CERTS_DIR_NAME)); + deleteDirectory(env.configDir().resolve(TLS_GENERATED_CERTS_DIR_NAME)); } catch (Throwable t) { throw new UserException( ExitCodes.IO_ERROR, @@ -1262,9 +1262,9 @@ static List removePreviousAutoconfiguration(List existingConfigL } private static void removeAutoConfigurationFromKeystore(Environment env, Terminal terminal) throws UserException { - if (Files.exists(KeyStoreWrapper.keystorePath(env.configFile()))) { + if (Files.exists(KeyStoreWrapper.keystorePath(env.configDir()))) { try ( - KeyStoreWrapper existingKeystore = KeyStoreWrapper.load(env.configFile()); + KeyStoreWrapper existingKeystore = KeyStoreWrapper.load(env.configDir()); SecureString keystorePassword = existingKeystore.hasPassword() ? 
new SecureString(terminal.readSecret("Enter password for the elasticsearch keystore: ")) : new SecureString(new char[0]); @@ -1288,7 +1288,7 @@ private static void removeAutoConfigurationFromKeystore(Environment env, Termina } existingKeystore.remove(setting); } - existingKeystore.save(env.configFile(), keystorePassword.getChars()); + existingKeystore.save(env.configDir(), keystorePassword.getChars()); } catch (Exception e) { terminal.errorPrintln(Terminal.Verbosity.VERBOSE, ""); terminal.errorPrintln(Terminal.Verbosity.VERBOSE, ExceptionsHelper.stackTrace(e)); diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java index b67bb9898991f..0e96911405b30 100644 --- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommand.java @@ -508,7 +508,7 @@ private static Map buildSubstitutions(Environment env, Map runAutoConfigAndReturnCertificat SecureString httpKeystorePassword = nodeKeystore.getString("xpack.security.http.ssl.keystore.secure_password"); SecureString transportKeystorePassword = nodeKeystore.getString("xpack.security.transport.ssl.keystore.secure_password"); - final Settings newSettings = Settings.builder().loadFromPath(env.configFile().resolve("elasticsearch.yml")).build(); + final Settings newSettings = Settings.builder().loadFromPath(env.configDir().resolve("elasticsearch.yml")).build(); final String httpKeystorePath = newSettings.get("xpack.security.http.ssl.keystore.path"); final String transportKeystorePath = newSettings.get("xpack.security.transport.ssl.keystore.path"); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 09dda0f708a86..dcf993ea4ce7a 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -344,13 +344,24 @@ private static String populateOtherUser() throws IOException { return otherUser; } + private void performRequestWithAdminUserIgnoreNotFound(RestClient targetFulfillingClusterClient, Request request) throws IOException { + try { + performRequestWithAdminUser(targetFulfillingClusterClient, request); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + logger.info("Ignored \"not found\" exception", e); + } + } + @After public void wipeData() throws Exception { CheckedConsumer wipe = client -> { - performRequestWithAdminUser(client, new Request("DELETE", "/employees")); - performRequestWithAdminUser(client, new Request("DELETE", "/employees2")); - performRequestWithAdminUser(client, new Request("DELETE", "/employees3")); - performRequestWithAdminUser(client, new Request("DELETE", "/_enrich/policy/countries")); + performRequestWithAdminUserIgnoreNotFound(client, new Request("DELETE", "/employees")); + performRequestWithAdminUserIgnoreNotFound(client, new Request("DELETE", "/employees2")); + 
performRequestWithAdminUserIgnoreNotFound(client, new Request("DELETE", "/employees3")); + performRequestWithAdminUserIgnoreNotFound(client, new Request("DELETE", "/_enrich/policy/countries")); }; wipe.accept(fulfillingClusterClient); wipe.accept(client()); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRCS1DeprecationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRCS1DeprecationIT.java new file mode 100644 index 0000000000000..adf637b831fe5 --- /dev/null +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRCS1DeprecationIT.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.remotecluster; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.LogType; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Locale; + +import static org.hamcrest.Matchers.containsString; + +/** + * Tests the deprecation of RCS1.0 (certificate-based) security model. + */ +public class RemoteClusterSecurityRCS1DeprecationIT extends AbstractRemoteClusterSecurityTestCase { + + public static final String REMOTE_CLUSTER_ALIAS = "my_remote_cluster"; + + static { + fulfillingCluster = ElasticsearchCluster.local().name("fulfilling-cluster").nodes(1).apply(commonClusterConfig).build(); + queryCluster = ElasticsearchCluster.local().nodes(1).name("query-cluster").apply(commonClusterConfig).build(); + } + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster); + + public void testUsingRCS1GeneratesDeprecationWarning() throws Exception { + final boolean rcs1 = true; + final boolean useProxyMode = randomBoolean(); + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, rcs1, useProxyMode, randomBoolean()); + + { + // Query cluster -> add role for test user + var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleRequest.setJsonEntity(""" + { + "indices": [ + { + "names": ["local_index"], + "privileges": ["read"] + } + ] + }"""); + assertOK(adminClient().performRequest(putRoleRequest)); + + // Query cluster -> create user and assign role + var putUserRequest = new Request("PUT", "/_security/user/" + REMOTE_SEARCH_USER); + putUserRequest.setJsonEntity(""" + { + "password": "x-pack-test-password", + "roles" : ["remote_search"] + }"""); + assertOK(adminClient().performRequest(putUserRequest)); + + // Query cluster -> create test index + var indexDocRequest = new Request("POST", "/local_index/_doc?refresh=true"); + indexDocRequest.setJsonEntity("{\"local_foo\": \"local_bar\"}"); + assertOK(client().performRequest(indexDocRequest)); + + // Fulfilling cluster -> create test indices + Request bulkRequest = new Request("POST", "/_bulk?refresh=true"); 
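+ // The _bulk request body is NDJSON: each { "index": ... } action metadata line is followed by that document's source on the next line.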
+ bulkRequest.setJsonEntity(Strings.format(""" + { "index": { "_index": "index1" } } + { "foo": "bar" } + { "index": { "_index": "secretindex" } } + { "bar": "foo" } + """)); + assertOK(performRequestAgainstFulfillingCluster(bulkRequest)); + + // Fulfilling cluster -> add role for remote search user + var putRoleOnRemoteClusterRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleOnRemoteClusterRequest.setJsonEntity(""" + { + "indices": [ + { + "names": ["index*"], + "privileges": ["read", "read_cross_cluster"] + } + ] + }"""); + assertOK(performRequestAgainstFulfillingCluster(putRoleOnRemoteClusterRequest)); + } + { + // perform a simple search request, so we can ensure the remote cluster is connected + final Request searchRequest = new Request( + "GET", + String.format( + Locale.ROOT, + "/%s:index1/_search?ccs_minimize_roundtrips=%s", + randomFrom(REMOTE_CLUSTER_ALIAS, "*", "my_remote_*"), + randomBoolean() + ) + ); + assertOK(performRequestWithRemoteSearchUser(searchRequest)); + } + { + // verify that the deprecation warning is logged + try (InputStream log = queryCluster.getNodeLog(0, LogType.DEPRECATION)) { + Streams.readAllLines( + log, + line -> assertThat( + line, + containsString( + "The remote cluster connection to [" + + REMOTE_CLUSTER_ALIAS + + "] is using the certificate-based security model. " + + "The certificate-based security model is deprecated and will be removed in a future major version. " + + "Migrate the remote cluster from the certificate-based to the API key-based security model." + ) + ) + ); + } + } + } + + private Response performRequestWithRemoteSearchUser(final Request request) throws IOException { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS)) + ); + return client().performRequest(request); + } + +} diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 4cbd1cab21af9..307f59859c75a 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -15,12 +15,14 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Strings; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.LogType; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.junit.RunnableTestRuleAdapter; @@ -31,6 +33,7 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; @@ -607,6 +610,7 @@ public void testCrossClusterSearch() throws Exception { assertThat(exception6.getMessage(), 
containsString("invalid cross-cluster API key value")); } } + assertNoRcs1DeprecationWarnings(); } @SuppressWarnings("unchecked") @@ -681,4 +685,23 @@ private static void selectTasksWithOpaqueId( } } } + + private void assertNoRcs1DeprecationWarnings() throws IOException { + for (int i = 0; i < queryCluster.getNumNodes(); i++) { + try (InputStream log = queryCluster.getNodeLog(i, LogType.DEPRECATION)) { + Streams.readAllLines( + log, + line -> assertThat( + line, + not( + containsString( + "The certificate-based security model is deprecated and will be removed in a future major version. " + + "Migrate the remote cluster from the certificate-based to the API key-based security model." + ) + ) + ) + ); + } + } + } } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 5a2d24e1aa3ce..7e571b6db2f92 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -327,6 +327,8 @@ public class Constants { "cluster:admin/xpack/watcher/settings/update", "cluster:admin/xpack/watcher/watch/put", "cluster:internal/remote_cluster/nodes", + "cluster:internal/xpack/inference", + "cluster:internal/xpack/inference/unified", "cluster:internal/xpack/ml/coordinatedinference", "cluster:internal/xpack/ml/datafeed/isolate", "cluster:internal/xpack/ml/datafeed/running_state", @@ -386,9 +388,8 @@ public class Constants { "cluster:monitor/xpack/enrich/stats", "cluster:monitor/xpack/eql/stats/dist", "cluster:monitor/xpack/esql/stats/dist", - "cluster:monitor/xpack/inference", + "cluster:monitor/xpack/inference/post", "cluster:monitor/xpack/inference/get", - "cluster:monitor/xpack/inference/unified", "cluster:monitor/xpack/inference/diagnostics/get", "cluster:monitor/xpack/inference/services/get", "cluster:monitor/xpack/info", @@ -518,7 +519,6 @@ public class Constants { "indices:admin/flush", "indices:admin/flush[s]", "indices:admin/forcemerge", - "indices:admin/freeze", "indices:admin/get", "indices:admin/analyze_disk_usage", "indices:admin/ilm/explain", diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java index 44f7a6d47e361..b1fda5f6c4e6e 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java @@ -55,7 +55,7 @@ public Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Environment tmpEnv = TestEnvironment.newEnvironment(settings); // For each node, copy the original testnode.jks into each node's config directory. 
- Path nodeKeystorePath = tmpEnv.configFile().resolve("testnode.jks"); + Path nodeKeystorePath = tmpEnv.configDir().resolve("testnode.jks"); try { Path goodKeystorePath = getDataPath(goodKeyStoreFilePath); Files.copy(goodKeystorePath, nodeKeystorePath, StandardCopyOption.REPLACE_EXISTING); @@ -93,7 +93,7 @@ public void testReloadDuringStartup() throws Exception { final Environment env = internalCluster().getInstance(Environment.class, nodeName); final CountDownLatch beforeKeystoreFix = new CountDownLatch(2); // SYNC: Cert update & ES restart final CountDownLatch afterKeystoreFix = new CountDownLatch(1); // SYNC: Verify cluster after cert update - final Path nodeKeystorePath = env.configFile().resolve("testnode.jks"); // all nodes have good keystore + final Path nodeKeystorePath = env.configDir().resolve("testnode.jks"); // all nodes have good keystore final Path badKeystorePath = getDataPath(badKeyStoreFilePath); // stop a node, and apply this bad keystore final Path goodKeystorePath = getDataPath(goodKeyStoreFilePath); // start the node, and apply this good keystore assertTrue(Files.exists(nodeKeystorePath)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 6004f8ebf95c4..5cdd466e7bf00 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -65,7 +65,6 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.features.FeatureService; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.http.HttpPreRequest; @@ -726,9 +725,9 @@ protected List getReloadableSecurityComponents() { * ES has already checked the file is actually in the config directory */ public static Path resolveSecuredConfigFile(Environment env, String file) { - Path config = env.configFile().resolve(file); + Path config = env.configDir().resolve(file); if (doPrivileged((PrivilegedAction) () -> Files.exists(config)) == false) { - Path legacyConfig = env.configFile().resolve("x-pack").resolve(file); + Path legacyConfig = env.configDir().resolve("x-pack").resolve(file); if (doPrivileged((PrivilegedAction) () -> Files.exists(legacyConfig))) { DeprecationLogger.getLogger(XPackPlugin.class) .warn( @@ -754,7 +753,6 @@ public Collection createComponents(PluginServices services) { services.scriptService(), services.xContentRegistry(), services.environment(), - services.nodeEnvironment().nodeMetadata(), services.indexNameExpressionResolver(), services.telemetryProvider(), new PersistentTasksService(services.clusterService(), services.threadPool(), services.client()) @@ -774,7 +772,6 @@ Collection createComponents( ScriptService scriptService, NamedXContentRegistry xContentRegistry, Environment environment, - NodeMetadata nodeMetadata, IndexNameExpressionResolver expressionResolver, TelemetryProvider telemetryProvider, PersistentTasksService persistentTasksService @@ -980,7 +977,6 @@ Collection createComponents( getLicenseState(), systemIndices.getMainIndexManager(), clusterService, - featureService, reservedRoleNameChecker, xContentRegistry ); @@ -1472,7 +1468,7 @@ public static List> getSettings(List securityExten settingsList.add(TokenService.DELETE_INTERVAL); settingsList.add(TokenService.DELETE_TIMEOUT); 
settingsList.addAll(SSLConfigurationSettings.getProfileSettings()); - settingsList.add(ApiKeyService.PASSWORD_HASHING_ALGORITHM); + settingsList.add(ApiKeyService.STORED_HASH_ALGO_SETTING); settingsList.add(ApiKeyService.DELETE_TIMEOUT); settingsList.add(ApiKeyService.DELETE_INTERVAL); settingsList.add(ApiKeyService.DELETE_RETENTION_PERIOD); @@ -1818,17 +1814,30 @@ static void validateForFips(Settings settings) { + " ] setting." ); } - Stream.of(ApiKeyService.PASSWORD_HASHING_ALGORITHM, XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM).forEach((setting) -> { - final var storedHashAlgo = setting.get(settings); - if (storedHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { - // log instead of validation error for backwards compatibility - logger.warn( - "Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM. " - + "Please set the appropriate value for [{}] setting.", - setting.getKey() - ); - } - }); + + final var serviceTokenStoredHashSettings = XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM; + final var serviceTokenStoredHashAlgo = serviceTokenStoredHashSettings.get(settings); + if (serviceTokenStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { + // log instead of validation error for backwards compatibility + logger.warn( + "Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM. " + + "Please set the appropriate value for [{}] setting.", + serviceTokenStoredHashSettings.getKey() + ); + } + + final var apiKeyStoredHashSettings = ApiKeyService.STORED_HASH_ALGO_SETTING; + final var apiKeyStoredHashAlgo = apiKeyStoredHashSettings.get(settings); + if (apiKeyStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("ssha256") == false + && apiKeyStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { + // log instead of validation error for backwards compatibility + logger.warn( + "[{}] is not recommended for stored API key hashing in a FIPS 140 JVM. The recommended hasher for [{}] is SSHA256.", + apiKeyStoredHashAlgo, + apiKeyStoredHashSettings.getKey() + ); + } + final var cacheHashAlgoSettings = settings.filter(k -> k.endsWith(".cache.hash_algo")); cacheHashAlgoSettings.keySet().forEach((key) -> { final var setting = cacheHashAlgoSettings.get(key); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java index c792fa364a74a..fc09681ac26ed 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java @@ -72,11 +72,11 @@ public ChangePasswordRequestBuilder password(char[] password, Hasher hasher) { public ChangePasswordRequestBuilder passwordHash(char[] passwordHashChars, Hasher configuredHasher) { final Hasher resolvedHasher = Hasher.resolveFromHash(passwordHashChars); if (resolvedHasher.equals(configuredHasher) == false - && Hasher.getAvailableAlgoStoredHash().contains(resolvedHasher.name().toLowerCase(Locale.ROOT)) == false) { + && Hasher.getAvailableAlgoStoredPasswordHash().contains(resolvedHasher.name().toLowerCase(Locale.ROOT)) == false) { throw new IllegalArgumentException( "The provided password hash is not a hash or it could not be resolved to a supported hash algorithm.
" + "The supported password hash algorithms are " - + Hasher.getAvailableAlgoStoredHash().toString() + + Hasher.getAvailableAlgoStoredPasswordHash().toString() ); } if (request.passwordHash() != null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 96323836aa005..541bbdddd657e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -52,12 +52,12 @@ protected void doExecute(Task task, ChangePasswordRequest request, ActionListene final Hasher requestPwdHashAlgo = Hasher.resolveFromHash(request.passwordHash()); final Hasher configPwdHashAlgo = Hasher.resolve(XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings)); if (requestPwdHashAlgo.equals(configPwdHashAlgo) == false - && Hasher.getAvailableAlgoStoredHash().contains(requestPwdHashAlgo.name().toLowerCase(Locale.ROOT)) == false) { + && Hasher.getAvailableAlgoStoredPasswordHash().contains(requestPwdHashAlgo.name().toLowerCase(Locale.ROOT)) == false) { listener.onFailure( new IllegalArgumentException( "The provided password hash is not a hash or it could not be resolved to a supported hash algorithm. " + "The supported password hash algorithms are " - + Hasher.getAvailableAlgoStoredHash().toString() + + Hasher.getAvailableAlgoStoredPasswordHash().toString() ) ); return; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index c2d1370c2cbf3..5fee747a3f73f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -38,7 +38,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cache.Cache; @@ -139,6 +138,7 @@ import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.common.SecureRandomUtils.getBase64SecureRandomString; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; @@ -158,9 +158,9 @@ public class ApiKeyService implements Closeable { private static final Logger logger = LogManager.getLogger(ApiKeyService.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ApiKeyService.class); - public static final Setting PASSWORD_HASHING_ALGORITHM = XPackSettings.defaultStoredHashAlgorithmSetting( + public static final Setting STORED_HASH_ALGO_SETTING = XPackSettings.defaultStoredSecureTokenHashAlgorithmSetting( "xpack.security.authc.api_key.hashing.algorithm", - (s) -> Hasher.PBKDF2.name() + (s) -> Hasher.SSHA256.name() ); public static final Setting DELETE_TIMEOUT = Setting.timeSetting( 
"xpack.security.authc.api_key.delete.timeout", @@ -181,7 +181,7 @@ public class ApiKeyService implements Closeable { ); public static final Setting CACHE_HASH_ALGO_SETTING = Setting.simpleString( "xpack.security.authc.api_key.cache.hash_algo", - "ssha256", + Hasher.SSHA256.name(), Setting.Property.NodeScope ); public static final Setting CACHE_TTL_SETTING = Setting.timeSetting( @@ -217,9 +217,9 @@ public class ApiKeyService implements Closeable { private final ThreadPool threadPool; private final ApiKeyDocCache apiKeyDocCache; - // The API key secret is a Base64 encoded v4 UUID without padding. The UUID is 128 bits, i.e. 16 byte, - // which requires 22 digits of Base64 characters for encoding without padding. - // See also UUIDs.randomBase64UUIDSecureString + private static final int API_KEY_SECRET_NUM_BYTES = 16; + // The API key secret is a Base64 encoded string of 128 random bits. + // See getBase64SecureRandomString() private static final int API_KEY_SECRET_LENGTH = 22; private static final long EVICTION_MONITOR_INTERVAL_SECONDS = 300L; // 5 minutes private static final long EVICTION_MONITOR_INTERVAL_NANOS = EVICTION_MONITOR_INTERVAL_SECONDS * 1_000_000_000L; @@ -245,7 +245,7 @@ public ApiKeyService( this.securityIndex = securityIndex; this.clusterService = clusterService; this.enabled = XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.get(settings); - this.hasher = Hasher.resolve(PASSWORD_HASHING_ALGORITHM.get(settings)); + this.hasher = Hasher.resolve(STORED_HASH_ALGO_SETTING.get(settings)); this.settings = settings; this.inactiveApiKeysRemover = new InactiveApiKeysRemover(settings, client, clusterService); this.threadPool = threadPool; @@ -545,7 +545,7 @@ private void createApiKeyAndIndexIt( ) { final Instant created = clock.instant(); final Instant expiration = getApiKeyExpiration(created, request.getExpiration()); - final SecureString apiKey = UUIDs.randomBase64UUIDSecureString(); + final SecureString apiKey = getBase64SecureRandomString(API_KEY_SECRET_NUM_BYTES); assert ApiKey.Type.CROSS_CLUSTER != request.getType() || API_KEY_SECRET_LENGTH == apiKey.length() : "Invalid API key (name=[" + request.getName() + "], type=[" + request.getType() + "], length=[" + apiKey.length() + "])"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java index 0718742d362cb..f04c670eb1ea7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/ResetPasswordTool.java @@ -43,7 +43,7 @@ class ResetPasswordTool extends BaseRunAsSuperuserCommand { private final OptionSpec usernameOption; ResetPasswordTool() { - this(CommandLineHttpClient::new, environment -> KeyStoreWrapper.load(environment.configFile())); + this(CommandLineHttpClient::new, environment -> KeyStoreWrapper.load(environment.configDir())); } protected ResetPasswordTool( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java index 91c75c076881e..3c7fa029d4514 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java @@ -95,11 +95,11 @@ class SetupPasswordTool extends MultiCommand { SetupPasswordTool() { this(environment -> new CommandLineHttpClient(environment), environment -> { - KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(environment.configFile()); + KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(environment.configDir()); if (keyStoreWrapper == null) { throw new UserException( ExitCodes.CONFIG, - "Elasticsearch keystore file is missing [" + KeyStoreWrapper.keystorePath(environment.configFile()) + "]" + "Elasticsearch keystore file is missing [" + KeyStoreWrapper.keystorePath(environment.configDir()) + "]" ); } return keyStoreWrapper; @@ -142,7 +142,7 @@ class AutoSetup extends SetupCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configFile()); + terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configDir()); setupOptions(terminal, options, env); checkElasticKeystorePasswordValid(terminal, env); checkClusterHealth(terminal); @@ -198,7 +198,7 @@ class InteractiveSetup extends SetupCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { - terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configFile()); + terminal.println(Verbosity.VERBOSE, "Running with configuration path: " + env.configDir()); setupOptions(terminal, options, env); checkElasticKeystorePasswordValid(terminal, env); checkClusterHealth(terminal); @@ -298,7 +298,7 @@ void setupOptions(Terminal terminal, OptionSet options, Environment env) throws Settings settings = settingsBuilder.build(); elasticUserPassword = ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.get(settings); - final Environment newEnv = new Environment(settings, env.configFile()); + final Environment newEnv = new Environment(settings, env.configDir()); Environment.assertEquivalent(newEnv, env); client = clientFunction.apply(newEnv); @@ -354,7 +354,7 @@ void checkElasticKeystorePasswordValid(Terminal terminal, Environment env) throw terminal.errorPrintln("Possible causes include:"); terminal.errorPrintln(" * The password for the '" + elasticUser + "' user has already been changed on this cluster"); terminal.errorPrintln(" * Your elasticsearch node is running against a different keystore"); - terminal.errorPrintln(" This tool used the keystore at " + KeyStoreWrapper.keystorePath(env.configFile())); + terminal.errorPrintln(" This tool used the keystore at " + KeyStoreWrapper.keystorePath(env.configDir())); terminal.errorPrintln(""); terminal.errorPrintln( "You can use the `elasticsearch-reset-password` CLI tool to reset the password of the '" + elasticUser + "' user" diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java index 8b3f8ec09675a..0fafd6b63c03f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java @@ -338,7 +338,7 @@ public void cancelled() { } public static Path resolvePath(final Environment environment, final String jwkSetPath) { - final Path 
directoryPath = environment.configFile(); + final Path directoryPath = environment.configDir(); return directoryPath.resolve(jwkSetPath); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java index d5ef90f7f1664..65e72568cacf8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java @@ -101,7 +101,7 @@ public KerberosRealm(final RealmConfig config, final UserRoleMapper userRoleMapp } this.kerberosTicketValidator = kerberosTicketValidator; this.threadPool = threadPool; - this.keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + this.keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); validateKeytab(this.keytabPath); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java index aa1946f445670..65d2492e3b6b8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java @@ -365,7 +365,7 @@ private void validateAccessToken(AccessToken accessToken, JWT idToken) { * @throws IOException if the file cannot be read */ private JWKSet readJwkSetFromFile(String jwkSetPath) throws IOException, ParseException { - final Path path = realmConfig.env().configFile().resolve(jwkSetPath); + final Path path = realmConfig.env().configDir().resolve(jwkSetPath); // avoid using JWKSet.loadFile() as it does not close FileInputStream internally try { String jwkSet = AccessController.doPrivileged( @@ -814,7 +814,7 @@ IDTokenValidator createIdTokenValidator(boolean addFileWatcherIfRequired) { } private void setMetadataFileWatcher(String jwkSetPath) throws IOException { - final Path path = realmConfig.env().configFile().resolve(jwkSetPath); + final Path path = realmConfig.env().configDir().resolve(jwkSetPath); FileWatcher watcher = new PrivilegedFileWatcher(path); watcher.addListener(new FileListener(LOGGER, () -> this.idTokenValidator.set(createIdTokenValidator(false)))); watcherService.add(watcher, ResourceWatcherService.Frequency.MEDIUM); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java index 106b550a1e23c..1d69050d7ab25 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommand.java @@ -93,7 +93,7 @@ class SamlMetadataCommand extends KeyStoreAwareCommand { SamlMetadataCommand() { this((environment) -> { - KeyStoreWrapper ksWrapper = KeyStoreWrapper.load(environment.configFile()); + KeyStoreWrapper ksWrapper = KeyStoreWrapper.load(environment.configDir()); return ksWrapper; }); } @@ -458,7 +458,7 @@ private RealmConfig findRealm(Terminal terminal, OptionSet 
options, Environment final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier(SamlRealmSettings.TYPE, name); final Settings realmSettings = realms.get(identifier); if (realmSettings == null) { - throw new UserException(ExitCodes.CONFIG, "No such realm '" + name + "' defined in " + env.configFile()); + throw new UserException(ExitCodes.CONFIG, "No such realm '" + name + "' defined in " + env.configDir()); } if (isSamlRealm(identifier)) { return buildRealm(identifier, env, settings); @@ -471,10 +471,10 @@ private RealmConfig findRealm(Terminal terminal, OptionSet options, Environment .filter(entry -> isSamlRealm(entry.getKey())) .toList(); if (saml.isEmpty()) { - throw new UserException(ExitCodes.CONFIG, "There is no SAML realm configured in " + env.configFile()); + throw new UserException(ExitCodes.CONFIG, "There is no SAML realm configured in " + env.configDir()); } if (saml.size() > 1) { - terminal.errorPrintln("Using configuration in " + env.configFile()); + terminal.errorPrintln("Using configuration in " + env.configDir()); terminal.errorPrintln( "Found multiple SAML realms: " + saml.stream().map(Map.Entry::getKey).map(Object::toString).collect(Collectors.joining(", ")) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java index 9adfd15e23207..d82be264b2248 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java @@ -792,7 +792,7 @@ private static Tuple, ActionListener< private final ClusterService clusterService; - private final FeatureService featureService; - private final ReservedRoleNameChecker reservedRoleNameChecker; private final NamedXContentRegistry xContentRegistry; @@ -153,7 +150,6 @@ public NativeRolesStore( XPackLicenseState licenseState, SecurityIndexManager securityIndex, ClusterService clusterService, - FeatureService featureService, ReservedRoleNameChecker reservedRoleNameChecker, NamedXContentRegistry xContentRegistry ) { @@ -162,7 +158,6 @@ public NativeRolesStore( this.licenseState = licenseState; this.securityIndex = securityIndex; this.clusterService = clusterService; - this.featureService = featureService; this.reservedRoleNameChecker = reservedRoleNameChecker; this.xContentRegistry = xContentRegistry; this.enabled = settings.getAsBoolean(NATIVE_ROLES_ENABLED, true); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHash.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHash.java index 8cce453f17fd7..eaaa413f46de6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHash.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHash.java @@ -48,10 +48,10 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce final Hasher hasher = Hasher.resolve(XPackSettings.PASSWORD_HASHING_ALGORITHM.get(env.settings())); try ( SecureString elasticPassword = new SecureString(generatePassword(20)); - KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configFile(), () -> new SecureString(new char[0])) + 
KeyStoreWrapper nodeKeystore = KeyStoreWrapper.bootstrap(env.configDir(), () -> new SecureString(new char[0])) ) { nodeKeystore.setString(AUTOCONFIG_ELASTIC_PASSWORD_HASH.getKey(), hasher.hash(elasticPassword)); - nodeKeystore.save(env.configFile(), new char[0]); + nodeKeystore.save(env.configDir(), new char[0]); terminal.print(Terminal.Verbosity.NORMAL, elasticPassword.toString()); } catch (Exception e) { throw new UserException(ExitCodes.CANT_CREATE, "Failed to generate a password for the elastic user", e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java index 919f4531734fb..8f5fc96761cc9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/tool/CreateEnrollmentTokenTool.java @@ -36,7 +36,7 @@ class CreateEnrollmentTokenTool extends BaseRunAsSuperuserCommand { CreateEnrollmentTokenTool() { this( environment -> new CommandLineHttpClient(environment), - environment -> KeyStoreWrapper.load(environment.configFile()), + environment -> KeyStoreWrapper.load(environment.configDir()), environment -> new ExternalEnrollmentTokenGenerator(environment) ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 39c7a45d51dfd..41bda63292de0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -513,7 +513,7 @@ private Tuple checkIndexAvailable(ClusterState state) { if (routingTable != null && routingTable.allPrimaryShardsActive()) { allPrimaryShards = true; } - if (routingTable != null && routingTable.readyForSearch(state)) { + if (routingTable != null && routingTable.readyForSearch()) { searchShards = true; } if (allPrimaryShards == false || searchShards == false) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java index 2f45bafe493bb..542bbbe086cc5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/BaseRunAsSuperuserCommand.java @@ -93,7 +93,7 @@ public final void execute(Terminal terminal, OptionSet options, Environment env, settingsBuilder.setSecureSettings(keyStoreWrapper); } settings = settingsBuilder.build(); - newEnv = new Environment(settings, env.configFile()); + newEnv = new Environment(settings, env.configDir()); } else { newEnv = env; settings = env.settings(); diff --git a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml index 636627240bf4c..0695c8e5766f8 100644 --- a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,9 +1,11 @@ org.elasticsearch.security: - 
set_https_connection_properties # for CommandLineHttpClient io.netty.transport: + - manage_threads - inbound_network - outbound_network io.netty.common: + - manage_threads - inbound_network - outbound_network org.opensaml.xmlsec.impl: diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 5c6c3e8c7933c..2b4b979fbaaa3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -32,14 +32,11 @@ import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.SlowLogFieldProvider; import org.elasticsearch.index.analysis.AnalysisRegistry; @@ -203,7 +200,6 @@ protected SSLService getSslService() { private Collection createComponentsUtil(Settings settings) throws Exception { Environment env = TestEnvironment.newEnvironment(settings); - NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(8), BuildVersion.current(), IndexVersion.current()); ThreadPool threadPool = mock(ThreadPool.class); ClusterService clusterService = mock(ClusterService.class); settings = Security.additionalSettings(settings, true); @@ -227,7 +223,6 @@ private Collection createComponentsUtil(Settings settings) throws Except mock(ScriptService.class), xContentRegistry(), env, - nodeMetadata, TestIndexNameExpressionResolver.newInstance(threadContext), TelemetryProvider.NOOP, mock(PersistentTasksService.class) @@ -548,7 +543,7 @@ public void testValidateForFipsKeystoreWithImplicitJksType() { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash() + Hasher.getAvailableAlgoStoredPasswordHash() .stream() .filter(alg -> alg.startsWith("pbkdf2") == false) .collect(Collectors.toList()) @@ -567,7 +562,10 @@ public void testValidateForFipsKeystoreWithExplicitJksType() { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2")).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2")) + .collect(Collectors.toList()) ) ) .build(); @@ -581,7 +579,7 @@ public void testValidateForFipsInvalidPasswordHashingAlgorithm() { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash() + Hasher.getAvailableAlgoStoredPasswordHash() .stream() .filter(alg -> alg.startsWith("pbkdf2") == false) .collect(Collectors.toList()) @@ -626,7 +624,7 @@ public void testValidateForFipsMultipleValidationErrors() { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash() + Hasher.getAvailableAlgoStoredPasswordHash() .stream() .filter(alg -> alg.startsWith("pbkdf2") == false) .collect(Collectors.toList()) @@ -646,19 +644,28 @@ public void 
testValidateForFipsNoErrorsOrLogs() throws IllegalAccessException { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2")).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2")) + .collect(Collectors.toList()) ) ) .put( XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2")).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2")) + .collect(Collectors.toList()) ) ) .put( - ApiKeyService.PASSWORD_HASHING_ALGORITHM.getKey(), + ApiKeyService.STORED_HASH_ALGO_SETTING.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2")).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2")) + .collect(Collectors.toList()) ) ) .put( @@ -683,13 +690,37 @@ public void testValidateForFipsNonFipsCompliantCacheHashAlgoWarningLog() throws assertThatLogger(() -> Security.validateForFips(settings), Security.class, logEventForNonCompliantCacheHash(key)); } - public void testValidateForFipsNonFipsCompliantStoredHashAlgoWarningLog() throws IllegalAccessException { - String key = randomFrom(ApiKeyService.PASSWORD_HASHING_ALGORITHM, XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM).getKey(); + public void testValidateForFipsNonFipsCompliantStoredHashAlgoWarningLog() { + String key = XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.getKey(); final Settings settings = Settings.builder() .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) - .put(key, randomNonFipsCompliantStoredHash()) + .put(key, randomNonFipsCompliantStoredPasswordHash()) .build(); - assertThatLogger(() -> Security.validateForFips(settings), Security.class, logEventForNonCompliantStoredHash(key)); + assertThatLogger(() -> Security.validateForFips(settings), Security.class, logEventForNonCompliantStoredPasswordHash(key)); + } + + public void testValidateForFipsNonFipsCompliantApiKeyStoredHashAlgoWarningLog() { + var nonCompliant = randomFrom( + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2") == false && alg.startsWith("ssha256") == false) + .collect(Collectors.toList()) + ); + String key = ApiKeyService.STORED_HASH_ALGO_SETTING.getKey(); + final Settings settings = Settings.builder().put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true).put(key, nonCompliant).build(); + assertThatLogger(() -> Security.validateForFips(settings), Security.class, logEventForNonCompliantStoredApiKeyHash(key)); + } + + public void testValidateForFipsFipsCompliantApiKeyStoredHashAlgoWarningLog() { + var compliant = randomFrom( + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2") || alg.startsWith("ssha256")) + .collect(Collectors.toList()) + ); + String key = ApiKeyService.STORED_HASH_ALGO_SETTING.getKey(); + final Settings settings = Settings.builder().put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true).put(key, compliant).build(); + assertThatLogger(() -> Security.validateForFips(settings), Security.class); } public void testValidateForMultipleNonFipsCompliantCacheHashAlgoWarningLogs() throws IllegalAccessException { @@ -1135,9 +1166,12 @@ private String randomNonFipsCompliantCacheHash() { ); } - private String 
randomNonFipsCompliantStoredHash() { + private String randomNonFipsCompliantStoredPasswordHash() { return randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2") == false).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2") == false) + .collect(Collectors.toList()) ); } @@ -1153,7 +1187,19 @@ private MockLog.SeenEventExpectation logEventForNonCompliantCacheHash(String set ); } - private MockLog.SeenEventExpectation logEventForNonCompliantStoredHash(String settingKey) { + private MockLog.SeenEventExpectation logEventForNonCompliantStoredApiKeyHash(String settingKey) { + return new MockLog.SeenEventExpectation( + "stored API key hash not fips compliant", + Security.class.getName(), + Level.WARN, + "[*] is not recommended for stored API key hashing in a FIPS 140 JVM. " + + "The recommended hasher for [" + + settingKey + + "] is SSHA256." + ); + } + + private MockLog.SeenEventExpectation logEventForNonCompliantStoredPasswordHash(String settingKey) { return new MockLog.SeenEventExpectation( "stored hash not fips compliant", Security.class.getName(), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java index 0ed6d92fd551d..3ad55d5f64698 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java @@ -69,7 +69,7 @@ public void setup() throws Exception { final Path tempDir = createTempDir(); final Path httpCaPath = tempDir.resolve("httpCa.p12"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/httpCa.p12"), httpCaPath); - when(env.configFile()).thenReturn(tempDir); + when(env.configDir()).thenReturn(tempDir); final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("keystore.secure_password", "password"); final Settings settings = Settings.builder().put("keystore.path", httpCaPath).setSecureSettings(secureSettings).build(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java index c85684a60e449..62af3d74410cc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java @@ -78,7 +78,7 @@ public void testDoExecute() throws Exception { Path transportPath = tempDir.resolve("transport.p12"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/httpCa.p12"), httpCaPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/transport.p12"), transportPath); - when(env.configFile()).thenReturn(tempDir); + when(env.configDir()).thenReturn(tempDir); final SSLService sslService = mock(SSLService.class); final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("keystore.secure_password", "password");
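
The SecurityTests changes above all exercise one selection pattern: the renamed Hasher.getAvailableAlgoStoredPasswordHash() lists the algorithm names accepted for stored hashes, and the FIPS tests filter that list, treating the PBKDF2 family as compliant for passwords and service tokens while additionally accepting SSHA256 for the new ApiKeyService.STORED_HASH_ALGO_SETTING. A minimal sketch of building a fully FIPS-clean configuration from that pattern, assuming only the signatures visible in this diff (import paths taken from the surrounding codebase; the concrete value "ssha256" comes from the filters above):

import java.util.List;
import java.util.stream.Collectors;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.ApiKeyService;

public class FipsHashSelectionSketch {
    public static Settings fipsCompliantHashingSettings() {
        // The PBKDF2 family is the FIPS-compliant choice for password and
        // service-token stored hashes; the filtered list is non-empty in practice.
        List<String> pbkdf2 = Hasher.getAvailableAlgoStoredPasswordHash()
            .stream()
            .filter(alg -> alg.startsWith("pbkdf2"))
            .collect(Collectors.toList());
        // Stored API key hashes additionally accept SSHA256, per the new
        // testValidateForFipsFipsCompliantApiKeyStoredHashAlgoWarningLog above.
        // Security.validateForFips(settings) should log no warnings for these values.
        return Settings.builder()
            .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true)
            .put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), pbkdf2.get(0))
            .put(XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.getKey(), pbkdf2.get(0))
            .put(ApiKeyService.STORED_HASH_ALGO_SETTING.getKey(), "ssha256")
            .build();
    }
}
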
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java index df5cebdf735ac..af2a5c11e6e73 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java @@ -76,7 +76,7 @@ public void testWithHashedPasswordWithDifferentAlgo() throws IOException { } public void testWithHashedPasswordNotHash() { - final Hasher systemHasher = Hasher.valueOf(randomFrom(Hasher.getAvailableAlgoStoredHash()).toUpperCase(Locale.ROOT)); + final Hasher systemHasher = Hasher.valueOf(randomFrom(Hasher.getAvailableAlgoStoredPasswordHash()).toUpperCase(Locale.ROOT)); final char[] hash = randomAlphaOfLength(20).toCharArray(); final String json = Strings.format(""" { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilderTests.java index cb30c8f117f22..018ffa7b09651 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilderTests.java @@ -205,7 +205,7 @@ public void testWithDifferentPasswordHashingAlgorithm() throws IOException { } public void testWithPasswordHashThatsNotReallyAHash() throws IOException { - final Hasher systemHasher = Hasher.valueOf(randomFrom(Hasher.getAvailableAlgoStoredHash()).toUpperCase(Locale.ROOT)); + final Hasher systemHasher = Hasher.valueOf(randomFrom(Hasher.getAvailableAlgoStoredPasswordHash()).toUpperCase(Locale.ROOT)); final char[] hash = randomAlphaOfLengthBetween(14, 20).toCharArray(); final String json = Strings.format(""" { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 185669a6a203b..c7632943b63b1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; @@ -3008,48 +3007,6 @@ public void testGetApiKeyMetadata() throws IOException { assertThat(e.getMessage(), containsString("authentication realm must be [_es_api_key]")); } - public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() { - final String apiKeyId = randomAlphaOfLengthBetween(5, 8); - final Set userRoleDescriptors = Set.copyOf( - randomList( - 2, - 5, - () -> RoleDescriptorTestHelper.builder() - .allowReservedMetadata(randomBoolean()) - .allowRemoteIndices(randomBoolean()) - .allowRestriction(randomBoolean()) - .allowRemoteClusters(false) - .build() - ) - ); - - // Selecting random unsupported version. 
- final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY) - ); - - final Set result = ApiKeyService.maybeRemoveRemotePrivileges(userRoleDescriptors, minTransportVersion, apiKeyId); - assertThat(result.stream().anyMatch(RoleDescriptor::hasRemoteIndicesPrivileges), equalTo(false)); - assertThat(result.size(), equalTo(userRoleDescriptors.size())); - - // Roles for which warning headers are added. - final List userRoleNamesWithRemoteIndicesPrivileges = userRoleDescriptors.stream() - .filter(RoleDescriptor::hasRemoteIndicesPrivileges) - .map(RoleDescriptor::getName) - .sorted() - .toList(); - - if (false == userRoleNamesWithRemoteIndicesPrivileges.isEmpty()) { - assertWarnings( - "Removed API key's remote indices privileges from role(s) " - + userRoleNamesWithRemoteIndicesPrivileges - + ". Remote indices are not supported by all nodes in the cluster. " - ); - } - } - public void testMaybeRemoveRemoteClusterPrivilegesWithUnsupportedVersion() { final String apiKeyId = randomAlphaOfLengthBetween(5, 8); final Set userRoleDescriptors = Set.copyOf( @@ -3124,52 +3081,6 @@ public void testBuildDelimitedStringWithLimit() { assertThat(e.getMessage(), equalTo("limit must be positive number")); } - public void testCreateCrossClusterApiKeyMinVersionConstraint() { - final Authentication authentication = randomValueOtherThanMany( - Authentication::isApiKey, - () -> AuthenticationTestHelper.builder().build() - ); - final AbstractCreateApiKeyRequest request = mock(AbstractCreateApiKeyRequest.class); - when(request.getType()).thenReturn(ApiKey.Type.CROSS_CLUSTER); - - final ClusterService clusterService = mock(ClusterService.class); - when(clusterService.getClusterSettings()).thenReturn( - new ClusterSettings(Settings.EMPTY, Set.of(ApiKeyService.DELETE_RETENTION_PERIOD, ApiKeyService.DELETE_INTERVAL)) - ); - final ClusterState clusterState = mock(ClusterState.class); - when(clusterService.state()).thenReturn(clusterState); - final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY) - ); - when(clusterState.getMinTransportVersion()).thenReturn(minTransportVersion); - - final ApiKeyService service = new ApiKeyService( - Settings.EMPTY, - clock, - client, - securityIndex, - clusterService, - cacheInvalidatorRegistry, - threadPool, - MeterRegistry.NOOP - ); - - final PlainActionFuture future = new PlainActionFuture<>(); - service.createApiKey(authentication, request, Set.of(), future); - final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, future::actionGet); - - assertThat( - e.getMessage(), - containsString( - "all nodes must have version [" - + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion() - + "] or higher to support creating cross cluster API keys" - ) - ); - } - public void testAuthenticationFailureWithApiKeyTypeMismatch() throws Exception { final Settings settings = Settings.builder().put(XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.getKey(), true).build(); final ApiKeyService service = spy(createApiKeyService(settings)); @@ -3268,73 +3179,6 @@ public void testValidateApiKeyTypeAndExpiration() throws IOException { assertThat(auth3.getMetadata(), hasEntry(API_KEY_TYPE_KEY, apiKeyDoc3.type.value())); } - 
public void testCreateOrUpdateApiKeyWithWorkflowsRestrictionForUnsupportedVersion() { - final Authentication authentication = AuthenticationTestHelper.builder().build(); - final ClusterService clusterService = mock(ClusterService.class); - when(clusterService.getClusterSettings()).thenReturn( - new ClusterSettings(Settings.EMPTY, Set.of(ApiKeyService.DELETE_RETENTION_PERIOD, ApiKeyService.DELETE_INTERVAL)) - ); - final ClusterState clusterState = mock(ClusterState.class); - when(clusterService.state()).thenReturn(clusterState); - final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(WORKFLOWS_RESTRICTION_VERSION) - ); - when(clusterState.getMinTransportVersion()).thenReturn(minTransportVersion); - - final ApiKeyService service = new ApiKeyService( - Settings.EMPTY, - clock, - client, - securityIndex, - clusterService, - cacheInvalidatorRegistry, - threadPool, - MeterRegistry.NOOP - ); - - final List roleDescriptorsWithWorkflowsRestriction = randomList( - 1, - 3, - () -> randomRoleDescriptorWithWorkflowsRestriction() - ); - - final AbstractCreateApiKeyRequest createRequest = mock(AbstractCreateApiKeyRequest.class); - when(createRequest.getType()).thenReturn(ApiKey.Type.REST); - when(createRequest.getRoleDescriptors()).thenReturn(roleDescriptorsWithWorkflowsRestriction); - - final PlainActionFuture createFuture = new PlainActionFuture<>(); - service.createApiKey(authentication, createRequest, Set.of(), createFuture); - final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, createFuture::actionGet); - assertThat( - e1.getMessage(), - containsString( - "all nodes must have version [" - + WORKFLOWS_RESTRICTION_VERSION.toReleaseVersion() - + "] or higher to support restrictions for API keys" - ) - ); - - final BulkUpdateApiKeyRequest updateRequest = new BulkUpdateApiKeyRequest( - randomList(1, 3, () -> randomAlphaOfLengthBetween(3, 5)), - roleDescriptorsWithWorkflowsRestriction, - Map.of(), - ApiKeyTests.randomFutureExpirationTime() - ); - final PlainActionFuture updateFuture = new PlainActionFuture<>(); - service.updateApiKeys(authentication, updateRequest, Set.of(), updateFuture); - final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, createFuture::actionGet); - assertThat( - e2.getMessage(), - containsString( - "all nodes must have version [" - + WORKFLOWS_RESTRICTION_VERSION.toReleaseVersion() - + "] or higher to support restrictions for API keys" - ) - ); - } - public void testValidateOwnerUserRoleDescriptorsWithWorkflowsRestriction() { final Authentication authentication = AuthenticationTestHelper.builder().build(); final ClusterService clusterService = mock(ClusterService.class); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java index aed39b24f217d..31c6d6f0c2341 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.service.ClusterService; @@ -17,7 +16,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.action.apikey.ApiKey; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -36,7 +34,6 @@ import java.io.IOException; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -75,55 +72,6 @@ public void init() throws Exception { ); } - public void testAuthenticateThrowsOnUnsupportedMinVersions() throws IOException { - when(clusterService.state().getMinTransportVersion()).thenReturn( - TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY) - ) - ); - final var authcContext = mock(Authenticator.Context.class, Mockito.RETURNS_DEEP_STUBS); - when(authcContext.getThreadContext()).thenReturn(threadContext); - final var crossClusterAccessHeaders = new CrossClusterAccessHeaders( - CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(), - AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo() - ); - crossClusterAccessHeaders.writeToContext(threadContext); - final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class); - when(authcContext.getRequest()).thenReturn(auditableRequest); - when(auditableRequest.exceptionProcessingRequest(any(), any())).thenAnswer( - i -> new ElasticsearchSecurityException("potato", (Exception) i.getArguments()[0]) - ); - doAnswer( - invocationOnMock -> new Authenticator.Context( - threadContext, - auditableRequest, - mock(Realms.class), - (AuthenticationToken) invocationOnMock.getArguments()[2] - ) - ).when(authenticationService).newContext(anyString(), any(), any()); - - final PlainActionFuture future = new PlainActionFuture<>(); - crossClusterAccessAuthenticationService.authenticate("action", mock(TransportRequest.class), future); - final ExecutionException actual = expectThrows(ExecutionException.class, future::get); - - assertThat(actual.getCause().getCause(), instanceOf(IllegalArgumentException.class)); - assertThat( - actual.getCause().getCause().getMessage(), - equalTo( - "all nodes must have version [" - + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion() - + "] or higher to support cross cluster requests through the dedicated remote cluster port" - ) - ); - verify(auditableRequest).exceptionProcessingRequest( - any(Exception.class), - credentialsArgMatches(crossClusterAccessHeaders.credentials()) - ); - verifyNoMoreInteractions(auditableRequest); - } - public void testAuthenticationSuccessOnSuccessfulAuthentication() throws IOException, ExecutionException, InterruptedException { final var crossClusterAccessHeaders = new CrossClusterAccessHeaders( CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(), diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java index b84282bd40660..417725d908b41 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java @@ -72,7 +72,7 @@ public void shutdown() { } public void testStore_ConfiguredWithUnreadableFile() throws Exception { - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path file = configDir.resolve("users"); @@ -88,7 +88,7 @@ public void testStore_ConfiguredWithUnreadableFile() throws Exception { public void testStore_AutoReload() throws Exception { Path users = getDataPath("users"); - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path file = configDir.resolve("users"); Files.copy(users, file, StandardCopyOption.REPLACE_EXISTING); @@ -149,7 +149,7 @@ private RealmConfig getRealmConfig() { public void testStore_AutoReload_WithParseFailures() throws Exception { Path users = getDataPath("users"); - Path confDir = env.configFile(); + Path confDir = env.configDir(); Files.createDirectories(confDir); Path testUsers = confDir.resolve("users"); Files.copy(users, testUsers, StandardCopyOption.REPLACE_EXISTING); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java index 258770b10c743..759f57a4e0174 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java @@ -285,7 +285,7 @@ private Path writeUsersRoles(String input) throws Exception { } private Path getUsersRolesPath() throws IOException { - Path xpackConf = env.configFile(); + Path xpackConf = env.configDir(); Files.createDirectories(xpackConf); return xpackConf.resolve("users_roles"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java index 9800cb73faf6a..3d05b7540596a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwkSetLoaderTests.java @@ -41,7 +41,7 @@ public void testConcurrentReloadWillBeQueuedAndShareTheResults() throws IOExcept final RealmConfig realmConfig = mock(RealmConfig.class); when(realmConfig.getSetting(JwtRealmSettings.PKC_JWKSET_PATH)).thenReturn("jwkset.json"); final Environment env = mock(Environment.class); - when(env.configFile()).thenReturn(tempDir); + when(env.configDir()).thenReturn(tempDir); when(realmConfig.env()).thenReturn(env); final JwkSetLoader jwkSetLoader = spy(new JwkSetLoader(realmConfig, List.of(), null)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java index f1927876eba5f..2c9e57df60e26 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidatorTests.java @@ -59,7 +59,7 @@ public void setup() throws Exception { final RealmConfig realmConfig = mock(RealmConfig.class); when(realmConfig.getSetting(JwtRealmSettings.PKC_JWKSET_PATH)).thenReturn("jwkset.json"); final Environment env = mock(Environment.class); - when(env.configFile()).thenReturn(tempDir); + when(env.configDir()).thenReturn(tempDir); when(realmConfig.env()).thenReturn(env); validateSignatureAttemptCounter = new AtomicInteger(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java index f01914a7fed06..b15edd943db52 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java @@ -63,7 +63,7 @@ public void testAuthenticateDifferentFailureScenarios() throws LoginException, G final boolean throwExceptionForInvalidTicket = validTicket ? false : randomBoolean(); final boolean throwLoginException = randomBoolean(); final byte[] decodedTicket = randomByteArrayOfLength(5); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); if (validTicket) { mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); @@ -144,7 +144,7 @@ public void testDelegatedAuthorizationFailedToResolve() throws Exception { settings = Settings.builder().put(settings).putList("authorization_realms", "other_realm").build(); final KerberosRealm kerberosRealm = createKerberosRealm(Collections.singletonList(otherRealm), username); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java index b1ddb631a8dd2..c6431a8d81685 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java @@ -48,7 +48,7 @@ public void testAuthenticateWithCache() throws LoginException, GSSException { metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[0]), null, null, metadata, true); final byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); @@ -78,7 +78,7 @@ public void testCacheInvalidationScenarios() throws LoginException, GSSException final String authNUsername = randomFrom(userNames); final byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(authNUsername, outToken), null); final String expectedUsername = maybeRemoveRealmName(authNUsername); @@ -137,7 +137,7 @@ public void testAuthenticateWithValidTicketSucessAuthnWithUserDetailsWhenCacheDi metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[0]), null, null, metadata, true); final byte[] decodedTicket = randomByteArrayOfLength(10); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java index 177507ce6d792..e4718f3e95019 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java @@ -82,7 +82,7 @@ public void testAuthenticateWithValidTicketSucessAuthnWithUserDetails() throws L metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, metadata, true); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = 
config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); @@ -106,7 +106,7 @@ public void testFailedAuthorization() throws LoginException, GSSException { final String username = randomPrincipalName(); final KerberosRealm kerberosRealm = createKerberosRealm(username); final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>("does-not-exist@REALM", "out-token"), null); @@ -236,7 +236,7 @@ public void testDelegatedAuthorization() throws Exception { final KerberosRealm kerberosRealm = createKerberosRealm(Collections.singletonList(otherRealm), username); final User expectedUser = lookupUser; final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); - final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); + final Path keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java index 00b55e5b48337..0f2a720660afd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/FileServiceAccountTokenStoreTests.java @@ -128,7 +128,7 @@ public void testParseFileNotExists() throws IllegalAccessException, IOException public void testAutoReload() throws Exception { Path serviceTokensSourceFile = getDataPath("service_tokens"); - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path targetFile = configDir.resolve("service_tokens"); Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING); @@ -225,7 +225,7 @@ public void testAutoReload() throws Exception { public void testFindTokensFor() throws IOException { Path serviceTokensSourceFile = getDataPath("service_tokens"); - Path configDir = env.configFile(); + Path configDir = env.configDir(); Files.createDirectories(configDir); Path targetFile = configDir.resolve("service_tokens"); 
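
Each of the test fixes in this stretch is the same mechanical rename: Environment.configFile() is now Environment.configDir(), with unchanged behavior (the method always returned the configuration directory; the old name was simply misleading). A minimal sketch of the updated stubbing pattern, assuming the Mockito mocks these tests already use:

import java.nio.file.Files;
import java.nio.file.Path;

import org.elasticsearch.env.Environment;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

class ConfigDirStubSketch {
    // Builds a mocked Environment whose config directory is the given temp dir.
    static Environment envWithConfigDir(Path tempDir) throws Exception {
        Environment env = mock(Environment.class);
        when(env.configDir()).thenReturn(tempDir); // was: when(env.configFile())...
        Files.createDirectories(env.configDir());
        // Callers then resolve files against it, e.g.
        // env.configDir().resolve("service_tokens"), exactly as before.
        return env;
    }
}
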
Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java index 6332e63ca5958..ee025fe64ff9a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java @@ -76,8 +76,8 @@ public class DnRoleMapperTests extends ESTestCase { public void init() throws IOException { settings = Settings.builder().put("resource.reload.interval.high", "100ms").put("path.home", createTempDir()).build(); env = TestEnvironment.newEnvironment(settings); - if (Files.exists(env.configFile()) == false) { - Files.createDirectory(env.configFile()); + if (Files.exists(env.configDir()) == false) { + Files.createDirectory(env.configDir()); } threadPool = new TestThreadPool("test"); } @@ -100,7 +100,7 @@ public void testMapper_ConfiguredWithUnreadableFile() throws Exception { public void testMapper_AutoReload() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); final CountDownLatch latch = new CountDownLatch(1); @@ -144,7 +144,7 @@ public void testMapper_AutoReload() throws Exception { public void testMapper_AutoReload_WithParseFailures() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); final CountDownLatch latch = new CountDownLatch(1); @@ -171,7 +171,7 @@ public void testMapper_AutoReload_WithParseFailures() throws Exception { public void testMapperAutoReloadWithoutListener() throws Exception { Path roleMappingFile = getDataPath("role_mapping.yml"); - Path file = env.configFile().resolve("test_role_mapping.yml"); + Path file = env.configDir().resolve("test_role_mapping.yml"); Files.copy(roleMappingFile, file, StandardCopyOption.REPLACE_EXISTING); try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index f7dc725c3f07d..6099b7351cf76 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -465,22 +465,6 @@ public void testSelectorsDoNotImpactWildcardDetection() { ); } - public void testWildcardSelectorsAreNotAllowedInShardLevelRequests() { - ShardSearchRequest request = mock(ShardSearchRequest.class); - when(request.indices()).thenReturn(new String[] { "index10::*" }); - IllegalArgumentException exception = expectThrows( - IllegalArgumentException.class, - () -> defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards(TransportSearchAction.TYPE.name() + "[s]", request) - ); - assertThat( - 
exception, - throwableWithMessage( - "the action indices:data/read/search[s] does not support wildcard selectors;" - + " the provided index expression(s) [index10::*] are not allowed" - ) - ); - } - public void testAllIsNotAllowedInShardLevelRequests() { ShardSearchRequest request = mock(ShardSearchRequest.class); final boolean literalAll = randomBoolean(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 2b8a77d63588a..4228563852a53 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BulkRequest; @@ -51,7 +50,6 @@ import org.elasticsearch.license.TestUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -64,8 +62,6 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; @@ -85,20 +81,17 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_FORMAT_SETTING; import static org.elasticsearch.indices.SystemIndexDescriptor.VERSION_META_KEY; -import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomClusterPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; -import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; 
import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -155,7 +148,6 @@ private NativeRolesStore createRoleStoreForTest(Settings settings) { TestUtils.newTestLicenseState(), securityIndex, clusterService, - mock(FeatureService.class), new ReservedRoleNameChecker.Default(), mock(NamedXContentRegistry.class) ); @@ -411,7 +403,6 @@ public void testPutOfRoleWithFlsDlsUnlicensed() throws IOException { licenseState, securityIndex, clusterService, - mock(FeatureService.class), mock(ReservedRoleNameChecker.class), mock(NamedXContentRegistry.class) ); @@ -465,101 +456,6 @@ public void testPutOfRoleWithFlsDlsUnlicensed() throws IOException { assertThat(e.getMessage(), containsString("field and document level security")); } - public void testPutRoleWithRemotePrivsUnsupportedMinNodeVersion() throws IOException { - // Init for validation - new ReservedRolesStore(Set.of("superuser")); - enum TEST_MODE { - REMOTE_INDICES_PRIVS, - REMOTE_CLUSTER_PRIVS, - REMOTE_INDICES_AND_CLUSTER_PRIVS - } - for (TEST_MODE testMode : TEST_MODE.values()) { - // default to both remote indices and cluster privileges and use the switch below to remove one or the other - TransportVersion transportVersionBeforeAdvancedRemoteClusterSecurity = TransportVersionUtils.getPreviousVersion( - TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY - ); - RoleDescriptor.RemoteIndicesPrivileges[] remoteIndicesPrivileges = new RoleDescriptor.RemoteIndicesPrivileges[] { - RoleDescriptor.RemoteIndicesPrivileges.builder("remote").privileges("read").indices("index").build() }; - RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions().addGroup( - new RemoteClusterPermissionGroup( - RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), - new String[] { "remote" } - ) - ); - switch (testMode) { - case REMOTE_CLUSTER_PRIVS -> { - transportVersionBeforeAdvancedRemoteClusterSecurity = TransportVersionUtils.getPreviousVersion( - ROLE_REMOTE_CLUSTER_PRIVS - ); - remoteIndicesPrivileges = null; - } - case REMOTE_INDICES_PRIVS -> remoteClusterPermissions = null; - } - final Client client = mock(Client.class); - - final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - transportVersionBeforeAdvancedRemoteClusterSecurity - ); - final ClusterService clusterService = mockClusterServiceWithMinNodeVersion(minTransportVersion); - - final XPackLicenseState licenseState = mock(XPackLicenseState.class); - - final SecuritySystemIndices systemIndices = new SecuritySystemIndices(clusterService.getSettings()); - final FeatureService featureService = mock(FeatureService.class); - systemIndices.init(client, featureService, clusterService); - final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); - - final NativeRolesStore rolesStore = new NativeRolesStore( - Settings.EMPTY, - client, - licenseState, - securityIndex, - clusterService, - mock(FeatureService.class), - mock(ReservedRoleNameChecker.class), - mock(NamedXContentRegistry.class) - ); - // setup the roles store so the security index exists - securityIndex.clusterChanged(new ClusterChangedEvent("source", getClusterStateWithSecurityIndex(), getEmptyClusterState())); - - RoleDescriptor remoteIndicesRole = new RoleDescriptor( - "remote", - null, - null, - null, - null, - null, - null, - null, - remoteIndicesPrivileges, - remoteClusterPermissions, - null, - null - ); - PlainActionFuture future = new 
PlainActionFuture<>(); - putRole(rolesStore, remoteIndicesRole, future); - IllegalStateException e = expectThrows( - IllegalStateException.class, - String.format(Locale.ROOT, "expected IllegalStateException, but not thrown for mode [%s]", testMode), - future::actionGet - ); - assertThat( - e.getMessage(), - containsString( - "all nodes must have version [" - + (TEST_MODE.REMOTE_CLUSTER_PRIVS.equals(testMode) - ? ROLE_REMOTE_CLUSTER_PRIVS.toReleaseVersion() - : TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion()) - + "] or higher to support remote " - + (remoteIndicesPrivileges != null ? "indices" : "cluster") - + " privileges" - ) - ); - } - } - public void testGetRoleWhenDisabled() throws Exception { final Settings settings = Settings.builder().put(NativeRolesStore.NATIVE_ROLES_ENABLED, "false").build(); NativeRolesStore store = createRoleStoreForTest(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java index 34cfde8dc862f..972c00b59b1f2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/FileOperatorUsersStoreTests.java @@ -484,7 +484,7 @@ public void testParseInvalidConfig() throws IOException { } private Path getOperatorUsersPath() throws IOException { - Path xpackConf = env.configFile(); + Path xpackConf = env.configDir(); Files.createDirectories(xpackConf); return xpackConf.resolve("operator_users.yml"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index 0663172fa2e9c..8a79e2232cfe5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -370,7 +370,21 @@ public void testHttpHeaderAuthnBypassHeaderValidator() throws Exception { new NetworkService(List.of()), testThreadPool, xContentRegistry(), - new NullDispatcher(), + new HttpServerTransport.Dispatcher() { + @Override + public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) { + fail("Request should not be dispatched"); + } + + @Override + public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, Throwable cause) { + try { + channel.sendResponse(new RestResponse(channel, (Exception) cause)); + } catch (IOException e) { + fail(e, "Unexpected exception dispatching bad request"); + } + } + }, randomClusterSettings(), new SharedGroupFactory(settings), Tracer.NOOP, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java index 9bb0c8af6f481..2ac2d4ebf0c32 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLErrorMessageFileTests.java @@ -375,7 +375,7 @@ private void 
checkBlockedResource( + " [" + fileName + "] because access to read the file is blocked; SSL resources should be placed in the [" - + env.configFile().toAbsolutePath().toString() + + env.configDir().toAbsolutePath().toString() + "] directory"; Throwable exception = expectFailure(settings); @@ -477,7 +477,7 @@ private Settings.Builder configureWorkingKeystore(String prefix, Settings.Builde private ElasticsearchException expectFailure(Settings.Builder settings) { return expectThrows( ElasticsearchException.class, - () -> new SSLService(new Environment(buildEnvSettings(settings.build()), env.configFile())) + () -> new SSLService(new Environment(buildEnvSettings(settings.build()), env.configDir())) ); } diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index fa6a908891400..5db5abd3d60f8 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; @@ -45,7 +46,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) // some tests rely on the translog not being flushed .setting("indices.memory.shard_inactive_time", "60m") diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/AllocationFailuresResetOnShutdownIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/AllocationFailuresResetOnShutdownIT.java index b6c6843d766f9..320a0025b1026 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/AllocationFailuresResetOnShutdownIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/AllocationFailuresResetOnShutdownIT.java @@ -175,10 +175,10 @@ public void beforeIndexShardCreated(ShardRouting routing, Settings indexSettings prepareCreate("index1", indexSettings(1, 0)).execute(); - // await all relocation attempts are exhausted + // await all allocation attempts are exhausted var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); ensureRed("index1"); - { + assertBusy(() -> { var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); var index = state.getRoutingTable().index("index1"); assertNotNull(index); @@ -186,7 +186,7 @@ public void beforeIndexShardCreated(ShardRouting routing, Settings indexSettings assertNotNull(shard); assertNotNull(shard.unassignedInfo()); assertThat(maxAttempts, equalTo(shard.unassignedInfo().failedAllocations())); - } + }); 
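
For context on the plain `{ ... }` block becoming `assertBusy(() -> { ... })` in AllocationFailuresResetOnShutdownIT above: ensureRed only waits for the index to turn red, not for the final failed allocation attempt to appear in a published cluster state, so an unretried block could read a stale failedAllocations() count. assertBusy (from ESTestCase) re-runs the assertions until they pass or a timeout expires. A sketch of the retried check, using the same names as the test (clusterAdmin, safeGet, TEST_REQUEST_TIMEOUT and maxAttempts are members or locals of that integration test):

assertBusy(() -> {
    // Re-read the latest published cluster state on every retry.
    var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState();
    var shard = state.getRoutingTable().index("index1").shard(0).primaryShard();
    assertNotNull(shard);
    assertNotNull(shard.unassignedInfo());
    // Holds only once every allocation retry has failed and been published.
    assertThat(maxAttempts, equalTo(shard.unassignedInfo().failedAllocations()));
});
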
failAllocation.set(false); @@ -240,13 +240,13 @@ public void beforeIndexShardCreated(ShardRouting routing, Settings indexSettings // await all allocation attempts are exhausted var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); ensureRed("index1"); - { + assertBusy(() -> { var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); var shard = state.getRoutingTable().index("index1").shard(0).primaryShard(); assertNotNull(shard); assertNotNull(shard.unassignedInfo()); assertThat(maxAttempts, equalTo(shard.unassignedInfo().failedAllocations())); - } + }); failAllocation.set(false); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java index 3c7e9310744cb..934db77e73df3 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.license.XPackLicenseState; @@ -93,7 +92,6 @@ public class SnapshotLifecycle extends Plugin implements ActionPlugin, HealthPlu private final SetOnce snapshotRetentionService = new SetOnce<>(); private final SetOnce snapshotHistoryStore = new SetOnce<>(); private final SetOnce slmHealthIndicatorService = new SetOnce<>(); - private final SetOnce featureService = new SetOnce<>(); private final Settings settings; public SnapshotLifecycle(Settings settings) { @@ -126,7 +124,6 @@ public Collection createComponents(PluginServices services) { ClusterService clusterService = services.clusterService(); ThreadPool threadPool = services.threadPool(); final List components = new ArrayList<>(); - featureService.set(services.featureService()); SnapshotLifecycleTemplateRegistry templateRegistry = new SnapshotLifecycleTemplateRegistry( settings, clusterService, diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index f7c5f1b8072f3..62e68bfdb425c 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -172,10 +172,14 @@ private FieldValues scriptValues() { @Override public GeoShapeWithDocValuesFieldMapper build(MapperBuilderContext context) { if (multiFieldsBuilder.hasMultiFields()) { + /* + * We have no plans to fail on multifields because it isn't worth breaking + * even the tiny fraction of users. 
+ */ DEPRECATION_LOGGER.warn( DeprecationCategory.MAPPINGS, "geo_shape_multifields", - "Adding multifields to [geo_shape] mappers has no effect and will be forbidden in future" + "Adding multifields to [geo_shape] mappers has no effect" ); } GeometryParser geometryParser = new GeometryParser( diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java index 65e54513e8c9e..fcfeca6301950 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java @@ -100,10 +100,14 @@ private static CartesianPoint parseNullValue(Object nullValue, boolean ignoreZVa @Override public FieldMapper build(MapperBuilderContext context) { if (multiFieldsBuilder.hasMultiFields()) { + /* + * We have no plans to fail on multifields because it isn't worth breaking + * even the tiny fraction of users. + */ DEPRECATION_LOGGER.warn( DeprecationCategory.MAPPINGS, "point_multifields", - "Adding multifields to [point] mappers has no effect and will be forbidden in future" + "Adding multifields to [point] mappers has no effect" ); } CartesianPointParser parser = new CartesianPointParser( diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index 198e0ba3011bf..3c8127b6c6036 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -104,10 +104,14 @@ protected Parameter[] getParameters() { @Override public ShapeFieldMapper build(MapperBuilderContext context) { if (multiFieldsBuilder.hasMultiFields()) { + /* + * We have no plans to fail on multifields because it isn't worth breaking + * even the tiny fraction of users. 
+ */ DEPRECATION_LOGGER.warn( DeprecationCategory.MAPPINGS, "shape_multifields", - "Adding multifields to [shape] mappers has no effect and will be forbidden in future" + "Adding multifields to [shape] mappers has no effect" ); } GeometryParser geometryParser = new GeometryParser( diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 712113b1960ef..a34f0ba2eae3e 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -519,7 +519,7 @@ public void testMultiFieldsDeprecationWarning() throws Exception { b.startObject("keyword").field("type", "keyword").endObject(); b.endObject(); })); - assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect and will be forbidden in future"); + assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect"); } public void testSelfIntersectPolygon() throws IOException { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java index 30a30bde51528..8f28b462afca4 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java @@ -368,7 +368,7 @@ public void testMultiFieldsDeprecationWarning() throws Exception { b.startObject("keyword").field("type", "keyword").endObject(); b.endObject(); })); - assertWarnings("Adding multifields to [point] mappers has no effect and will be forbidden in future"); + assertWarnings("Adding multifields to [point] mappers has no effect"); } @Override diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java index 5d2624735bebe..61491b88a8a0b 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java @@ -338,7 +338,7 @@ public void testMultiFieldsDeprecationWarning() throws Exception { b.startObject("keyword").field("type", "keyword").endObject(); b.endObject(); })); - assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect and will be forbidden in future"); + assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect"); } public void testSelfIntersectPolygon() throws IOException { diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index d1b179f09e403..fd0e43fb23c3c 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -20,11 +20,20 @@ dependencies { testImplementation project(':modules:rest-root') } -tasks.named("compileJava").configure { +java { targetCompatibility = JavaVersion.VERSION_1_8 sourceCompatibility = JavaVersion.VERSION_1_8 } 
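
Returning to the spatial mappers above: geo_shape, point and shape all apply the same softening, keeping the deprecation warning (multifields on these mappers still do nothing) while dropping the "will be forbidden in future" threat, since there is no plan to break existing mappings. The shared builder pattern, consolidated here as a sketch (DEPRECATION_LOGGER, DeprecationCategory and multiFieldsBuilder are the mappers' existing members; the lookup key varies per mapper):

if (multiFieldsBuilder.hasMultiFields()) {
    /*
     * We have no plans to fail on multifields because it isn't worth breaking
     * even the tiny fraction of users.
     */
    DEPRECATION_LOGGER.warn(
        DeprecationCategory.MAPPINGS,
        "shape_multifields", // or geo_shape_multifields / point_multifields
        "Adding multifields to [shape] mappers has no effect"
    );
}

On the test side the expected text must match exactly, hence the parallel updates:

assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect");
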
+tasks.named("compileTestJava").configure { + targetCompatibility = buildParams.getMinimumRuntimeVersion() + sourceCompatibility = buildParams.getMinimumRuntimeVersion() +} + +tasks.named("test").configure { + // reset the unit test classpath as using the shadow jar won't work due to relocated packages + classpath = sourceSets.test.runtimeClasspath +} tasks.named("shadowJar").configure { relocate 'com.fasterxml', 'shadow.fasterxml' @@ -34,7 +43,3 @@ tasks.named("shadowJar").configure { } } -tasks.named("test").configure { - // reset the unit test classpath as using the shadow jar won't work due to relocated packages - classpath = sourceSets.test.runtimeClasspath -} diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle index 04f25f7175451..2fdbd4b903959 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle @@ -1,6 +1,5 @@ dependencies { javaRestTestImplementation project(path: xpackModule('ql:test-fixtures')) - clusterPlugins project(':x-pack:qa:freeze-plugin') } tasks.named("check").configure {dependsOn("javaRestTest") } // run these tests as part of the "check" task diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java index 0608e61488e38..00901b014954c 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java @@ -43,7 +43,6 @@ private static ElasticsearchCluster clusterSettings(String remoteAddress) { .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.autoconfiguration.enabled", "false") .user(USER_NAME, PASSWORD) - .plugin("freeze-plugin") .build(); } @@ -58,7 +57,6 @@ private static ElasticsearchCluster remoteClusterSettings() { .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.autoconfiguration.enabled", "false") .user(USER_NAME, PASSWORD) - .plugin("freeze-plugin") .build(); } diff --git a/x-pack/plugin/sql/qa/server/multi-node/build.gradle b/x-pack/plugin/sql/qa/server/multi-node/build.gradle index e7a558ba68dd9..15332fa1ef113 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/build.gradle +++ b/x-pack/plugin/sql/qa/server/multi-node/build.gradle @@ -7,5 +7,4 @@ description = 'Run a subset of SQL tests against multiple nodes' */ dependencies { - clusterPlugins project(':x-pack:qa:freeze-plugin') } diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java index 4f740f22393a7..9c2764397ecb6 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java 
@@ -20,7 +20,6 @@ public static ElasticsearchCluster getCluster() { .setting("xpack.watcher.enabled", "false") .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") - .plugin("freeze-plugin") .build(); } } diff --git a/x-pack/plugin/sql/qa/server/security/build.gradle b/x-pack/plugin/sql/qa/server/security/build.gradle index e00989cbaa89c..2923dfb58780c 100644 --- a/x-pack/plugin/sql/qa/server/security/build.gradle +++ b/x-pack/plugin/sql/qa/server/security/build.gradle @@ -46,7 +46,6 @@ subprojects { user username: "user1", password: 'x-pack-test-password', role: "user1" user username: "user2", password: 'x-pack-test-password', role: "user2" user username: "manage_user", password: 'x-pack-test-password', role: "manage_user" - plugin ':x-pack:qa:freeze-plugin' } File testArtifactsDir = project.file("$buildDir/testArtifacts") diff --git a/x-pack/plugin/sql/qa/server/single-node/build.gradle b/x-pack/plugin/sql/qa/server/single-node/build.gradle index e4376edc683d1..08a196080e54e 100644 --- a/x-pack/plugin/sql/qa/server/single-node/build.gradle +++ b/x-pack/plugin/sql/qa/server/single-node/build.gradle @@ -2,5 +2,4 @@ apply plugin: 'elasticsearch.internal-test-artifact' dependencies { - clusterPlugins project(':x-pack:qa:freeze-plugin') } diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java index 3a92dd675203f..02cdfc993c12c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java @@ -12,7 +12,7 @@ public class CliErrorsIT extends ErrorsTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java index ac4bffdb951d5..46e16418e0642 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java @@ -17,7 +17,7 @@ public class CliExplainIT extends CliIntegrationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java index 83daeccab0b0e..9811142d3611c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java +++ 
b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java @@ -12,7 +12,7 @@ public class CliFetchSizeIT extends FetchSizeTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java index ea7f793dd56ee..99895823adc7f 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java @@ -12,7 +12,7 @@ public class CliLenientIT extends LenientTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java index 0d6f3fd530d22..8baa265780f40 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java @@ -12,7 +12,7 @@ public class CliPartialResultsIT extends PartialResultsTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java index bbc0c16393cb7..ecdd41a203ad3 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java @@ -12,7 +12,7 @@ public class CliSelectIT extends SelectTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java index 0c663be1e8706..2f9deffa48f08 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java +++ 
b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java @@ -12,7 +12,7 @@ public class CliShowIT extends ShowTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java index 3db713b5ed4db..de502bf886ff3 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java @@ -52,7 +52,7 @@ */ public class ConsistentFunctionArgHandlingIT extends JdbcIntegrationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java index fb312a75bcc9c..4a91372abe5d4 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java @@ -13,7 +13,7 @@ public class CustomDateFormatIT extends CustomDateFormatTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java index daaa7e81154b4..ac967710e360c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java @@ -13,7 +13,7 @@ public class FieldExtractorIT extends FieldExtractorTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java index 3763169977873..c83f1597e1884 100644 --- 
a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java @@ -23,11 +23,11 @@ public class GeoJdbcCsvSpecIT extends GeoCsvSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, false); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java index a2a8cc87f62bc..903caf6990126 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java @@ -15,11 +15,11 @@ public class GeoJdbcSqlSpecIT extends GeoSqlSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, false); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java index 8f661fa037e25..c78a922d7f6e4 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java @@ -22,11 +22,11 @@ public class JdbcCsvSpecIT extends CsvSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, false); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java index 1a7337255fc78..35aa5c3701391 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java @@ -12,7 +12,7 @@ public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = 
SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java index fb8a96ca4ea7c..6147d0759f459 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java @@ -42,7 +42,7 @@ */ public class JdbcDocCsvSpecIT extends SpecBaseIntegrationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { @@ -58,7 +58,7 @@ protected String indexName() { @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDocsDatasetIntoEs(client, false); + DataLoader.loadDocsDatasetIntoEs(client); } @ParametersFactory(shuffle = false, argumentFormatting = SqlSpecTestCase.PARAM_FORMATTING) diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocFrozenCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocFrozenCsvSpecIT.java deleted file mode 100644 index 2276db4cff105..0000000000000 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocFrozenCsvSpecIT.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.sql.qa.single_node; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.xpack.sql.qa.jdbc.DataLoader; -import org.elasticsearch.xpack.sql.qa.jdbc.JdbcAssert; -import org.elasticsearch.xpack.sql.qa.jdbc.SpecBaseIntegrationTestCase; -import org.elasticsearch.xpack.sql.qa.jdbc.SqlSpecTestCase; -import org.junit.ClassRule; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.List; - -import static org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; -import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; -import static org.elasticsearch.xpack.ql.SpecReader.Parser; -import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.csvConnection; -import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.executeCsvQuery; - -public class JdbcDocFrozenCsvSpecIT extends SpecBaseIntegrationTestCase { - @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(true); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - private final CsvTestCase testCase; - - @Override - protected String indexName() { - return "library"; - } - - @Override - protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDocsDatasetIntoEs(client, true); - } - - @ParametersFactory(shuffle = false, argumentFormatting = SqlSpecTestCase.PARAM_FORMATTING) - public static List readScriptSpec() throws Exception { - Parser parser = specParser(); - return readScriptSpec("/docs/docs-frozen.csv-spec", parser); - } - - public JdbcDocFrozenCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { - super(fileName, groupName, testName, lineNumber); - this.testCase = testCase; - } - - @Override - protected void assertResults(ResultSet expected, ResultSet elastic) throws SQLException { - Logger log = logEsResultSet() ? logger : null; - - JdbcAssert.assertResultSets(expected, elastic, log, true, true); - } - - @Override - protected final void doTest() throws Throwable { - try (Connection csv = csvConnection(testCase); Connection es = esJdbc()) { - - // pass the testName as table for debugging purposes (in case the underlying reader is missing) - ResultSet expected = executeCsvQuery(csv, testName); - ResultSet elasticResults = executeJdbcQuery(es, testCase.query); - assertResults(expected, elasticResults); - } - } -} diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java deleted file mode 100644 index 11146bfb9aa28..0000000000000 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.sql.qa.single_node; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.xpack.sql.qa.jdbc.CsvSpecTestCase; -import org.junit.ClassRule; - -import java.util.List; -import java.util.Properties; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; -import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; - -public class JdbcFrozenCsvSpecIT extends CsvSpecTestCase { - @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(true); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @ParametersFactory(argumentFormatting = PARAM_FORMATTING) - public static List readScriptSpec() throws Exception { - return readScriptSpec("/slow/frozen.csv-spec", specParser()); - } - - @Override - protected Properties connectionProperties() { - Properties props = new Properties(super.connectionProperties()); - String timeout = String.valueOf(TimeUnit.MINUTES.toMillis(5)); - props.setProperty("connect.timeout", timeout); - props.setProperty("network.timeout", timeout); - props.setProperty("query.timeout", timeout); - props.setProperty("page.timeout", timeout); - - return props; - } - - public JdbcFrozenCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { - super(fileName, groupName, testName, lineNumber, testCase); - } -} diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java index 91f3ab029f55c..f7d08ba4e22dd 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java @@ -25,7 +25,7 @@ public class JdbcShardFailureIT extends JdbcIntegrationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); private String nodeAddresses; diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java index e555448d3284d..40b90e1a42c6c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java @@ -12,7 +12,7 @@ public class JdbcShowTablesIT extends ShowTablesTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java 
b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java index 1c9d029063b12..b8c9a7dbb9007 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java @@ -14,11 +14,11 @@ public class JdbcSqlSpecIT extends SqlSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, false); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java index df5d43f079de3..7074091f4f166 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java @@ -20,7 +20,7 @@ public class RestSqlDeprecationIT extends BaseRestSqlTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java index 3cc9844e6664e..167cc212685d7 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java @@ -24,7 +24,7 @@ */ public class RestSqlIT extends RestSqlTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java index 088f5af1e0aef..6ef56274cdbb0 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java @@ -13,7 +13,7 @@ public class RestSqlPaginationIT extends RestSqlPaginationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git 
a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java index f50865979bc1b..297302c534030 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java @@ -13,7 +13,7 @@ public class RestSqlUsageIT extends RestSqlUsageTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java index 5acf570b0a5da..e59a8392f7335 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java @@ -13,7 +13,7 @@ public class SqlProtocolIT extends SqlProtocolTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java index 2ce6452bb8d93..7907bffd3b99e 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java @@ -13,7 +13,7 @@ public class SqlTestCluster { public static String CLUSTER_NAME = "javaRestTest"; - public static ElasticsearchCluster getCluster(boolean enableFreezing) { + public static ElasticsearchCluster getCluster() { var settings = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) .name(CLUSTER_NAME) @@ -22,10 +22,6 @@ public static ElasticsearchCluster getCluster(boolean enableFreezing) { .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial"); - if (enableFreezing) { - settings = settings.plugin("freeze-plugin"); - } - return settings.build(); } } diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java index 1fce9bfa18b49..928916b3c40ae 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java @@ -13,7 +13,7 @@ public class SysColumnsIT extends 
SysColumnsTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java index 07bf55919b44a..49397f6999c24 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java @@ -32,14 +32,14 @@ public class DataLoader { public static void main(String[] args) throws Exception { try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) { - loadEmpDatasetIntoEs(client, true); - loadDocsDatasetIntoEs(client, true); + loadEmpDatasetIntoEs(client); + loadDocsDatasetIntoEs(client); LogManager.getLogger(DataLoader.class).info("Data loaded"); } } - public static void loadDatasetIntoEs(RestClient client, boolean includeFrozenIndices) throws Exception { - loadEmpDatasetIntoEs(client, includeFrozenIndices); + public static void loadDatasetIntoEs(RestClient client) throws Exception { + loadEmpDatasetIntoEs(client); } public static void createEmptyIndex(RestClient client, String index) throws Exception { @@ -62,7 +62,7 @@ public static void createEmptyIndex(RestClient client, String index) throws Exce client.performRequest(request); } - private static void loadEmpDatasetIntoEs(RestClient client, boolean includeFrozenIndices) throws Exception { + private static void loadEmpDatasetIntoEs(RestClient client) throws Exception { loadEmpDatasetIntoEs(client, "test_emp", "employees"); loadEmpDatasetWithExtraIntoEs(client, "test_emp_copy", "employees"); loadAppsDatasetIntoEs(client, "apps", "apps"); @@ -71,10 +71,6 @@ private static void loadEmpDatasetIntoEs(RestClient client, boolean includeFroze loadLogUnsignedLongIntoEs(client, "logs_unsigned_long", "logs_unsigned_long"); makeAlias(client, "test_alias", "test_emp", "test_emp_copy"); makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy"); - if (includeFrozenIndices) { - loadEmpDatasetIntoEs(client, "frozen_emp", "employees"); - freeze(client, "frozen_emp"); - } loadNoColsDatasetIntoEs(client, "empty_mapping"); } @@ -91,14 +87,10 @@ private static void loadNoColsDatasetIntoEs(RestClient client, String index) thr client.performRequest(request); } - public static void loadDocsDatasetIntoEs(RestClient client, boolean includeFrozenIndices) throws Exception { + public static void loadDocsDatasetIntoEs(RestClient client) throws Exception { loadEmpDatasetIntoEs(client, "emp", "employees"); loadLibDatasetIntoEs(client, "library"); makeAlias(client, "employees", "emp"); - if (includeFrozenIndices) { - loadLibDatasetIntoEs(client, "archive"); - freeze(client, "archive"); - } } public static void createString(String name, XContentBuilder builder) throws Exception { diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java index b45930a9f0d06..3b97938838840 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java +++ 
b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java @@ -60,7 +60,7 @@ protected String indexName() { } protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, true); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec index 2fa82c05cc1aa..0bdd3fbc1b450 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec @@ -3353,7 +3353,7 @@ Alejandro Amabile Anoosh Basil -Brendon +Cristinel // end::filterToday ; diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/multi-cluster-with-security/multi-cluster-command.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/multi-cluster-with-security/multi-cluster-command.csv-spec index be615e6d7df9f..806b1fe5fd620 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/multi-cluster-with-security/multi-cluster-command.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/multi-cluster-with-security/multi-cluster-command.csv-spec @@ -46,21 +46,6 @@ my_remote_cluster|test_emp |TABLE |INDEX my_remote_cluster|test_emp_copy |TABLE |INDEX ; -showTablesWithFrozen -SHOW TABLES CATALOG 'my_remote_cluster' INCLUDE FROZEN; - - catalog | name | type | kind ------------------+-------------------+---------------+--------------- -my_remote_cluster|apps |TABLE |INDEX -my_remote_cluster|empty_mapping |TABLE |INDEX -my_remote_cluster|frozen_emp |TABLE |INDEX -my_remote_cluster|logs |TABLE |INDEX -my_remote_cluster|logs_nanos |TABLE |INDEX -my_remote_cluster|logs_unsigned_long |TABLE |INDEX -my_remote_cluster|test_emp |TABLE |INDEX -my_remote_cluster|test_emp_copy |TABLE |INDEX -; - showTablesSimpleLike SHOW TABLES CATALOG 'my_remote_cluster' LIKE 'test_emp'; diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/slow/frozen.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/slow/frozen.csv-spec deleted file mode 100644 index d2bb5c5b4200b..0000000000000 --- a/x-pack/plugin/sql/qa/server/src/main/resources/slow/frozen.csv-spec +++ /dev/null @@ -1,65 +0,0 @@ -// To mute tests follow example in file: example.csv-spec - -// -// Frozen indices tests -// - -showTables -SHOW TABLES INCLUDE FROZEN; - - catalog | name | type | kind -javaRestTest |apps |TABLE |INDEX -javaRestTest |empty_mapping |TABLE |INDEX -javaRestTest |frozen_emp |TABLE |FROZEN INDEX -javaRestTest |logs |TABLE |INDEX -javaRestTest |logs_nanos |TABLE |INDEX -javaRestTest |logs_unsigned_long |TABLE |INDEX -javaRestTest |test_alias |VIEW |ALIAS -javaRestTest |test_alias_emp |VIEW |ALIAS -javaRestTest |test_emp |TABLE |INDEX -javaRestTest |test_emp_copy |TABLE |INDEX -; - -columnFromFrozen -SELECT gender FROM FROZEN frozen_emp ORDER BY gender LIMIT 5; - -gender:s -F -F -F -F -F -; - -percentileFrozen -SELECT gender, PERCENTILE(emp_no, 92.45) p1 FROM FROZEN frozen_emp GROUP BY gender; - -gender:s | p1:d -null |10018.745 -F |10096.336 -M |10091.393 -; - -countFromFrozen -SELECT gender, COUNT(*) AS c FROM FROZEN frozen_emp GROUP BY gender; - -gender:s | c:l -null |10 -F |33 -M |57 -; - -sum -SELECT SUM(salary) FROM FROZEN frozen_emp; - - SUM(salary):l ---------------- -4824855 -; - -kurtosisAndSkewnessNoGroup -SELECT KURTOSIS(emp_no) k, SKEWNESS(salary) s FROM FROZEN frozen_emp; - -k:d | s:d -1.7997599759975997 | 0.2707722118423227 -; diff 
--git a/x-pack/plugin/sql/sql-client/build.gradle b/x-pack/plugin/sql/sql-client/build.gradle index 4a20e00666ea4..f52e4b9ed96db 100644 --- a/x-pack/plugin/sql/sql-client/build.gradle +++ b/x-pack/plugin/sql/sql-client/build.gradle @@ -12,11 +12,16 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) } -tasks.named("compileJava").configure { +java { targetCompatibility = JavaVersion.VERSION_1_8 sourceCompatibility = JavaVersion.VERSION_1_8 } +tasks.named("compileTestJava").configure { + targetCompatibility = buildParams.getMinimumRuntimeVersion() + sourceCompatibility = buildParams.getMinimumRuntimeVersion() +} + tasks.named('forbiddenApisMain').configure { // does not depend on core, so only jdk and http signatures should be checked replaceSignatureFiles 'jdk-signatures' diff --git a/x-pack/plugin/sql/sql-proto/build.gradle b/x-pack/plugin/sql/sql-proto/build.gradle index 2cb1cfa89f033..0a34afdbc2504 100644 --- a/x-pack/plugin/sql/sql-proto/build.gradle +++ b/x-pack/plugin/sql/sql-proto/build.gradle @@ -16,11 +16,16 @@ dependencies { } } -tasks.named("compileJava").configure { +java { targetCompatibility = JavaVersion.VERSION_1_8 sourceCompatibility = JavaVersion.VERSION_1_8 } +tasks.named("compileTestJava").configure { + targetCompatibility = buildParams.getMinimumRuntimeVersion() + sourceCompatibility = buildParams.getMinimumRuntimeVersion() +} + tasks.named('forbiddenApisMain').configure { //sql does not depend on server, so only jdk signatures should be checked replaceSignatureFiles 'jdk-signatures' diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml index df85362df5fa8..465a6f27f9efa 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml @@ -15,51 +15,26 @@ events: type: counted_keyword - - - do: - index: - index: test-events - id: "1" - body: { "events": [ "a", "b", "a", "c" ] } - - - do: - index: - index: test-events - id: "2" - body: { "events": ["b", "b", "c", "a", "b"] } - - - do: - index: - index: test-events - id: "3" - body: { "events": ["c", "a", null, "b", null, "c"]} - - do: - index: + bulk: index: test-events - id: "4" - body: { "events": ["a"]} - - - do: - index: - index: test-events - id: "5" - body: { "events": []} - - - do: - index: - index: test-events - id: "6" - body: { "events": [null, null]} - - - do: - index: - index: test-events - id: "7" - body: { "events": [["a", "b"], "a", ["c"], [["b"], "c"]]} - - - do: - indices.refresh: { } + refresh: true + body: + - '{"create":{"_id": "1"}}' + - '{"events": [ "a", "b", "a", "c" ] }' + - '{"create":{"_id": "2"}}' + - '{"events": ["b", "b", "c", "a", "b"] }' + - '{"create":{"_id": "3"}}' + - '{"events": ["c", "a", null, "b", null, "c"] }' + - '{"create":{"_id": "4"}}' + - '{"events": ["a"] }' + - '{"create":{"_id": "5"}}' + - '{"events": [] }' + - '{"create":{"_id": "6"}}' + - '{"events": [null, null] }' + - '{"create":{"_id": "7"}}' + - '{"events": [["a", "b"], "a", ["c"], [["b"], "c"]] }' + - '{"create": {"_id": "8"}}' - do: search: @@ -157,49 +132,24 @@ synthetic_source_keep: all - do: - index: - index: test-events - id: "1" - body: { "events": [ "a", "b", "a", "c" ] } - - - do: - index: - index: test-events - id: "2" - body: { "events": [ "b", "b", "c", "a", "b" ] } - - 
- do: - index: - index: test-events - id: "3" - body: { "events": [ "c", "a", null, "b", null, "c" ] } - - - do: - index: - index: test-events - id: "4" - body: { "events": [ "a" ] } - - - do: - index: - index: test-events - id: "5" - body: { "events": [ ] } - - - do: - index: - index: test-events - id: "6" - body: { "events": [ null, null ] } - - - do: - index: + bulk: index: test-events - id: "7" - body: { "events": [["a", "b"], "a", ["c"], [["b"], "c"]]} - - - do: - indices.refresh: { } + refresh: true + body: + - '{"create":{"_id": "1"}}' + - '{"events": [ "a", "b", "a", "c" ] }' + - '{"create":{"_id": "2"}}' + - '{"events": ["b", "b", "c", "a", "b"] }' + - '{"create":{"_id": "3"}}' + - '{"events": ["c", "a", null, "b", null, "c"] }' + - '{"create":{"_id": "4"}}' + - '{"events": ["a"] }' + - '{"create":{"_id": "5"}}' + - '{"events": [] }' + - '{"create":{"_id": "6"}}' + - '{"events": [null, null] }' + - '{"create":{"_id": "7"}}' + - '{"events": [["a", "b"], "a", ["c"], [["b"], "c"]] }' - do: search: @@ -303,50 +253,26 @@ properties: events: type: counted_keyword - - do: - index: - index: test-events - id: "1" - body: { "event-object": { "event-object-2": { "events": [ "a", "b", "a", "c" ] } } } - - - do: - index: - index: test-events - id: "2" - body: { "event-object": { "event-object-2": { "events": [ "b", "b", "c", "a", "b" ] } } } - - - do: - index: - index: test-events - id: "3" - body: { "event-object": { "event-object-2": { "events": [ "c", "a", null, "b", null, "c" ] } } } - - - do: - index: - index: test-events - id: "4" - body: { "event-object": { "event-object-2": { "events": [ "a" ] } } } - - - do: - index: - index: test-events - id: "5" - body: { "event-object": { "event-object-2": { "events": [ ] } } } - - - do: - index: - index: test-events - id: "6" - body: { "event-object": { "event-object-2": { "events": [ null, null ] } } } - do: - index: + bulk: index: test-events - id: "7" - body: { "event-object": { "event-object-2": { "events": [["a", "b"], "a", ["c"], [["b"], "c"]] } } } - - - do: - indices.refresh: { } + refresh: true + body: + - '{"create":{"_id": "1"}}' + - '{ "event-object": { "event-object-2": { "events": [ "a", "b", "a", "c" ] } } }' + - '{"create":{"_id": "2"}}' + - '{ "event-object": { "event-object-2": { "events": [ "b", "b", "c", "a", "b" ] } } }' + - '{"create":{"_id": "3"}}' + - '{ "event-object": { "event-object-2": { "events": [ "c", "a", null, "b", null, "c" ] } } }' + - '{"create":{"_id": "4"}}' + - '{ "event-object": { "event-object-2": { "events": [ "a" ] } } }' + - '{"create":{"_id": "5"}}' + - '{ "event-object": { "event-object-2": { "events": [ ] } } }' + - '{"create":{"_id": "6"}}' + - '{ "event-object": { "event-object-2": { "events": [ null, null ] } } }' + - '{"create":{"_id": "7"}}' + - '{ "event-object": { "event-object-2": { "events": [["a", "b"], "a", ["c"], [["b"], "c"]] } } }' - do: search: @@ -452,51 +378,144 @@ properties: events: type: counted_keyword + - do: + bulk: + index: test-events + refresh: true + body: + - '{"create":{"_id": "1"}}' + - '{"events": [ "a", "b", "a", "c" ] }' + - '{"create":{"_id": "2"}}' + - '{"events": ["b", "b", "c", "a", "b"] }' + - '{"create":{"_id": "3"}}' + - '{"events": ["c", "a", null, "b", null, "c"] }' + - '{"create":{"_id": "4"}}' + - '{"events": ["a"] }' + - '{"create":{"_id": "5"}}' + - '{"events": [] }' + - '{"create":{"_id": "6"}}' + - '{"events": [null, null] }' + - '{"create":{"_id": "7"}}' + - '{"events": [["a", "b"], "a", ["c"], [["b"], "c"]] }' - do: - index: + search: index: 
test-events - id: "1" - body: { "events": [ "a", "b", "a", "c" ] } + body: + query: + ids: + values: [ 1 ] + - match: + hits.hits.0._source: + events: [ "a", "b", "a", "c" ] - do: - index: + search: index: test-events - id: "2" - body: { "events": [ "b", "b", "c", "a", "b" ] } + body: + query: + ids: + values: [ 2 ] + - match: + hits.hits.0._source: + events: [ "b", "b", "c", "a", "b" ] - do: - index: + search: index: test-events - id: "3" - body: { "events": [ "c", "a", null, "b", null, "c" ] } + body: + query: + ids: + values: [ 3 ] + - match: + hits.hits.0._source: + events: [ "c", "a", null, "b", null, "c" ] - do: - index: + search: index: test-events - id: "4" - body: { "events": [ "a" ] } + body: + query: + ids: + values: [ 4 ] + - match: + hits.hits.0._source: + events: [ "a" ] - do: - index: + search: index: test-events - id: "5" - body: { "events": [ ] } + body: + query: + ids: + values: [ 5 ] + - match: + hits.hits.0._source: + events: [ ] - do: - index: + search: index: test-events - id: "6" - body: { "events": [ null, null ] } + body: + query: + ids: + values: [ 6 ] + - match: + hits.hits.0._source: + events: [ null, null ] - do: - index: + search: index: test-events - id: "7" - body: { "events": [ [ "a", "b" ], "a", [ "c" ], [ [ "b" ], "c" ] ] } + body: + query: + ids: + values: [ 7 ] + - match: + hits.hits.0._source: + events: [["a", "b"], "a", ["c"], [["b"], "c"]] + +--- + +"synthetic source arrays moved to leaf fields": + - requires: + cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] + reason: "Feature implemented" - do: - indices.refresh: { } + indices.create: + index: test-events + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + event-object: + type: object + properties: + events: + type: counted_keyword + + - do: + bulk: + index: test-events + refresh: true + body: + - '{"create":{"_id": "1"}}' + - '{ "event-object": [ { "events": [ "a", "b"] }, { "events": [ "a", "c" ] } ] }' + - '{"create":{"_id": "2"}}' + - '{ "event-object": [ { "events": [ "b", "b"] }, { "events": "c" }, { "events": [ "a", "b" ] } ] }' + - '{"create":{"_id": "3"}}' + - '{ "event-object": [ { "events": [ "c", "a", null ] }, { "events": [ "b", null, "c" ] } ] }' + - '{"create":{"_id": "4"}}' + - '{ "event-object": [ { "events": [ "a" ] }, { "events": [] }, { "events": [ null ] } ] }' + - '{"create":{"_id": "5"}}' + - '{ "event-object": [] }' + - '{"create":{"_id": "6"}}' + - '{ "event-object": [ { "events": [ null ] }, { "events": null } ] }' + - '{"create":{"_id": "7"}}' + - '{ "event-object": [ { "events": [["a", "b"], "a"]}, { "events": [["c"], [["b"], "c"]] } ] }' - do: search: @@ -507,7 +526,8 @@ values: [ 1 ] - match: hits.hits.0._source: - events: [ "a", "b", "a", "c" ] + event-object: + events: [ "a", "a", "b", "c" ] - do: search: @@ -518,7 +538,8 @@ values: [ 2 ] - match: hits.hits.0._source: - events: [ "b", "b", "c", "a", "b" ] + event-object: + events: [ "a", "b", "b", "b", "c" ] - do: search: @@ -529,7 +550,8 @@ values: [ 3 ] - match: hits.hits.0._source: - events: [ "c", "a", null, "b", null, "c" ] + event-object: + events: [ "a", "b", "c", "c" ] - do: search: @@ -540,7 +562,8 @@ values: [ 4 ] - match: hits.hits.0._source: - events: [ "a" ] + event-object: + events: "a" - do: search: @@ -550,8 +573,7 @@ ids: values: [ 5 ] - match: - hits.hits.0._source: - events: [ ] + hits.hits.0._source: {} - do: search: @@ -561,8 +583,7 @@ ids: values: [ 6 ] - match: - hits.hits.0._source: - events: [ null, null ] + 
hits.hits.0._source: {} - do: search: @@ -573,4 +594,5 @@ values: [ 7 ] - match: hits.hits.0._source: - events: [["a", "b"], "a", ["c"], [["b"], "c"]] + event-object: + events: [ "a", "a", "b", "b", "c", "c" ] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/40_multiple_subobjects.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/40_multiple_subobjects.yml new file mode 100644 index 0000000000000..7e48a4ed97ca4 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/40_multiple_subobjects.yml @@ -0,0 +1,33 @@ +"multiple subobjects": + - requires: + cluster_features: ["gte_v8.12.0"] + reason: "counted_keyword was added in 8.12" + + - do: + indices.create: + index: test-events + body: + mappings: + properties: + parent: + type: object + properties: + child: + type: counted_keyword + + - do: + index: + index: test-events + id: "1" + refresh: true + body: '{"parent": [{"child": "foo"}, {"child": "bar"}]}' + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 1 ] + - match: + hits.hits.0._source: {"parent": [{"child": "foo"}, {"child": "bar"}]} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml index f8c3a4d660fee..289b7ee3f93c6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml @@ -148,6 +148,8 @@ - do: # Should not raise error + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "logs*" - do: @@ -473,6 +475,8 @@ index: simple-data-stream1 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
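+        # As with the caps request above, allow rollup's removal warning so the search itself still runs.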
rollup.rollup_search: index: "simple-data-stream1" body: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml index f72cdd65b275c..266901474c2f9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml @@ -1,12 +1,12 @@ --- setup: - requires: - test_runner_features: [capabilities, contains] + test_runner_features: [capabilities, contains, allowed_warnings] capabilities: - method: POST path: /_query parameters: [] - capabilities: [join_lookup_v12] + capabilities: [join_lookup_v12, join_lookup_skip_mv_warnings] reason: "uses LOOKUP JOIN" - do: indices.create: @@ -18,6 +18,16 @@ setup: type: long color: type: keyword + - do: + indices.create: + index: test-mv + body: + mappings: + properties: + key: + type: long + color: + type: keyword - do: indices.create: index: test-lookup-1 @@ -44,6 +54,19 @@ setup: type: long color: type: keyword + - do: + indices.create: + index: test-lookup-mv + body: + settings: + index: + mode: lookup + mappings: + properties: + key: + type: long + color: + type: keyword - do: indices.update_aliases: body: @@ -75,6 +98,28 @@ setup: - { "key": 1, "color": "cyan" } - { "index": { } } - { "key": 2, "color": "yellow" } + - do: + bulk: + index: "test-mv" + refresh: true + body: + - { "index": { } } + - { "key": 1, "color": "red" } + - { "index": { } } + - { "key": 2, "color": "blue" } + - { "index": { } } + - { "key": [0, 1, 2], "color": null } + - do: + bulk: + index: "test-lookup-mv" + refresh: true + body: + - { "index": { } } + - { "key": 1, "color": "cyan" } + - { "index": { } } + - { "key": 2, "color": "yellow" } + - { "index": { } } + - { "key": [0, 1, 2], "color": "green" } --- basic: @@ -200,3 +245,39 @@ pattern-single: - match: { error.type: "parsing_exception" } - contains: { error.reason: "line 1:36: invalid index pattern [test-lookup-1*], * is not allowed in LOOKUP JOIN" } + +--- +mv-on-lookup: + - do: + esql.query: + body: + query: 'FROM test | SORT key | LOOKUP JOIN test-lookup-mv ON key' + allowed_warnings: + - "No limit defined, adding default limit of [1000]" + - "Line 1:24: evaluation of [LOOKUP JOIN test-lookup-mv ON key] failed, treating result as null. Only first 20 failures recorded." + - "Line 1:24: java.lang.IllegalArgumentException: LOOKUP JOIN encountered multi-value" + + - match: {columns.0.name: "key"} + - match: {columns.0.type: "long"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {values.0: [1, "cyan"]} + - match: {values.1: [2, "yellow"]} + +--- +mv-on-query: + - do: + esql.query: + body: + query: 'FROM test-mv | SORT key | LOOKUP JOIN test-lookup-1 ON key | LIMIT 4' + allowed_warnings: + - "Line 1:27: evaluation of [LOOKUP JOIN test-lookup-1 ON key] failed, treating result as null. Only first 20 failures recorded." 
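+        # A multi-valued key on the query side is not fatal: the row keeps a null join result plus the warnings listed here.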
+ - "Line 1:27: java.lang.IllegalArgumentException: LOOKUP JOIN encountered multi-value" + + - match: {columns.0.name: "key"} + - match: {columns.0.type: "long"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {values.0: [[0, 1, 2], null]} + - match: {values.1: [1, "cyan"]} + - match: {values.2: [2, "yellow"]} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml index b9415bce62ea9..512ea155144b2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml @@ -80,8 +80,7 @@ setup: time_series_dimension: true agg_metric: type: aggregate_metric_double - metrics: - - max + metrics: [ min, max, sum, value_count ] default_metric: max k8s: properties: @@ -99,9 +98,83 @@ setup: index: test2 body: - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": "A", "agg_metric": {"max": 10}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": "A", "agg_metric": {"max": 10, "min": -1, "sum": 20, "value_count": 5}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:24.467Z", "dim": "B", "agg_metric": {"max": 20}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "dim": "B", "agg_metric": {"max": 20, "min": 3, "sum": 50, "value_count": 7}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:44.467Z", "dim": "B", "agg_metric": {"max": 17, "min": -5, "sum": 33, "value_count": 9}}' + + - do: + indices.create: + index: test3 + body: + settings: + index: + mode: time_series + routing_path: [ k8s.pod.uid ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + agg_metric: + type: aggregate_metric_double + metrics: [ min, max ] + default_metric: min + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + - do: + bulk: + refresh: true + index: test3 + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:04.467Z", "agg_metric": {"max": 1, "min": -3}, "k8s": {"pod": {"uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:24.467Z", "agg_metric": {"max": 10, "min": 3}, "k8s": {"pod": {"uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:44.467Z", "agg_metric": {"max": 17, "min": 2}, "k8s": {"pod": {"uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9"}}}' + + - do: + indices.create: + index: test4 + body: + settings: + index: + mode: time_series + routing_path: [ k8s.pod.uid ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + agg_metric: + type: aggregate_metric_double + metrics: [ sum, value_count ] + default_metric: sum + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + - do: + bulk: + refresh: true + index: test4 + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T23:50:04.467Z", "agg_metric": {"sum": 1, "value_count": 10}, "k8s": {"pod": {"uid":"947e4ced-1786-4e53-9e0c-5c447e959507"}}}' --- load everything: @@ -201,6 +274,14 @@ cast then sort on counter: --- from doc with aggregate_metric_double: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: 
[aggregate_metric_double] + reason: "Support for aggregate_metric_double" - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" @@ -211,25 +292,169 @@ from doc with aggregate_metric_double: - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} - match: {columns.1.name: "agg_metric"} - - match: {columns.1.type: "unsupported"} + - match: {columns.1.type: "aggregate_metric_double"} - match: {columns.2.name: "dim"} - match: {columns.2.type: "keyword"} - match: {columns.3.name: "k8s.pod.ip"} - match: {columns.3.type: "ip"} - match: {columns.4.name: "k8s.pod.network.tx"} - match: {columns.4.type: "long"} - - length: {values: 2} + - length: {values: 3} --- stats on aggregate_metric_double: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double] + reason: "Support for aggregate_metric_double" + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test2 | STATS max(agg_metric), min(agg_metric), sum(agg_metric), count(agg_metric)' + - length: {values: 1} + - length: {values.0: 4} + - match: {columns.0.name: "max(agg_metric)"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "min(agg_metric)"} + - match: {columns.1.type: "double"} + - match: {columns.2.name: "sum(agg_metric)"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "count(agg_metric)"} + - match: {columns.3.type: "long"} + - match: {values.0.0: 20.0} + - match: {values.0.1: -5.0} + - match: {values.0.2: 103.0} + - match: {values.0.3: 21.0} + +--- +grouping stats on aggregate_metric_double: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double] + reason: "Support for aggregate_metric_double" - do: - catch: /Cannot use field \[agg_metric\] with unsupported type \[aggregate_metric_double\]/ + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test2 | STATS max(agg_metric) BY dim' + query: "FROM test2 + | STATS max(agg_metric), min(agg_metric), sum(agg_metric), count(agg_metric) BY dim + | SORT dim" + - length: {values: 2} + - length: {values.0: 5} + - match: {columns.0.name: "max(agg_metric)"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "min(agg_metric)"} + - match: {columns.1.type: "double"} + - match: {columns.2.name: "sum(agg_metric)"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "count(agg_metric)"} + - match: {columns.3.type: "long"} + - match: {columns.4.name: "dim"} + - match: {columns.4.type: "keyword"} + - match: {values.0.0: 10.0} + - match: {values.0.1: -1.0} + - match: {values.0.2: 20.0} + - match: {values.0.3: 5.0} + - match: {values.0.4: "A"} + - match: {values.1.0: 20.0} + - match: {values.1.1: -5.0} + - match: {values.1.2: 83.0} + - match: {values.1.3: 16.0} + - match: {values.1.4: "B"} + +--- +stats on aggregate_metric_double with partial submetrics: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double_partial_submetrics] + reason: "Support for partial submetrics in aggregate_metric_double" + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test3 | STATS max(agg_metric), 
min(agg_metric), sum(agg_metric), count(agg_metric) BY k8s.pod.uid | SORT k8s.pod.uid' + + - length: {values: 2} + - length: {values.0: 5} + - match: {columns.0.name: "max(agg_metric)"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "min(agg_metric)"} + - match: {columns.1.type: "double"} + - match: {columns.2.name: "sum(agg_metric)"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "count(agg_metric)"} + - match: {columns.3.type: "long"} + - match: {columns.4.name: "k8s.pod.uid"} + - match: {columns.4.type: "keyword"} + - match: {values.0.0: 10.0} + - match: {values.0.1: -3.0} + - match: {values.0.2: null} + - match: {values.0.3: null} + - match: {values.0.4: "947e4ced-1786-4e53-9e0c-5c447e959507"} + - match: {values.1.0: 17.0} + - match: {values.1.1: 2.0} + - match: {values.1.2: null} + - match: {values.1.3: null} + - match: {values.1.4: "df3145b3-0563-4d3b-a0f7-897eb2876ea9"} + +--- +stats on aggregate_metric_double missing min and max: + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: POST + path: /_query + parameters: [ ] + capabilities: [ aggregate_metric_double_partial_submetrics ] + reason: "Support for partial submetrics in aggregate_metric_double" + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test4 | STATS max(agg_metric), min(agg_metric), sum(agg_metric), count(agg_metric)' + + - length: {values: 1} + - length: {values.0: 4} + - match: {columns.0.name: "max(agg_metric)"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "min(agg_metric)"} + - match: {columns.1.type: "double"} + - match: {columns.2.name: "sum(agg_metric)"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "count(agg_metric)"} + - match: {columns.3.type: "long"} + - match: {values.0.0: null} + - match: {values.0.1: null} + - match: {values.0.2: 1.0} + - match: {values.0.3: 10} --- from index pattern unsupported counter: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double_partial_submetrics] + reason: "Support for partial submetrics in aggregate_metric_double" - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" @@ -240,7 +465,7 @@ from index pattern unsupported counter: - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} - match: {columns.1.name: "agg_metric"} - - match: {columns.1.type: "unsupported"} + - match: {columns.1.type: "aggregate_metric_double"} - match: {columns.2.name: "dim"} - match: {columns.2.type: "keyword"} - match: {columns.3.name: "k8s.pod.ip"} @@ -255,7 +480,7 @@ from index pattern unsupported counter: - match: {columns.7.type: "keyword"} - match: {columns.8.name: "metricset"} - match: {columns.8.type: "keyword"} - - length: {values: 10} + - length: {values: 15} --- from index pattern explicit counter use: @@ -276,7 +501,7 @@ from index pattern explicit counter use: query: 'FROM test* | keep *.tx' - match: {columns.0.name: "k8s.pod.network.tx"} - match: {columns.0.type: "unsupported"} - - length: {values: 10} + - length: {values: 15} --- _source: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index e100f30717aef..8e5a6e6d231d6 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -13,7 +13,7 @@ setup: properties: aggregate_metric_double: type: aggregate_metric_double - metrics: [ min, max ] + metrics: [ min, max, sum, value_count ] default_metric: max binary: type: binary @@ -81,7 +81,7 @@ setup: body: - { "index": { } } - { - "aggregate_metric_double": { "min": 1.0, "max": 3.0 }, + "aggregate_metric_double": { "min": 1.0, "max": 3.0, "sum": 10.1, "value_count": 5 }, "binary": "U29tZSBiaW5hcnkgYmxvYg==", "completion": "foo bar", "date_nanos": "2015-01-01T12:10:30.123456789Z", @@ -119,8 +119,8 @@ unsupported: - method: POST path: /_query parameters: [ ] - capabilities: [ date_nanos_type ] - reason: "support for date nanos type" + capabilities: [ aggregate_metric_double ] + reason: "support for aggregate_metric_double type" - do: allowed_warnings_regex: @@ -131,7 +131,7 @@ unsupported: query: 'from test' - match: { columns.0.name: aggregate_metric_double } - - match: { columns.0.type: unsupported } + - match: { columns.0.type: aggregate_metric_double } - match: { columns.1.name: binary } - match: { columns.1.type: unsupported } - match: { columns.2.name: completion } @@ -227,7 +227,7 @@ unsupported: body: query: 'from test | limit 0' - match: { columns.0.name: aggregate_metric_double } - - match: { columns.0.type: unsupported } + - match: { columns.0.type: aggregate_metric_double } - match: { columns.1.name: binary } - match: { columns.1.type: unsupported } - match: { columns.2.name: completion } @@ -308,8 +308,8 @@ unsupported with sort: - method: POST path: /_query parameters: [ ] - capabilities: [ date_nanos_type ] - reason: "support for date nanos type" + capabilities: [ aggregate_metric_double ] + reason: "support for aggregate_metric_double type" - do: allowed_warnings_regex: @@ -317,97 +317,94 @@ unsupported with sort: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'from test | sort some_doc.bar' + query: 'from test | drop aggregate_metric_double | sort some_doc.bar' - - match: { columns.0.name: aggregate_metric_double } + - match: { columns.0.name: binary } - match: { columns.0.type: unsupported } - - match: { columns.1.name: binary } + - match: { columns.1.name: completion } - match: { columns.1.type: unsupported } - - match: { columns.2.name: completion } - - match: { columns.2.type: unsupported } - - match: { columns.3.name: date_nanos } - - match: { columns.3.type: date_nanos } - - match: { columns.4.name: date_range } + - match: { columns.2.name: date_nanos } + - match: { columns.2.type: date_nanos } + - match: { columns.3.name: date_range } + - match: { columns.3.type: unsupported } + - match: { columns.4.name: dense_vector } - match: { columns.4.type: unsupported } - - match: { columns.5.name: dense_vector } + - match: { columns.5.name: double_range } - match: { columns.5.type: unsupported } - - match: { columns.6.name: double_range } + - match: { columns.6.name: float_range } - match: { columns.6.type: unsupported } - - match: { columns.7.name: float_range } - - match: { columns.7.type: unsupported } - - match: { columns.8.name: geo_point } + - match: { columns.7.name: geo_point } + - match: { columns.7.type: geo_point } + - match: { columns.8.name: geo_point_alias } - match: { columns.8.type: geo_point } - - match: { columns.9.name: geo_point_alias } - - match: { columns.9.type: geo_point } - - match: { columns.10.name: 
geo_shape } - - match: { columns.10.type: geo_shape } - - match: { columns.11.name: histogram } + - match: { columns.9.name: geo_shape } + - match: { columns.9.type: geo_shape } + - match: { columns.10.name: histogram } + - match: { columns.10.type: unsupported } + - match: { columns.11.name: integer_range } - match: { columns.11.type: unsupported } - - match: { columns.12.name: integer_range } + - match: { columns.12.name: ip_range } - match: { columns.12.type: unsupported } - - match: { columns.13.name: ip_range } + - match: { columns.13.name: long_range } - match: { columns.13.type: unsupported } - - match: { columns.14.name: long_range } - - match: { columns.14.type: unsupported } - - match: { columns.15.name: match_only_text } - - match: { columns.15.type: text } - - match: { columns.16.name: name } - - match: { columns.16.type: keyword } - - match: { columns.17.name: point } - - match: { columns.17.type: cartesian_point } - - match: { columns.18.name: rank_feature } + - match: { columns.14.name: match_only_text } + - match: { columns.14.type: text } + - match: { columns.15.name: name } + - match: { columns.15.type: keyword } + - match: { columns.16.name: point } + - match: { columns.16.type: cartesian_point } + - match: { columns.17.name: rank_feature } + - match: { columns.17.type: unsupported } + - match: { columns.18.name: rank_features } - match: { columns.18.type: unsupported } - - match: { columns.19.name: rank_features } + - match: { columns.19.name: search_as_you_type } - match: { columns.19.type: unsupported } - - match: { columns.20.name: search_as_you_type } + - match: { columns.20.name: search_as_you_type._2gram } - match: { columns.20.type: unsupported } - - match: { columns.21.name: search_as_you_type._2gram } + - match: { columns.21.name: search_as_you_type._3gram } - match: { columns.21.type: unsupported } - - match: { columns.22.name: search_as_you_type._3gram } + - match: { columns.22.name: search_as_you_type._index_prefix } - match: { columns.22.type: unsupported } - - match: { columns.23.name: search_as_you_type._index_prefix } - - match: { columns.23.type: unsupported } - - match: { columns.24.name: shape } - - match: { columns.24.type: cartesian_shape } - - match: { columns.25.name: some_doc.bar } - - match: { columns.25.type: long } - - match: { columns.26.name: some_doc.foo } - - match: { columns.26.type: keyword } - - match: { columns.27.name: text } - - match: { columns.27.type: text } - - match: { columns.28.name: token_count } - - match: { columns.28.type: integer } + - match: { columns.23.name: shape } + - match: { columns.23.type: cartesian_shape } + - match: { columns.24.name: some_doc.bar } + - match: { columns.24.type: long } + - match: { columns.25.name: some_doc.foo } + - match: { columns.25.type: keyword } + - match: { columns.26.name: text } + - match: { columns.26.type: text } + - match: { columns.27.name: token_count } + - match: { columns.27.type: integer } - length: { values: 1 } - match: { values.0.0: null } - match: { values.0.1: null } - - match: { values.0.2: null } - - match: { values.0.3: "2015-01-01T12:10:30.123456789Z" } + - match: { values.0.2: "2015-01-01T12:10:30.123456789Z" } + - match: { values.0.3: null } - match: { values.0.4: null } - match: { values.0.5: null } - match: { values.0.6: null } - - match: { values.0.7: null } + - match: { values.0.7: "POINT (10.0 12.0)" } - match: { values.0.8: "POINT (10.0 12.0)" } - - match: { values.0.9: "POINT (10.0 12.0)" } - - match: { values.0.10: "LINESTRING (-97.154 25.996, -97.159 
25.998, -97.181 25.991, -97.187 25.985)" } + - match: { values.0.9: "LINESTRING (-97.154 25.996, -97.159 25.998, -97.181 25.991, -97.187 25.985)" } + - match: { values.0.10: null } - match: { values.0.11: null } - match: { values.0.12: null } - match: { values.0.13: null } - - match: { values.0.14: null } - - match: { values.0.15: "foo bar baz" } - - match: { values.0.16: Alice } - - match: { values.0.17: "POINT (-97.15447 25.9961525)" } + - match: { values.0.14: "foo bar baz" } + - match: { values.0.15: Alice } + - match: { values.0.16: "POINT (-97.15447 25.9961525)" } + - match: { values.0.17: null } - match: { values.0.18: null } - match: { values.0.19: null } - match: { values.0.20: null } - match: { values.0.21: null } - match: { values.0.22: null } - - match: { values.0.23: null } - - match: { values.0.24: "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" } - - match: { values.0.25: 12 } - - match: { values.0.26: xy } - - match: { values.0.27: "foo bar" } - - match: { values.0.28: 3 } + - match: { values.0.23: "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" } + - match: { values.0.24: 12 } + - match: { values.0.25: xy } + - match: { values.0.26: "foo bar" } + - match: { values.0.27: 3 } --- nested declared inline: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml new file mode 100644 index 0000000000000..5a0b8b281e88f --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml @@ -0,0 +1,119 @@ +setup: + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [ metricset, k8s.pod.uid ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + created_at: + type: date_nanos + running: + type: boolean + number_of_containers: + type: integer + ip: + type: ip + tags: + type: keyword + values: + type: integer + network: + properties: + tx: + type: long + time_series_metric: gauge + rx: + type: long + time_series_metric: gauge + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001810, "rx": 802133}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "network": {"tx": 2005177, "rx": 801479}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "network": {"tx": 2006223, "rx": 802337}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 
1, 2]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "network": {"tx": 2012916, "rx": 803685}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "network": {"tx": 1434521, "rx": 530575}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "network": {"tx": 1434577, "rx": 530600}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "network": {"tx": 1434587, "rx": 530604}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "network": {"tx": 1434595, "rx": 530605}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + +--- +"Query stats on downsampled index": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double] + reason: "Support for aggregate_metric_double" + - do: + indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + esql.query: + body: + query: "FROM test-downsample | + STATS max(k8s.pod.network.rx), min(k8s.pod.network.rx), sum(k8s.pod.network.rx), count(k8s.pod.network.rx) + | LIMIT 100" + + - length: {values: 1} + - length: {values.0: 4} + - match: {columns.0.name: "max(k8s.pod.network.rx)"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "min(k8s.pod.network.rx)"} + - match: {columns.1.type: "double"} + - match: {columns.2.name: "sum(k8s.pod.network.rx)"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "count(k8s.pod.network.rx)"} + - match: {columns.3.type: "long"} + - match: {values.0.0: 803685.0} + - match: {values.0.1: 530575.0} + - match: {values.0.2: 5332018.0} + - match: {values.0.3: 8} + diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/job_cat_apis.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/job_cat_apis.yml index 25a127f077c24..a95f59e81e58a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/job_cat_apis.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/job_cat_apis.yml @@ -87,6 +87,8 @@ setup: 
{"airline":"JZA","responsetime":"244.1276","time":"1403485200"} - do: + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: job-stats-test - match: { flushed: true } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_get_stats.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_get_stats.yml index 6aab1fb9e894a..ab4c7311d8302 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_get_stats.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_get_stats.yml @@ -87,6 +87,8 @@ setup: {"airline":"JZA","responsetime":"244.1276","time":"1403485200"} - do: + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: job-stats-test - match: { flushed: true } @@ -131,6 +133,8 @@ setup: {"airline":"JZA","responsetime":"244.1276","time":"1403485200"} - do: + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: job-stats-test - match: { flushed: true } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/post_data.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/post_data.yml index 9577f80f4592e..7cf56d757c09c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/post_data.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/post_data.yml @@ -77,6 +77,8 @@ setup: - match: { latest_record_timestamp: 1403481700000} - do: + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: post-data-job - match: { flushed: true } @@ -110,7 +112,12 @@ setup: --- "Test flush and close job WITHOUT sending any data": + - skip: + features: + - "warnings" - do: + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: post-data-job - match: { flushed: true } @@ -150,6 +157,8 @@ setup: # Skip a bucket - do: + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: post-data-job skip_time: 1403488700 @@ -266,26 +275,36 @@ setup: - skip: reason: "https://github.com/elastic/elasticsearch/issues/34747" cluster_features: ["gte_v6.5.0"] + features: + - "warnings" - do: catch: missing + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: not_a_job - do: catch: /parse_exception/ + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: post-data-job start: not_a_date - do: catch: /parse_exception/ + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: post-data-job end: end_not_a_date - do: catch: /parse_exception/ + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: post-data-job advance_time: 
advance_time_not_a_date @@ -311,6 +330,8 @@ setup: - do: catch: /status_exception/ + warnings: + - 'Forcing any buffered data to be processed is deprecated, in a future major version it will be compulsory to use a datafeed' ml.flush_job: job_id: post-data-closed-job diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/delete_job.yml index bf1a91b5c81fa..9f23ba791b7b3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/delete_job.yml @@ -1,6 +1,8 @@ setup: + - requires: + test_runner_features: [ "allowed_warnings" ] - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -48,11 +50,16 @@ setup: } ] } + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." --- "Test basic delete_job": - + - requires: + test_runner_features: ["allowed_warnings"] - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -93,11 +100,15 @@ setup: job_state: "stopped" - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.delete_job: id: foo - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo - match: @@ -107,6 +118,8 @@ setup: "Test delete job twice": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -147,11 +160,15 @@ setup: job_state: "stopped" - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.delete_job: id: foo - is_true: acknowledged - do: + allowed_warnings: + - The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information. rollup.get_jobs: id: foo - match: @@ -161,6 +178,8 @@ setup: "Test delete running job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -201,11 +220,15 @@ setup: job_state: "stopped" - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.start_job: id: foo - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: request rollup.delete_job: id: foo @@ -217,6 +240,8 @@ setup: "Test delete non-existent job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /the task with id \[does_not_exist\] doesn't exist/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml index ff99c39ef9afc..2f14a8d87954b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -29,6 +29,8 @@ setup: "Test basic get_jobs": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -55,6 +57,8 @@ setup: - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -97,6 +101,8 @@ setup: "Test get with no jobs": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: "_all" @@ -106,6 +112,8 @@ setup: "Test get missing job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_caps.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_caps.yml index 834141343dcbc..61f91f0dd3cea 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_caps.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_caps.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -46,6 +46,8 @@ setup: my-id: { } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -78,6 +80,8 @@ setup: "Verify one job caps": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_caps: id: "foo" @@ -101,6 +105,8 @@ setup: "Verify two job caps": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -125,6 +131,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_caps: id: "foo" @@ -160,6 +168,8 @@ setup: "Verify all caps": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -185,6 +195,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. 
See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -210,6 +222,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_caps: id: "_all" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml index dca96eb325b87..4245d1efeed4d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -47,6 +47,8 @@ setup: my-id: { } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -79,6 +81,8 @@ setup: "Verify one job caps by rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "foo_rollup" @@ -102,6 +106,8 @@ setup: "Verify two job caps by rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -126,6 +132,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "foo_rollup" @@ -162,6 +170,8 @@ setup: "Verify two different job caps by rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -186,6 +196,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "foo_rollup" @@ -209,6 +221,8 @@ setup: "Verify all job caps by rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -233,6 +247,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -258,6 +274,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
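+      # Every rollup API call in this file now tolerates the removal notice via
+      # `allowed_warnings`; the matching `allowed_warnings` runner feature is
+      # declared in this file's setup block above.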
rollup.get_rollup_index_caps: index: "_all" @@ -308,6 +326,8 @@ setup: "Verify job caps by rollup index comma delimited list": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -332,6 +352,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -357,6 +379,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "foo_rollup2,foo_rollup" @@ -407,6 +431,8 @@ setup: "Verify index pattern": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -431,6 +457,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -456,6 +484,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "*_rollup2" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml index d45c13a2b8adb..6876bb6aff8eb 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml @@ -42,6 +42,8 @@ setup: --- "Test basic put_job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -68,6 +70,8 @@ setup: - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -116,6 +120,8 @@ setup: "Test put_job with existing name": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -142,6 +148,8 @@ setup: - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Cannot create rollup job \[foo\] because job was previously created \(existing metadata\)/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser @@ -173,6 +181,8 @@ setup: indices.create: index: non-rollup - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Rollup data cannot be added to existing indices that contain non-rollup data/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -203,6 +213,8 @@ setup: "Try to include headers": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /unknown field \[headers\]/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -235,6 +247,8 @@ setup: "Validation failures": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Could not find a \[numeric\] or \[date,date_nanos\] field with name \[field_doesnt_exist\] in any of the indices matching the index pattern/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -268,6 +282,8 @@ setup: index: dummy-rollup-index - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /new rollup jobs are not allowed in clusters that don't have any rollup usage, since rollup has been deprecated/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -297,6 +313,8 @@ setup: "Unknown Metric": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Unsupported metric \[does_not_exist\]/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -338,6 +356,7 @@ setup: - do: allowed_warnings: - "index [foo_rollup] matches multiple legacy templates [global, test], composable templates will only match a single template" + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -413,6 +432,8 @@ setup: reason: Comma delimited index pattern introduced in 8.2.0 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -439,6 +460,8 @@ setup: - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
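+      # Verifying the job with get_jobs emits the same rollup deprecation
+      # warning as put_job, so the allowance is repeated on this request too.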
rollup.get_jobs: id: bar diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/rollup_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/rollup_search.yml index d3f21f16c3a30..65708235f30c7 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/rollup_search.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/rollup_search.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -29,6 +29,8 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.put_job: id: foo body: > @@ -139,6 +141,8 @@ setup: "Basic Search": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -166,6 +170,8 @@ setup: cluster_features: ["gte_v6.6.0"] reason: rest_total_hits_as_int was introduced in 6.6.0 - do: + allowed_warnings: + - The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information. rollup.rollup_search: index: "foo_rollup" body: @@ -191,6 +197,8 @@ setup: "Formatted Date Histo": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -217,6 +225,8 @@ setup: "Empty aggregation": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: rest_total_hits_as_int: true index: "foo_rollup" @@ -232,6 +242,8 @@ setup: "Empty aggregation with new response format": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -249,6 +261,8 @@ setup: "Search with Metric": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -282,6 +296,8 @@ setup: "Search with Query": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -317,6 +333,8 @@ setup: "Search with MatchAll and Second Job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -427,6 +445,8 @@ setup: - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -460,6 +480,8 @@ setup: "Search with Query and Second Job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser rollup.put_job: @@ -570,6 +592,8 @@ setup: - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -605,6 +629,8 @@ setup: "Search with Query and Second Job different intervals": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -714,6 +740,8 @@ setup: "_rollup.version": 1 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -749,6 +777,8 @@ setup: "Wildcards matching single rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup*" body: @@ -787,6 +817,8 @@ setup: type: integer - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -837,6 +869,8 @@ setup: name: rollup_alias - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "rollup_alias" body: @@ -875,6 +909,8 @@ setup: type: integer - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -908,6 +944,8 @@ setup: name: rollup_alias - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /RollupSearch currently only supports searching one rollup index at a time\./ rollup.rollup_search: index: "rollup_alias" @@ -943,6 +981,8 @@ setup: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser warnings: - "Creating Rollup job [tz] with timezone [Canada/Mountain], but [Canada/Mountain] has been deprecated by the IANA. Use [America/Edmonton] instead." + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.put_job: id: tz body: > @@ -1012,6 +1052,8 @@ setup: "_rollup.version": 2 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "tz_rollup" body: @@ -1039,6 +1081,8 @@ setup: - match: { aggregations.histo.buckets.2.the_max.value: 3 } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "tz_rollup" body: @@ -1162,6 +1206,8 @@ setup: "_rollup.version": 2 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
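+      # Unlike `allowed_warnings` (which merely tolerates a warning), the
+      # `warnings` block on the tz put_job above is an exact-match assertion on
+      # the Canada/Mountain timezone deprecation; both keys can appear on the
+      # same request.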
rollup.rollup_search: index: "tz_rollup" body: @@ -1190,6 +1236,8 @@ setup: - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "tz_rollup" body: @@ -1221,6 +1269,8 @@ setup: "Search with typed_keys": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" typed_keys: true @@ -1254,6 +1304,8 @@ setup: "Search error against live index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: bad_request rollup.rollup_search: index: "foo" @@ -1270,6 +1322,8 @@ setup: "Search error against rollup and live index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: bad_request rollup.rollup_search: index: "foo*" @@ -1285,6 +1339,8 @@ setup: "Search error no matching indices": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Must specify at least one concrete index/ rollup.rollup_search: index: "bar*" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/start_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/start_job.yml index 50e6c46016348..a5ded0c138385 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/start_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/start_job.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -25,6 +25,8 @@ setup: my-id: { } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -53,6 +55,8 @@ setup: "Test start nonexistent job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Task for Rollup Job \[does_not_exist\] not found/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -64,6 +68,8 @@ setup: "Test start job twice": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.start_job: @@ -71,6 +77,8 @@ setup: - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser rollup.start_job: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/stop_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/stop_job.yml index 187c190a9efef..8f746420e78ae 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/stop_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/stop_job.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -25,6 +25,8 @@ setup: my-id: { } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -53,6 +55,8 @@ setup: "Test stop nonexistent job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Task for Rollup Job \[does_not_exist\] not found/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -64,6 +68,8 @@ setup: "Test stop job twice": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.start_job: @@ -71,6 +77,8 @@ setup: - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: @@ -78,6 +86,8 @@ setup: - is_true: stopped - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: @@ -88,6 +98,8 @@ setup: "Test stop non-started job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: @@ -98,6 +110,8 @@ setup: --- "Test wait_for_completion default timeout": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.start_job: @@ -105,6 +119,8 @@ setup: - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: @@ -115,6 +131,8 @@ setup: --- "Test wait_for_completion with custom timeout": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
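+      # Both the start_job and the wait_for_completion stop_job in this test
+      # carry the allowance, since every rollup request now returns the
+      # deprecation header.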
headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.start_job: @@ -122,6 +140,8 @@ setup: - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDeleteIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDeleteIT.java index bb68c7b84da5d..b5064c46c95ae 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDeleteIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformDeleteIT.java @@ -110,7 +110,7 @@ public void testDeleteWithParamDeletesAutoCreatedDestinationIndex() throws Excep deleteTransform(transformId, false, true); assertFalse(indexExists(transformDest)); - assertFalse(aliasExists(transformDest)); + assertFalse(aliasExists(transformDestAlias)); } public void testDeleteWithParamDeletesManuallyCreatedDestinationIndex() throws Exception { @@ -139,7 +139,7 @@ public void testDeleteWithParamDeletesManuallyCreatedDestinationIndex() throws E assertFalse(aliasExists(transformDestAlias)); } - public void testDeleteWithParamDoesNotDeleteManuallySetUpAlias() throws Exception { + public void testDeleteWithManuallyCreatedIndexAndManuallyCreatedAlias() throws Exception { String transformId = "transform-4"; String transformDest = transformId + "_idx"; String transformDestAlias = transformId + "_alias"; @@ -158,31 +158,106 @@ public void testDeleteWithParamDoesNotDeleteManuallySetUpAlias() throws Exceptio assertTrue(indexExists(transformDest)); assertTrue(aliasExists(transformDestAlias)); + deleteTransform(transformId, false, true); + assertFalse(indexExists(transformDest)); + assertFalse(aliasExists(transformDestAlias)); + } + + public void testDeleteDestinationIndexIsNoOpWhenNoDestinationIndexExists() throws Exception { + String transformId = "transform-5"; + String transformDest = transformId + "_idx"; + String transformDestAlias = transformId + "_alias"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest, transformDestAlias); + + createTransform(transformId, transformDest, transformDestAlias); + assertFalse(indexExists(transformDest)); + assertFalse(aliasExists(transformDestAlias)); + + deleteTransform(transformId, false, true); + assertFalse(indexExists(transformDest)); + assertFalse(aliasExists(transformDestAlias)); + } + + public void testDeleteWithAliasPointingToManyIndices() throws Exception { + var transformId = "transform-6"; + var transformDest = transformId + "_idx"; + var otherIndex = "some-other-index-6"; + String transformDestAlias = transformId + "_alias"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest, otherIndex, transformDestAlias); + + createIndex(transformDest, null, null, "\"" + transformDestAlias + "\": { \"is_write_index\": true }"); + createIndex(otherIndex, null, null, "\"" + transformDestAlias + "\": {}"); + + assertTrue(indexExists(transformDest)); + assertTrue(indexExists(otherIndex)); + 
assertTrue(aliasExists(transformDestAlias)); + + createTransform(transformId, transformDestAlias, null); + + startTransform(transformId); + waitForTransformCheckpoint(transformId, 1); + + stopTransform(transformId, false); + + assertTrue(indexExists(transformDest)); + assertTrue(indexExists(otherIndex)); + assertTrue(aliasExists(transformDestAlias)); + + deleteTransform(transformId, false, true); + + assertFalse(indexExists(transformDest)); + assertTrue(indexExists(otherIndex)); + assertTrue(aliasExists(transformDestAlias)); + } + + public void testDeleteWithNoWriteIndexThrowsException() throws Exception { + var transformId = "transform-7"; + var transformDest = transformId + "_idx"; + var otherIndex = "some-other-index-7"; + String transformDestAlias = transformId + "_alias"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest, otherIndex, transformDestAlias); + + createIndex(transformDest, null, null, "\"" + transformDestAlias + "\": {}"); + + assertTrue(indexExists(transformDest)); + assertTrue(aliasExists(transformDestAlias)); + + createTransform(transformId, transformDestAlias, null); + + createIndex(otherIndex, null, null, "\"" + transformDestAlias + "\": {}"); + assertTrue(indexExists(otherIndex)); + ResponseException e = expectThrows(ResponseException.class, () -> deleteTransform(transformId, false, true)); assertThat( e.getMessage(), containsString( Strings.format( - "The provided expression [%s] matches an alias, specify the corresponding concrete indices instead.", + "Cannot disambiguate destination index alias [%s]. Alias points to many indices with no clear write alias." + + " Retry with delete_dest_index=false and manually clean up destination index.", transformDestAlias ) ) ); } - public void testDeleteDestinationIndexIsNoOpWhenNoDestinationIndexExists() throws Exception { - String transformId = "transform-5"; - String transformDest = transformId + "_idx"; - String transformDestAlias = transformId + "_alias"; - setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest, transformDestAlias); + public void testDeleteWithAlreadyDeletedIndex() throws Exception { + var transformId = "transform-8"; + var transformDest = transformId + "_idx"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformDest); + + createIndex(transformDest); + + assertTrue(indexExists(transformDest)); + + createTransform(transformId, transformDest, null); + + deleteIndex(transformDest); - createTransform(transformId, transformDest, transformDestAlias); assertFalse(indexExists(transformDest)); - assertFalse(aliasExists(transformDestAlias)); deleteTransform(transformId, false, true); + assertFalse(indexExists(transformDest)); - assertFalse(aliasExists(transformDestAlias)); } private void createTransform(String transformId, String destIndex, String destAlias) throws IOException { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java index 322ac63a819fc..83f731e298159 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java @@ -2003,6 +2003,84 @@ public void testPivotWithTopMetrics() throws 
Exception { assertEquals("business_3", actual); } + @SuppressWarnings(value = "unchecked") + public void testPivotWithExtendedStats() throws Exception { + var transformId = "extended_stats_transform"; + var transformIndex = "extended_stats_pivot_reviews"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformIndex); + + var createTransformRequest = createRequestWithAuth( + "PUT", + getTransformEndpoint() + transformId, + BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS + ); + + var config = Strings.format(""" + { + "source": { + "index": "%s" + }, + "dest": { + "index": "%s" + }, + "pivot": { + "group_by": { + "reviewer": { + "terms": { + "field": "user_id" + } + } + }, + "aggregations": { + "stars": { + "extended_stats": { + "field": "stars" + } + } + } + } + }""", REVIEWS_INDEX_NAME, transformIndex); + + createTransformRequest.setJsonEntity(config); + var createTransformResponse = entityAsMap(client().performRequest(createTransformRequest)); + assertThat(createTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + startAndWaitForTransform(transformId, transformIndex, BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS); + assertTrue(indexExists(transformIndex)); + + var searchResult = getAsMap(transformIndex + "/_search?q=reviewer:user_4"); + assertEquals(1, XContentMapValues.extractValue("hits.total.value", searchResult)); + var stdDevMap = (Map) ((List) XContentMapValues.extractValue("hits.hits._source.stars", searchResult)).get(0); + assertThat(stdDevMap.get("count"), equalTo(41)); + assertThat( + stdDevMap, + allOf( + hasEntry("sum", 159.0), + hasEntry("min", 1.0), + hasEntry("max", 5.0), + hasEntry("avg", 3.8780487804878048), + hasEntry("sum_of_squares", 711.0), + hasEntry("variance", 2.3022010707911953), + hasEntry("variance_population", 2.3022010707911953), + hasEntry("variance_sampling", 2.3597560975609753), + hasEntry("std_deviation", 1.5173005868288574), + hasEntry("std_deviation_sampling", 1.5361497640402693), + hasEntry("std_deviation_population", 1.5173005868288574) + ) + ); + assertThat( + (Map) stdDevMap.get("std_deviation_bounds"), + allOf( + hasEntry("upper", 6.91264995414552), + hasEntry("lower", 0.84344760683009), + hasEntry("upper_population", 6.91264995414552), + hasEntry("lower_population", 0.84344760683009), + hasEntry("upper_sampling", 6.950348308568343), + hasEntry("lower_sampling", 0.8057492524072662) + ) + ); + } + public void testPivotWithBoxplot() throws Exception { String transformId = "boxplot_transform"; String transformIndex = "boxplot_pivot_reviews"; diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 537f50a30b5dd..20ec649f74811 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -412,7 +412,7 @@ protected void updateTransform(String transformId, String update, boolean deferV } updateTransformRequest.setJsonEntity(update); - client().performRequest(updateTransformRequest); + assertOKAndConsume(client().performRequest(updateTransformRequest)); } protected void startTransform(String transformId) throws IOException { diff --git 
a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/auditor/NotificationsIndexIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/auditor/NotificationsIndexIT.java new file mode 100644 index 0000000000000..94fd24fbdfd5b --- /dev/null +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/auditor/NotificationsIndexIT.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.auditor; + +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.metadata.AliasMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; +import org.elasticsearch.xpack.transform.TransformSingleNodeTestCase; +import org.elasticsearch.xpack.transform.notifications.TransformAuditor; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.is; + +public class NotificationsIndexIT extends TransformSingleNodeTestCase { + public void testAliasCreated() throws Exception { + // Auditing a notification should create the .transform-notifications-000002 + // index and the write alias + createNotification(true); + + assertBusy(() -> { + assertNotificationsIndexExists(); + assertNotificationsWriteAliasCreated(); + }); + } + + private void assertNotificationsIndexExists() { + GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .setIndices(TransformInternalIndexConstants.AUDIT_INDEX) + .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) + .get(); + assertThat(Arrays.asList(getIndexResponse.getIndices()), contains(TransformInternalIndexConstants.AUDIT_INDEX)); + } + + private void assertNotificationsWriteAliasCreated() { + Map<String, List<AliasMetadata>> aliases = indicesAdmin().prepareGetAliases( + TimeValue.timeValueSeconds(10L), + TransformInternalIndexConstants.AUDIT_INDEX_WRITE_ALIAS + ).setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get().getAliases(); + assertThat(aliases.size(), is(1)); + List<AliasMetadata> indexAliases = aliases.get(TransformInternalIndexConstants.AUDIT_INDEX); + assertNotNull(aliases.toString(), indexAliases); + assertThat(indexAliases.size(), is(1)); + var writeAlias = indexAliases.get(0); + assertThat(writeAlias.alias(), is(TransformInternalIndexConstants.AUDIT_INDEX_WRITE_ALIAS)); + assertThat("notification write alias should be hidden but is not: " + aliases, writeAlias.isHidden(), is(true)); + } + + private void createNotification(boolean includeNodeInfo) { + var clusterService = getInstanceFromNode(ClusterService.class); + TransformAuditor auditor = new TransformAuditor( + client(), + clusterService.getNodeName(), + clusterService, + TestIndexNameExpressionResolver.newInstance(), + includeNodeInfo + ); + auditor.info("whatever", "blah"); + } +} diff --git
a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java index e4e577299d0d7..4ce17b46805e3 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java @@ -12,6 +12,8 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateAction; +import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -61,6 +63,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformStats; import org.elasticsearch.xpack.core.transform.transforms.latest.LatestConfig; import org.elasticsearch.xpack.transform.LocalStateTransform; +import org.junit.After; import org.junit.Before; import java.io.IOException; @@ -136,6 +139,11 @@ public void setUpNamedXContentRegistryAndIndices() throws Exception { remoteNewDocs = createIndexAndIndexDocs(REMOTE_CLUSTER, "remote_new_index", newRemoteNumShards, timestamp, randomBoolean()); } + @After + public void cleanup() { + client().execute(ResetFeatureStateAction.INSTANCE, new ResetFeatureStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); + } + private int createIndexAndIndexDocs(String cluster, String index, int numberOfShards, long timestamp, boolean exposeTimestamp) throws Exception { Client client = client(cluster); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index d67abd45b3092..7f4b9543698b4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -140,6 +140,7 @@ public class Transform extends Plugin implements SystemIndexPlugin, PersistentTa private final Settings settings; private final SetOnce<TransformServices> transformServices = new SetOnce<>(); private final SetOnce<TransformConfigAutoMigration> transformConfigAutoMigration = new SetOnce<>(); + private final SetOnce<TransformAuditor> transformAuditor = new SetOnce<>(); private final TransformExtension transformExtension = new DefaultTransformExtension(); public static final Integer DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE = Integer.valueOf(500); @@ -299,8 +300,10 @@ public Collection<?> createComponents(PluginServices services) { client, clusterService.getNodeName(), clusterService, + services.indexNameExpressionResolver(), getTransformExtension().includeNodeInfo() ); + this.transformAuditor.set(auditor); Clock clock = Clock.systemUTC(); TransformCheckpointService checkpointService = new TransformCheckpointService( clock, @@ -443,8 +446,12 @@ public void cleanUpFeature( ActionListener<ResetFeatureStateResponse.ResetFeatureStateStatus> finalListener ) { OriginSettingClient client = new OriginSettingClient(unwrappedClient, TRANSFORM_ORIGIN); - ActionListener<ResetFeatureStateResponse.ResetFeatureStateStatus> unsetResetModeListener = ActionListener.wrap( - success -> client.execute( + ActionListener<ResetFeatureStateResponse.ResetFeatureStateStatus> unsetResetModeListener = 
ActionListener.wrap(success -> { + // + if (transformAuditor.get() != null) { + transformAuditor.get().reset(); + } + client.execute( SetResetModeAction.INSTANCE, SetResetModeActionRequest.disabled(true), ActionListener.wrap(resetSuccess -> finalListener.onResponse(success), resetFailure -> { @@ -457,7 +464,8 @@ public void cleanUpFeature( ) ); }) - ), + ); + }, failure -> client.execute( SetResetModeAction.INSTANCE, SetResetModeActionRequest.disabled(false), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java index 41b683a7965ca..619e72581cb51 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java @@ -10,9 +10,13 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; import org.elasticsearch.client.internal.Client; @@ -27,6 +31,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; @@ -42,6 +47,8 @@ import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; import org.elasticsearch.xpack.transform.transforms.TransformTask; +import java.util.Objects; + import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.executeWithHeadersAsync; @@ -146,20 +153,31 @@ private void deleteDestinationIndex( TimeValue timeout, ActionListener<AcknowledgedResponse> listener ) { - // <3> Check if the error is an "index not found" error. If so, just move on. The index is already deleted.
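// The replacement flow below swaps the nested ActionListener callbacks for a SubscribableListener
// chain: fetch the transform config, resolve the destination (following a write alias to its concrete
// index when one is configured), delete that index, and treat IndexNotFoundException as an
// acknowledged no-op. A minimal sketch of the chain, using the helper names introduced below:
//
//     getTransformConfig(transformId)
//         .andThen((l, configAndVersion) -> deleteDestinationIndex(configAndVersion.v1(), parentTaskId, timeout, l))
//         .addListener(listener.delegateResponse((l, e) -> {
//             if (e instanceof IndexNotFoundException) {
//                 l.onResponse(AcknowledgedResponse.TRUE); // the index is already gone, so deletion succeeded
//             } else {
//                 l.onFailure(e);
//             }
//         }));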
- ActionListener<AcknowledgedResponse> deleteDestIndexListener = ActionListener.wrap(listener::onResponse, e -> { - if (e instanceof IndexNotFoundException) { - listener.onResponse(AcknowledgedResponse.TRUE); - } else { - listener.onFailure(e); - } - }); + getTransformConfig(transformId).andThen((l, r) -> deleteDestinationIndex(r.v1(), parentTaskId, timeout, l)) + .addListener(listener.delegateResponse((l, e) -> { + if (e instanceof IndexNotFoundException) { + l.onResponse(AcknowledgedResponse.TRUE); + } else { + l.onFailure(e); + } + })); + } - // <2> Delete destination index - ActionListener<Tuple<TransformConfig, SeqNoPrimaryTermAndIndex>> getTransformConfigurationListener = ActionListener.wrap( - transformConfigAndVersion -> { - TransformConfig config = transformConfigAndVersion.v1(); - String destIndex = config.getDestination().getIndex(); + private SubscribableListener<Tuple<TransformConfig, SeqNoPrimaryTermAndIndex>> getTransformConfig(String transformId) { + return SubscribableListener.newForked(l -> transformConfigManager.getTransformConfigurationForUpdate(transformId, l)); + } + + /** + * Delete the destination index. If the Transform is configured to write to an alias, then follow that alias to the concrete index. + */ + private void deleteDestinationIndex( + TransformConfig config, + TaskId parentTaskId, + TimeValue timeout, + ActionListener<AcknowledgedResponse> listener + ) { + SubscribableListener.newForked(l -> resolveDestinationIndex(config, parentTaskId, timeout, l)) + .andThen((l, destIndex) -> { DeleteIndexRequest deleteDestIndexRequest = new DeleteIndexRequest(destIndex); deleteDestIndexRequest.ackTimeout(timeout); deleteDestIndexRequest.setParentTask(parentTaskId); @@ -169,14 +187,57 @@ private void deleteDestinationIndex( client, TransportDeleteIndexAction.TYPE, deleteDestIndexRequest, - deleteDestIndexListener + l ); - }, - listener::onFailure - ); + }) + .addListener(listener); + } + + private void resolveDestinationIndex(TransformConfig config, TaskId parentTaskId, TimeValue timeout, ActionListener<String> listener) { + var destIndex = config.getDestination().getIndex(); + var responseListener = ActionListener.wrap(r -> findDestinationIndexInAliases(r, destIndex, listener), e -> { + if (e instanceof AliasesNotFoundException) { + // no alias == the destIndex is our concrete index + listener.onResponse(destIndex); + } else { + listener.onFailure(e); + } + }); + + GetAliasesRequest request = new GetAliasesRequest(timeout, destIndex); + request.setParentTask(parentTaskId); + executeWithHeadersAsync(config.getHeaders(), TRANSFORM_ORIGIN, client, GetAliasesAction.INSTANCE, request, responseListener); + } - // <1> Fetch transform configuration - transformConfigManager.getTransformConfigurationForUpdate(transformId, getTransformConfigurationListener); + private static void findDestinationIndexInAliases(GetAliasesResponse aliases, String destIndex, ActionListener<String> listener) { + var indexToAliases = aliases.getAliases(); + if (indexToAliases.isEmpty()) { + // if the alias list is empty, that means the index is a concrete index + listener.onResponse(destIndex); + } else if (indexToAliases.size() == 1) { + // if there is one value, the alias will treat it as the write index, so it's our destination index + listener.onResponse(indexToAliases.keySet().iterator().next()); + } else { + // if there is more than one index, there may be more than one alias for each index + // we have to search for the alias that matches our destination index name AND is declared the write index for that alias + indexToAliases.entrySet().stream().map(entry -> { + if (entry.getValue().stream().anyMatch(md -> destIndex.equals(md.getAlias()) && 
Boolean.TRUE.equals(md.writeIndex()))) { + return entry.getKey(); + } else { + return null; + } + }).filter(Objects::nonNull).findFirst().ifPresentOrElse(listener::onResponse, () -> { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot disambiguate destination index alias [" + + destIndex + + "]. Alias points to many indices with no clear write alias. Retry with delete_dest_index=false and manually" + + " clean up destination index.", + RestStatus.CONFLICT + ) + ); + }); + } } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java index 7a1f874da66a7..402a8cbe12bd5 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; @@ -23,7 +24,7 @@ import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import java.io.IOException; -import java.util.Collections; +import java.util.List; import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; @@ -38,28 +39,21 @@ public class TransformAuditor extends AbstractAuditor<TransformAuditMessage> { private final boolean includeNodeInfo; - public TransformAuditor(Client client, String nodeName, ClusterService clusterService, boolean includeNodeInfo) { + public TransformAuditor( + Client client, + String nodeName, + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver, + boolean includeNodeInfo + ) { super( new OriginSettingClient(client, TRANSFORM_ORIGIN), - TransformInternalIndexConstants.AUDIT_INDEX, - TransformInternalIndexConstants.AUDIT_INDEX, - () -> { - try { - return new TransportPutComposableIndexTemplateAction.Request(TransformInternalIndexConstants.AUDIT_INDEX).indexTemplate( - ComposableIndexTemplate.builder() - .template(TransformInternalIndex.getAuditIndexTemplate()) - .version((long) TransformConfigVersion.CURRENT.id()) - .indexPatterns(Collections.singletonList(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN)) - .priority(Long.MAX_VALUE) - .build() - ); - } catch (IOException e) { - throw new ElasticsearchException("Failure creating transform notification index template request", e); - } - }, + TransformInternalIndexConstants.AUDIT_INDEX_WRITE_ALIAS, nodeName, TransformAuditMessage::new, - clusterService + clusterService, + indexNameExpressionResolver, + clusterService.threadPool().generic() ); clusterService.addListener(event -> { if (event.metadataChanged()) { @@ -93,4 +87,30 @@ protected void writeBacklog() { super.writeBacklog(); } } + + @Override + protected TransportPutComposableIndexTemplateAction.Request putTemplateRequest() { + try { + return new TransportPutComposableIndexTemplateAction.Request(TransformInternalIndexConstants.AUDIT_INDEX).indexTemplate( + ComposableIndexTemplate.builder() + .template(TransformInternalIndex.getAuditIndexTemplate()) + .version((long)
TransformConfigVersion.CURRENT.id()) + .indexPatterns(List.of(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN)) + .priority(Long.MAX_VALUE) + .build() + ); + } catch (IOException e) { + throw new ElasticsearchException("Failure creating transform notification index template request", e); + } + } + + @Override + protected int templateVersion() { + return TransformConfigVersion.CURRENT.id(); + } + + @Override + protected IndexDetails indexDetails() { + return new IndexDetails(TransformInternalIndexConstants.AUDIT_INDEX_PREFIX, TransformInternalIndexConstants.AUDIT_TEMPLATE_VERSION); + } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java index 14e9292d10fd1..0cd8558b3dbe4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java @@ -385,7 +385,7 @@ protected static boolean hasLatestVersionedIndex(ClusterState state) { protected static boolean allPrimaryShardsActiveForLatestVersionedIndex(ClusterState state) { IndexRoutingTable indexRouting = state.routingTable().index(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME); - return indexRouting != null && indexRouting.allPrimaryShardsActive() && indexRouting.readyForSearch(state); + return indexRouting != null && indexRouting.allPrimaryShardsActive() && indexRouting.readyForSearch(); } private static void waitForLatestVersionedIndexShardsActive(Client client, ActionListener listener) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index 9cd0f3abcb07d..b7bd434194b80 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -188,9 +188,7 @@ static List verifyIndicesPrimaryShardsAreActive(ClusterState clusterStat List unavailableIndices = new ArrayList<>(indices.length); for (String index : indices) { IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(clusterState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { unavailableIndices.add(index); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java index a851e4a47f1cc..663b2acb0a01b 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java @@ -23,6 +23,7 @@ import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.metrics.GeoBounds; import 
org.elasticsearch.search.aggregations.metrics.GeoCentroid; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; import org.elasticsearch.search.aggregations.metrics.MultiValueAggregation; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.MultiValue; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; @@ -69,6 +70,7 @@ public final class AggregationResultUtils { tempMap.put(GeoShapeMetricAggregation.class.getName(), new GeoShapeMetricAggExtractor()); tempMap.put(MultiValue.class.getName(), new NumericMultiValueAggExtractor()); tempMap.put(MultiValueAggregation.class.getName(), new MultiValueAggExtractor()); + tempMap.put(InternalExtendedStats.class.getName(), new ExtendedStatsExtractor()); TYPE_VALUE_EXTRACTOR_MAP = Collections.unmodifiableMap(tempMap); } @@ -171,6 +173,9 @@ static AggValueExtractor getExtractor(Aggregation aggregation) { // TODO: can the Range extractor be removed? } else if (aggregation instanceof Range) { return TYPE_VALUE_EXTRACTOR_MAP.get(Range.class.getName()); + } else if (aggregation instanceof InternalExtendedStats) { + // note: extended stats is also a multi bucket agg, therefore check range first + return TYPE_VALUE_EXTRACTOR_MAP.get(InternalExtendedStats.class.getName()); } else if (aggregation instanceof MultiValue) { return TYPE_VALUE_EXTRACTOR_MAP.get(MultiValue.class.getName()); } else if (aggregation instanceof MultiValueAggregation) { @@ -281,6 +286,13 @@ public Object value(Aggregation agg, Map fieldTypeMap, String lo } } + static class ExtendedStatsExtractor implements AggValueExtractor { + @Override + public Object value(Aggregation agg, Map fieldTypeMap, String lookupFieldPrefix) { + return ((InternalExtendedStats) agg).asIndexableMap(); + } + } + static class MultiValueAggExtractor implements AggValueExtractor { @Override public Object value(Aggregation agg, Map fieldTypeMap, String lookupFieldPrefix) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java index 95e05d93ff03a..16ad1eb8fcd51 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java @@ -60,7 +60,6 @@ public final class TransformAggregations { "date_histogram", "date_range", "diversified_sampler", - "extended_stats", // https://github.com/elastic/elasticsearch/issues/51925 "filters", "geo_distance", "geohash_grid", @@ -120,7 +119,8 @@ enum AggregationType { MISSING("missing", LONG), TOP_METRICS("top_metrics", SOURCE), STATS("stats", DOUBLE), - BOXPLOT("boxplot", DOUBLE); + BOXPLOT("boxplot", DOUBLE), + EXTENDED_STATS("extended_stats", DOUBLE); private final String aggregationType; private final String targetMapping; diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java index fa957a2ac89cf..3231d705f389c 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java @@ -18,7 +18,6 @@ 
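// The TransformUpdaterTests and MockTransformAuditor changes below replace direct
// "new MockTransformAuditor(clusterService)" construction with the MockTransformAuditor.createMockAuditor()
// factory, which also stubs the generic thread pool executor and the IndexNameExpressionResolver that the
// auditor's widened constructor now requires.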
import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; @@ -66,7 +65,6 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Mockito.mock; public class TransformUpdaterTests extends ESTestCase { @@ -77,8 +75,7 @@ public class TransformUpdaterTests extends ESTestCase { private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); private TestThreadPool threadPool; private Client client; - private ClusterService clusterService = mock(ClusterService.class); - private TransformAuditor auditor = new MockTransformAuditor(clusterService); + private TransformAuditor auditor; private final Settings settings = Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build(); private final Settings destIndexSettings = new DefaultTransformExtension().getTransformDestinationIndexSettings(); @@ -124,8 +121,7 @@ public void setupClient() { } threadPool = createThreadPool(); client = new MyMockClient(threadPool); - clusterService = mock(ClusterService.class); - auditor = new MockTransformAuditor(clusterService); + auditor = MockTransformAuditor.createMockAuditor(); } @After diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeActionTests.java index 97ef367dca8e5..f31922d5b69f4 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeActionTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportSetTransformUpgradeModeActionTests.java @@ -38,6 +38,7 @@ import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.matches; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -176,7 +177,7 @@ public void testEnableUpgradeMode() throws InterruptedException { upgradeModeSuccessfullyChanged(stateWithTransformTask(), assertNoFailureListener(r -> { assertThat(r, is(AcknowledgedResponse.TRUE)); - verify(clusterService).submitUnbatchedStateUpdateTask(eq("unassign persistent task from any node"), any()); + verify(clusterService).submitUnbatchedStateUpdateTask(matches("unassign persistent task \\[.*\\] from any node"), any()); })); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java index 7ae1795875db5..4eb255b69cfd3 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java @@ -11,10 +11,13 @@ import org.apache.logging.log4j.Logger; import 
org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.transform.notifications.TransformAuditMessage; @@ -50,14 +53,17 @@ public static MockTransformAuditor createMockAuditor() { when(state.getMetadata()).thenReturn(metadata); ClusterService clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(state); + ThreadPool threadPool = mock(); + when(threadPool.generic()).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); + when(clusterService.threadPool()).thenReturn(threadPool); - return new MockTransformAuditor(clusterService); + return new MockTransformAuditor(clusterService, mock(IndexNameExpressionResolver.class)); } private final List expectations; - public MockTransformAuditor(ClusterService clusterService) { - super(mock(Client.class), MOCK_NODE_NAME, clusterService, true); + private MockTransformAuditor(ClusterService clusterService, IndexNameExpressionResolver indexNameResolver) { + super(mock(Client.class), MOCK_NODE_NAME, clusterService, indexNameResolver, true); expectations = new CopyOnWriteArrayList<>(); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java index 4564ec5cc67ea..41a913ec4c2b6 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder; @@ -31,7 +32,9 @@ import java.util.Map; import java.util.stream.Collectors; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; public class TransformAggregationsTests extends ESTestCase { @@ -137,6 +140,9 @@ public void testResolveTargetMapping() { assertEquals("double", TransformAggregations.resolveTargetMapping("stats", null)); assertEquals("double", TransformAggregations.resolveTargetMapping("stats", "int")); + // extended stats + assertEquals("double", TransformAggregations.resolveTargetMapping("extended_stats", "double")); + // boxplot assertEquals("double", TransformAggregations.resolveTargetMapping("boxplot", "double")); @@ -220,6 +226,39 @@ public void testGetAggregationOutputTypesStats() 
{ assertEquals("stats", outputTypes.get("stats.sum")); } + public void testGetAggregationOutputTypesExtendedStats() { + var extendedStatsAggregationBuilder = new ExtendedStatsAggregationBuilder("extended_stats"); + + var inputAndOutputTypes = TransformAggregations.getAggregationInputAndOutputTypes(extendedStatsAggregationBuilder); + var outputTypes = inputAndOutputTypes.v2(); + assertEquals(18, outputTypes.size()); + assertThat( + outputTypes, + allOf( + hasEntry("extended_stats.count", "extended_stats"), + hasEntry("extended_stats.sum", "extended_stats"), + hasEntry("extended_stats.avg", "extended_stats"), + hasEntry("extended_stats.min", "extended_stats"), + hasEntry("extended_stats.max", "extended_stats"), + + hasEntry("extended_stats.sum_of_squares", "extended_stats"), + hasEntry("extended_stats.variance", "extended_stats"), + hasEntry("extended_stats.variance_population", "extended_stats"), + hasEntry("extended_stats.variance_sampling", "extended_stats"), + hasEntry("extended_stats.std_deviation", "extended_stats"), + hasEntry("extended_stats.std_deviation_population", "extended_stats"), + hasEntry("extended_stats.std_deviation_sampling", "extended_stats"), + + hasEntry("extended_stats.std_deviation_bounds.upper", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.lower", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.upper_population", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.lower_population", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.upper_sampling", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.lower_sampling", "extended_stats") + ) + ); + } + public void testGetAggregationOutputTypesRange() { { AggregationBuilder rangeAggregationBuilder = new RangeAggregationBuilder("range_agg_name").addUnboundedTo(100) diff --git a/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..2eb0d0dbd9881 --- /dev/null +++ b/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,2 @@ +ALL-UNNAMED: + - manage_threads diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 337fc00cc7caf..093959978b0d1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -112,8 +112,8 @@ public static void main(String[] args) throws Exception { Node node = new Node( internalNodeEnv, PluginsLoader.createPluginsLoader( - PluginsLoader.loadModulesBundles(internalNodeEnv.modulesFile()), - PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsFile()), + PluginsLoader.loadModulesBundles(internalNodeEnv.modulesDir()), + PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsDir()), Map.of() ) ).start() diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 0b31e96ece84a..b7a9b8af057e0 100644 --- 
a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -570,11 +570,11 @@ public void testRegexAcceleration() throws IOException, ParseException { { "(maynotexist)?foobar", "+eoo +ooa +oaa +aaq +aq_ +q__" }, { ".*/etc/passw.*", "+\\/es +esc +sc\\/ +c\\/o +\\/oa +oas +ass +ssw" }, { ".*etc/passwd", " +esc +sc\\/ +c\\/o +\\/oa +oas +ass +ssw +swc +wc_ +c__" }, - { "(http|ftp)://foo.*", "+((+gss +sso) eso) +(+\\/\\/\\/ +\\/\\/e +\\/eo +eoo)" }, + { "(http|ftp)://foo.*", "+\\/\\/\\/ +\\/\\/e +\\/eo +eoo +((+gss +sso) eso)" }, { "[Pp][Oo][Ww][Ee][Rr][Ss][Hh][Ee][Ll][Ll]\\.[Ee][Xx][Ee]", "+_oo +oow +owe +weq +eqs +qsg +sge +gek +ekk +kk\\/ +k\\/e +\\/ew +ewe +we_ +e__" }, - { "foo<1-100>bar", "+(+_eo +eoo) +(+aaq +aq_ +q__)" }, + { "foo<1-100>bar", "+_eo +eoo +aaq +aq_ +q__" }, { "(aaa.+&.+bbb)cat", "+cas +as_ +s__" }, { ".a", "a__" } }; for (String[] test : acceleratedTests) { diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 65f2282014dc4..f85d6ba961e0b 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -31,7 +31,7 @@ restResources { tasks.named("yamlRestTest").configure { ArrayList blacklist = [ 'index/10_with_id/Index with ID', - 'indices.get_alias/10_basic/Get alias against closed indices' + 'indices.get_alias/10_basic/Get alias against closed indices', ]; if (buildParams.isSnapshotBuild() == false) { blacklist += [ diff --git a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java index 0b40828b8e86c..16baa729d9703 100644 --- a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java +++ b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java @@ -46,9 +46,11 @@ public class CoreWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTest .setting("xpack.ml.enabled", "false") .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.autoconfiguration.enabled", "false") + .systemProperty("es.queryable_built_in_roles_enabled", "false") .user(USER, PASS) .feature(FeatureFlag.TIME_SERIES_MODE) .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) + .feature(FeatureFlag.INDEX_RECOVERY_USE_SYNTHETIC_SOURCE) .build(); public CoreWithSecurityClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java index 261bc567d5c91..74cb057278c4a 100644 --- a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java +++ b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java @@ -152,7 +152,7 @@ public void tearDownMiniKdc() throws IOException, PrivilegedActionException { protected Path getKeytabPath(Environment env) { final Setting setting = 
KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.getConcreteSettingForNamespace(REALM_NAME); - return env.configFile().resolve(setting.get(settings)); + return env.configDir().resolve(setting.get(settings)); } /** diff --git a/x-pack/qa/freeze-plugin/build.gradle b/x-pack/qa/freeze-plugin/build.gradle deleted file mode 100644 index 03704dcc57a6f..0000000000000 --- a/x-pack/qa/freeze-plugin/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - - -apply plugin: 'elasticsearch.base-internal-es-plugin' - -esplugin { - name = 'freeze-plugin' - description = 'Provides freeze-index endpoint for testing purposes only' - classname = 'org.elasticsearch.plugin.freeze.FreezeIndexPlugin' - extendedPlugins = ['x-pack-core'] -} - -dependencies { - compileOnly project(":server") - compileOnly project(path: xpackModule('core')) -} - -tasks.named('test').configure { enabled = false } - diff --git a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java b/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java deleted file mode 100644 index af9a741b0aef1..0000000000000 --- a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.plugin.freeze; - -import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.IndexScopedSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.plugins.ActionPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.protocol.xpack.frozen.FreezeRequest; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; - -import java.util.List; -import java.util.function.Predicate; -import java.util.function.Supplier; - -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestUtils.getAckTimeout; -import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; - -/** - * Restores the REST endpoint for freezing indices so that the JDBC tests can still freeze indices - * for testing purposes until frozen indices are no longer supported. 
- */ -public class FreezeIndexPlugin extends Plugin implements ActionPlugin { - - @Override - public List getRestHandlers( - Settings settings, - NamedWriteableRegistry namedWriteableRegistry, - RestController restController, - ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, - SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster, - Predicate clusterSupportsFeature - ) { - return List.of(new FreezeIndexRestEndpoint()); - } - - /** - * Used by the {@link FreezeIndexPlugin} above. - */ - static class FreezeIndexRestEndpoint extends BaseRestHandler { - @Override - public String getName() { - return "freeze-for-testing-only"; - } - - @Override - public List routes() { - return List.of(new Route(POST, "/{index}/_freeze")); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - boolean freeze = request.path().endsWith("/_freeze"); - FreezeRequest freezeRequest = new FreezeRequest( - getMasterNodeTimeout(request), - getAckTimeout(request), - Strings.splitStringByCommaToArray(request.param("index")) - ); - freezeRequest.indicesOptions(IndicesOptions.fromRequest(request, freezeRequest.indicesOptions())); - String waitForActiveShards = request.param("wait_for_active_shards"); - if (waitForActiveShards != null) { - freezeRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards)); - } - freezeRequest.setFreeze(freeze); - return channel -> client.execute(FreezeIndexAction.INSTANCE, freezeRequest, new RestToXContentListener<>(channel)); - } - } - -} diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java index 96acaaa5b41b4..87c7dedf0a409 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java @@ -10,6 +10,7 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.upgrades.ParameterizedFullClusterRestartTestCase; @@ -20,7 +21,7 @@ public abstract class AbstractXpackFullClusterRestartTestCase extends Parameteri @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) // some tests rely on the translog not being flushed .setting("indices.memory.shard_inactive_time", "60m") diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 516dd4759861f..762d8b4ac8655 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ 
b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -430,6 +430,7 @@ public void testRollupAfterRestart() throws Exception { // create the rollup job final Request createRollupJobRequest = new Request("PUT", "/_rollup/job/rollup-job-test"); + createRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); createRollupJobRequest.setJsonEntity(""" { "index_pattern": "rollup-*", @@ -455,6 +456,7 @@ public void testRollupAfterRestart() throws Exception { // start the rollup job final Request startRollupJobRequest = new Request("POST", "/_rollup/job/rollup-job-test/_start"); + startRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Map startRollupJobResponse = entityAsMap(client().performRequest(startRollupJobRequest)); assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); @@ -823,6 +825,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { // check that the rollup job is started using the RollUp API final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Map getRollupJobResponse = entityAsMap(client().performRequest(getRollupJobRequest)); Map job = getJob(getRollupJobResponse, rollupJob); assertNotNull(job); @@ -865,7 +868,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { private void waitForRollUpJob(final String rollupJob, final Matcher expectedStates) throws Exception { assertBusy(() -> { final Request getRollupJobRequest = new Request("GET", "/_rollup/job/" + rollupJob); - + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); diff --git a/x-pack/qa/multi-node/src/javaRestTest/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/javaRestTest/java/org/elasticsearch/multi_node/RollupIT.java index e8fce8e513165..4afcf8c20344a 100644 --- a/x-pack/qa/multi-node/src/javaRestTest/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/javaRestTest/java/org/elasticsearch/multi_node/RollupIT.java @@ -128,6 +128,7 @@ public void testBigRollup() throws Exception { // create the rollup job final Request createRollupJobRequest = new Request("PUT", "/_rollup/job/rollup-job-test"); + createRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); int pageSize = randomIntBetween(2, 50); // fast cron so test runs quickly createRollupJobRequest.setJsonEntity(Strings.format(""" @@ -154,11 +155,13 @@ public void testBigRollup() throws Exception { ] }""", pageSize)); + assertWarnings(); var createRollupJobResponse = responseAsMap(client().performRequest(createRollupJobRequest)); assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); // start the rollup job final Request startRollupJobRequest = new Request("POST", "_rollup/job/rollup-job-test/_start"); + startRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); var startRollupJobResponse = responseAsMap(client().performRequest(startRollupJobRequest)); assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); @@ -167,6 +170,7 @@ public void testBigRollup() throws Exception { // Wait for the job to finish, by watching how many rollup docs we've indexed assertBusy(() -> { final Request getRollupJobRequest = new Request("GET", "_rollup/job/rollup-job-test"); + 
getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); @@ -211,6 +215,7 @@ public void testBigRollup() throws Exception { var liveBody = responseAsMap(liveResponse); request = new Request("GET", "results-rollup/_rollup_search"); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); request.setJsonEntity(jsonRequestBody); Response rollupResponse = client().performRequest(request); var rollupBody = responseAsMap(rollupResponse); @@ -223,6 +228,7 @@ public void testBigRollup() throws Exception { request = new Request("GET", "rollup-docs/_rollup_search"); request.setJsonEntity(jsonRequestBody); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); Response liveRollupResponse = client().performRequest(request); var liveRollupBody = responseAsMap(liveRollupResponse); @@ -241,6 +247,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { // check that the rollup job is started using the RollUp API final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); var getRollupJobResponse = responseAsMap(client().performRequest(getRollupJobRequest)); Map job = getJob(getRollupJobResponse, rollupJob); if (job != null) { @@ -286,6 +293,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { private void waitForRollUpJob(final String rollupJob, String[] expectedStates) throws Exception { assertBusy(() -> { final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index e45571fd7056e..4edf4b0c6277a 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -47,7 +47,6 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> testDistribution = "DEFAULT" versions = [oldVersion, project.version] numberOfNodes = 3 - systemProperty 'es.queryable_built_in_roles_enabled', 'true' systemProperty 'ingest.geoip.downloader.enabled.default', 'true' //we don't want to hit real service from each test systemProperty 'ingest.geoip.downloader.endpoint.default', 'http://invalid.endpoint' diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 746c8c926086e..ca5fdf94e28f9 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -8,12 +8,15 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.Build; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.client.Node; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.client.WarningsHandler; import 
diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle
index e45571fd7056e..4edf4b0c6277a 100644
--- a/x-pack/qa/rolling-upgrade/build.gradle
+++ b/x-pack/qa/rolling-upgrade/build.gradle
@@ -47,7 +47,6 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName ->
     testDistribution = "DEFAULT"
     versions = [oldVersion, project.version]
     numberOfNodes = 3
-    systemProperty 'es.queryable_built_in_roles_enabled', 'true'
     systemProperty 'ingest.geoip.downloader.enabled.default', 'true'
     //we don't want to hit real service from each test
     systemProperty 'ingest.geoip.downloader.endpoint.default', 'http://invalid.endpoint'
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java
index 746c8c926086e..ca5fdf94e28f9 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java
@@ -8,12 +8,15 @@
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.Build;
+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.Version;
 import org.elasticsearch.client.Node;
 import org.elasticsearch.client.Request;
+import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestClientBuilder;
+import org.elasticsearch.client.WarningsHandler;
 import org.elasticsearch.cluster.metadata.DataStream;
 import org.elasticsearch.cluster.metadata.DataStreamTestHelper;
 import org.elasticsearch.common.settings.SecureString;
@@ -30,7 +33,6 @@
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.time.Instant;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -184,24 +186,90 @@ public void testDataStreamValidationDoesNotBreakUpgrade() throws Exception {
     }
 
     public void testUpgradeDataStream() throws Exception {
+        /*
+         * This test covers upgrading a "normal" data stream (dataStreamName), and upgrading a data stream that was originally just an
+         * ordinary index that was converted to a data stream (dataStreamFromNonDataStreamIndices).
+         */
         String dataStreamName = "reindex_test_data_stream";
+        String dataStreamFromNonDataStreamIndices = "index_first_reindex_test_data_stream";
         int numRollovers = randomIntBetween(0, 5);
         if (CLUSTER_TYPE == ClusterType.OLD) {
             createAndRolloverDataStream(dataStreamName, numRollovers);
+            createDataStreamFromNonDataStreamIndices(dataStreamFromNonDataStreamIndices);
         } else if (CLUSTER_TYPE == ClusterType.UPGRADED) {
-            upgradeDataStream(dataStreamName, numRollovers);
+            Map<String, Map<String, Object>> oldIndicesMetadata = getIndicesMetadata(dataStreamName);
+            upgradeDataStream(dataStreamName, numRollovers, numRollovers + 1, 0);
+            upgradeDataStream(dataStreamFromNonDataStreamIndices, 0, 1, 0);
+            Map<String, Map<String, Object>> upgradedIndicesMetadata = getIndicesMetadata(dataStreamName);
+            compareIndexMetadata(oldIndicesMetadata, upgradedIndicesMetadata);
+        }
+    }
+
+    private void compareIndexMetadata(
+        Map<String, Map<String, Object>> oldIndicesMetadata,
+        Map<String, Map<String, Object>> upgradedIndicesMetadata
+    ) {
+        for (Map.Entry<String, Map<String, Object>> upgradedIndexEntry : upgradedIndicesMetadata.entrySet()) {
+            String upgradedIndexName = upgradedIndexEntry.getKey();
+            if (upgradedIndexName.startsWith(".migrated-")) {
+                String oldIndexName = "." + upgradedIndexName.substring(".migrated-".length());
+                Map<String, Object> oldIndexMetadata = oldIndicesMetadata.get(oldIndexName);
+                Map<String, Object> upgradedIndexMetadata = upgradedIndexEntry.getValue();
+                compareSettings(oldIndexMetadata, upgradedIndexMetadata);
+                assertThat("Mappings did not match", upgradedIndexMetadata.get("mappings"), equalTo(oldIndexMetadata.get("mappings")));
+                // TODO: Uncomment the following two checks once we are correctly copying this state over:
+                // assertThat("ILM states did not match", upgradedIndexMetadata.get("ilm"), equalTo(oldIndexMetadata.get("ilm")));
+                // assertThat(
+                //     "Rollover info did not match",
+                //     upgradedIndexMetadata.get("rollover_info"),
+                //     equalTo(oldIndexMetadata.get("rollover_info"))
+                // );
+                assertThat(upgradedIndexMetadata.get("system"), equalTo(oldIndexMetadata.get("system")));
+            }
+        }
+    }
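The comparison leans on a naming convention: a reindexed backing index keeps its old name, minus the leading dot, behind a ".migrated-" prefix, so reversing the prefix recovers the pre-upgrade name. A worked example of the oldIndexName computation above (the index name is illustrative):

    String upgraded = ".migrated-ds-reindex_test_data_stream-2025.01.01-000001";
    String old = "." + upgraded.substring(".migrated-".length());
    // old == ".ds-reindex_test_data_stream-2025.01.01-000001"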
+
+    private void compareSettings(Map<String, Object> oldIndexMetadata, Map<String, Object> upgradedIndexMetadata) {
+        Map<String, Object> oldIndexSettings = getIndexSettingsFromIndexMetadata(oldIndexMetadata);
+        Map<String, Object> upgradedIndexSettings = getIndexSettingsFromIndexMetadata(upgradedIndexMetadata);
+        final Set<String> SETTINGS_TO_CHECK = Set.of(
+            "lifecycle",
+            "mode",
+            "routing",
+            "hidden",
+            "number_of_shards",
+            // "creation_date", TODO: Uncomment this once we are correctly copying over this setting
+            "number_of_replicas"
+        );
+        for (String setting : SETTINGS_TO_CHECK) {
+            assertThat(
+                "Unexpected value for setting " + setting,
+                upgradedIndexSettings.get(setting),
+                equalTo(oldIndexSettings.get(setting))
+            );
         }
     }
 
-    private static void createAndRolloverDataStream(String dataStreamName, int numRollovers) throws IOException {
+    @SuppressWarnings("unchecked")
+    private Map<String, Object> getIndexSettingsFromIndexMetadata(Map<String, Object> indexMetadata) {
+        return (Map<String, Object>) ((Map<String, Object>) indexMetadata.get("settings")).get("index");
+    }
+
+    private void createAndRolloverDataStream(String dataStreamName, int numRollovers) throws IOException {
+        boolean useIlm = randomBoolean();
+        if (useIlm) {
+            createIlmPolicy();
+        }
         // We want to create a data stream and roll it over several times so that we have several indices to upgrade
-        final String template = """
+        String template = """
             {
                 "settings":{
                     "index": {
+                        $ILM_SETTING
                         "mode": "time_series"
                     }
                 },
+                $DSL_TEMPLATE
                 "mappings":{
                     "dynamic_templates": [
                         {
@@ -247,6 +315,19 @@ private static void createAndRolloverDataStream(String dataStreamName, int numRo
                 }
             }
             """;
+        if (useIlm) {
+            template = template.replace("$ILM_SETTING", """
+                "lifecycle.name": "test-lifecycle-policy",
+                """);
+            template = template.replace("$DSL_TEMPLATE", "");
+        } else {
+            template = template.replace("$ILM_SETTING", "");
+            template = template.replace("$DSL_TEMPLATE", """
+                "lifecycle": {
+                    "data_retention": "7d"
+                },
+                """);
+        }
         final String indexTemplate = """
             {
                 "index_patterns": ["$PATTERN"],
                 "template": $TEMPLATE
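To make the two variants concrete, this is roughly what the placeholder substitution above produces (a sketch; whitespace differs from the real rendered string):

    // useIlm == true: $DSL_TEMPLATE vanishes and the index settings gain the ILM policy:
    //   "index": { "lifecycle.name": "test-lifecycle-policy", "mode": "time_series" }
    // useIlm == false: the ILM setting vanishes and the template instead gains DSL retention:
    //   "lifecycle": { "data_retention": "7d" }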
@@ -267,17 +348,186 @@ private static void createAndRolloverDataStream(String dataStreamName, int numRo
         }
     }
 
-    private void upgradeDataStream(String dataStreamName, int numRolloversOnOldCluster) throws Exception {
+    private static void createIlmPolicy() throws IOException {
+        String ilmPolicy = """
+            {
+              "policy": {
+                "phases": {
+                  "hot": {
+                    "actions": {
+                      "rollover": {
+                        "max_primary_shard_size": "50kb"
+                      }
+                    }
+                  },
+                  "warm": {
+                    "min_age": "30d",
+                    "actions": {
+                      "shrink": {
+                        "number_of_shards": 1
+                      },
+                      "forcemerge": {
+                        "max_num_segments": 1
+                      }
+                    }
+                  }
+                }
+              }
+            }""";
+        Request putIlmPolicyRequest = new Request("PUT", "_ilm/policy/test-lifecycle-policy");
+        putIlmPolicyRequest.setJsonEntity(ilmPolicy);
+        assertOK(client().performRequest(putIlmPolicyRequest));
+    }
+
+    /*
+     * This returns a Map of index metadata for each index in the data stream, as retrieved from the cluster state.
+     */
+    @SuppressWarnings("unchecked")
+    private Map<String, Map<String, Object>> getIndicesMetadata(String dataStreamName) throws IOException {
+        Request getClusterStateRequest = new Request("GET", "/_cluster/state/metadata/" + dataStreamName);
+        Response clusterStateResponse = client().performRequest(getClusterStateRequest);
+        Map<String, Object> clusterState = XContentHelper.convertToMap(
+            JsonXContent.jsonXContent,
+            clusterStateResponse.getEntity().getContent(),
+            false
+        );
+        return ((Map<String, Map<String, Map<String, Object>>>) clusterState.get("metadata")).get("indices");
+    }
+
+    private void createDataStreamFromNonDataStreamIndices(String dataStreamFromNonDataStreamIndices) throws IOException {
+        /*
+         * This method creates an index, creates an alias to that index, and then converts the aliased index into a data stream. This is
+         * similar to the path that many indices (including system indices) took in versions 7/8.
+         */
+        // First, we create an ordinary index with no @timestamp mapping:
+        final String templateWithNoTimestamp = """
+            {
+              "mappings":{
+                "properties": {
+                  "message": {
+                    "type": "text"
+                  }
+                }
+              }
+            }
+            """;
+        // Note that this is not a data stream template:
+        final String indexTemplate = """
+            {
+              "index_patterns": ["$PATTERN"],
+              "template": $TEMPLATE
+            }""";
+        var putIndexTemplateRequest = new Request("POST", "/_index_template/reindex_test_data_stream_index_template");
+        putIndexTemplateRequest.setJsonEntity(
+            indexTemplate.replace("$TEMPLATE", templateWithNoTimestamp).replace("$PATTERN", dataStreamFromNonDataStreamIndices + "-*")
+        );
+        String indexName = dataStreamFromNonDataStreamIndices + "-01";
+        if (minimumTransportVersion().before(TransportVersions.V_8_0_0)) {
+            /*
+             * It is not possible to create a 7.x index template with a type. And you can't create an empty index with a type. But you can
+             * create the index with a type by posting a document to an index with a type. We do that here so that we test that the type is
+             * removed when we reindex into 8.x.
+             */
+            String typeName = "test-type";
+            Request createIndexRequest = new Request("POST", indexName + "/" + typeName);
+            createIndexRequest.setJsonEntity("""
+                {
+                  "@timestamp": "2099-11-15T13:12:00",
+                  "message": "GET /search HTTP/1.1 200 1070000",
+                  "user": {
+                    "id": "kimchy"
+                  }
+                }""");
+            createIndexRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build());
+            assertOK(client().performRequest(createIndexRequest));
+        }
+        assertOK(client().performRequest(putIndexTemplateRequest));
+        bulkLoadDataMissingTimestamp(indexName);
+        /*
+         * Next, we will change the index's mapping to include a @timestamp field since we are going to convert it to a data stream. But
+         * first we have to flush the translog to disk because adding a @timestamp field will cause errors if it is done before the
+         * translog is flushed:
+         */
+        assertOK(client().performRequest(new Request("POST", indexName + "/_flush")));
+        ensureHealth(indexName, (request -> {
+            request.addParameter("wait_for_nodes", "3");
+            request.addParameter("wait_for_status", "green");
+            request.addParameter("timeout", "70s");
+            request.addParameter("level", "shards");
+        }));
+
+        // Updating the mapping to include @timestamp:
+        Request updateIndexMappingRequest = new Request("PUT", indexName + "/_mapping");
+        updateIndexMappingRequest.setJsonEntity("""
+            {
+              "properties": {
+                "@timestamp" : {
+                  "type": "date"
+                },
+                "message": {
+                  "type": "text"
+                }
+              }
+            }""");
+        assertOK(client().performRequest(updateIndexMappingRequest));
+
+        // Creating an alias with the same name that the data stream will have:
+        Request createAliasRequest = new Request("POST", "/_aliases");
+        String aliasRequestBody = """
+            {
+              "actions": [
+                {
+                  "add": {
+                    "index": "$index",
+                    "alias": "$alias"
+                  }
+                }
+              ]
+            }""";
+        createAliasRequest.setJsonEntity(
+            aliasRequestBody.replace("$index", indexName).replace("$alias", dataStreamFromNonDataStreamIndices)
+        );
+        assertOK(client().performRequest(createAliasRequest));
+
+        // This is now just an aliased index. We'll convert it into a data stream
+        final String templateWithTimestamp = """
+            {
+              "mappings":{
+                "properties": {
+                  "@timestamp" : {
+                    "type": "date"
+                  },
+                  "message": {
+                    "type": "text"
+                  }
+                }
+              }
+            }
+            """;
+        final String dataStreamTemplate = """
+            {
+              "index_patterns": ["$PATTERN"],
+              "template": $TEMPLATE,
+              "data_stream": {
+              }
+            }""";
+        var putDataStreamTemplateRequest = new Request("POST", "/_index_template/reindex_test_data_stream_data_stream_template");
+        putDataStreamTemplateRequest.setJsonEntity(
+            dataStreamTemplate.replace("$TEMPLATE", templateWithTimestamp).replace("$PATTERN", dataStreamFromNonDataStreamIndices)
+        );
+        assertOK(client().performRequest(putDataStreamTemplateRequest));
+        Request migrateToDataStreamRequest = new Request("POST", "/_data_stream/_migrate/" + dataStreamFromNonDataStreamIndices);
+        assertOK(client().performRequest(migrateToDataStreamRequest));
+    }
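A hypothetical follow-up check, not part of this change: once _migrate succeeds, GET /_data_stream should report the formerly aliased index as a backing index of the new data stream:

    Request getDataStream = new Request("GET", "/_data_stream/" + dataStreamFromNonDataStreamIndices);
    Map<String, Object> body = entityAsMap(client().performRequest(getDataStream));
    // body.data_streams[0].indices should list the original "<name>-01" index.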
+
+    @SuppressWarnings("unchecked")
+    private void upgradeDataStream(String dataStreamName, int numRolloversOnOldCluster, int expectedSuccessesCount, int expectedErrorCount)
+        throws Exception {
         Set<String> indicesNeedingUpgrade = getDataStreamIndices(dataStreamName);
-        Set<String> closedOldIndices = getClosedIndices(dataStreamName);
         final int explicitRolloverOnNewClusterCount = randomIntBetween(0, 2);
         for (int i = 0; i < explicitRolloverOnNewClusterCount; i++) {
             String oldIndexName = rollover(dataStreamName);
             if (randomBoolean()) {
-                if (i == 0) {
-                    // Since this is the first rollover on the new cluster, the old index came from the old cluster
-                    closedOldIndices.add(oldIndexName);
-                }
                 closeIndex(oldIndexName);
             }
         }
@@ -305,39 +555,54 @@ public void upgradeDataStream(String dataStreamName, int numRolloversOnOldClust
                 statusResponse.getEntity().getContent(),
                 false
             );
+            String statusResponseString = statusResponseMap.keySet()
+                .stream()
+                .map(key -> key + "=" + statusResponseMap.get(key))
+                .collect(Collectors.joining(", ", "{", "}"));
             assertOK(statusResponse);
-            assertThat(statusResponseMap.get("complete"), equalTo(true));
+            assertThat(statusResponseString, statusResponseMap.get("complete"), equalTo(true));
             final int originalWriteIndex = 1;
             if (isOriginalClusterSameMajorVersionAsCurrent()) {
                 assertThat(
+                    statusResponseString,
                     statusResponseMap.get("total_indices_in_data_stream"),
                     equalTo(originalWriteIndex + numRolloversOnOldCluster + explicitRolloverOnNewClusterCount)
                 );
                 // If the original cluster was the same as this one, we don't want any indices reindexed:
-                assertThat(statusResponseMap.get("total_indices_requiring_upgrade"), equalTo(0));
-                assertThat(statusResponseMap.get("successes"), equalTo(0));
+                assertThat(statusResponseString, statusResponseMap.get("total_indices_requiring_upgrade"), equalTo(0));
+                assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(0));
             } else {
                 // The number of rollovers that will have happened when we call reindex:
                 final int rolloversPerformedByReindex = explicitRolloverOnNewClusterCount == 0 ? 1 : 0;
                 final int expectedTotalIndicesInDataStream = originalWriteIndex + numRolloversOnOldCluster
                     + explicitRolloverOnNewClusterCount + rolloversPerformedByReindex;
-                assertThat(statusResponseMap.get("total_indices_in_data_stream"), equalTo(expectedTotalIndicesInDataStream));
+                assertThat(
+                    statusResponseString,
+                    statusResponseMap.get("total_indices_in_data_stream"),
+                    equalTo(expectedTotalIndicesInDataStream)
+                );
                 /*
                  * total_indices_requiring_upgrade is made up of: (the original write index) + numRolloversOnOldCluster. The number of
                  * rollovers on the upgraded cluster is irrelevant since those will not be reindexed.
                  */
                 assertThat(
+                    statusResponseString,
                     statusResponseMap.get("total_indices_requiring_upgrade"),
-                    equalTo(originalWriteIndex + numRolloversOnOldCluster - closedOldIndices.size())
+                    equalTo(originalWriteIndex + numRolloversOnOldCluster)
                 );
-                assertThat(statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1 - closedOldIndices.size()));
+                assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(expectedSuccessesCount));
                 // We expect all the original indices to have been deleted
-                for (String oldIndex : indicesNeedingUpgrade) {
-                    if (closedOldIndices.contains(oldIndex) == false) {
-                        assertThat(indexExists(oldIndex), equalTo(false));
+                if (expectedErrorCount == 0) {
+                    for (String oldIndex : indicesNeedingUpgrade) {
+                        assertThat(statusResponseString, indexExists(oldIndex), equalTo(false));
                     }
                 }
-                assertThat(getDataStreamIndices(dataStreamName).size(), equalTo(expectedTotalIndicesInDataStream));
+                assertThat(
+                    statusResponseString,
+                    getDataStreamIndices(dataStreamName).size(),
+                    equalTo(expectedTotalIndicesInDataStream)
+                );
+                assertThat(statusResponseString, ((List<Object>) statusResponseMap.get("errors")).size(), equalTo(expectedErrorCount));
             }
         }, 60, TimeUnit.SECONDS);
         Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel");
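The assertions above run against a status document polled inside assertBusy. Assuming the _status endpoint mirrors the _cancel path shown above, the polling step reduces to something like:

    Request statusRequest = new Request("GET", "_migration/reindex/" + dataStreamName + "/_status");
    Map<String, Object> status = entityAsMap(client().performRequest(statusRequest));
    // "complete" flips to true once reindexing is done; only then are
    // "successes" and "errors" stable enough to assert on.
    if (Boolean.TRUE.equals(status.get("complete"))) {
        // inspect status.get("successes") and status.get("errors")
    }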
@@ -356,29 +621,6 @@ private Set<String> getDataStreamIndices(String dataStreamName) throws IOExcepti
         return indices.stream().map(index -> index.get("index_name").toString()).collect(Collectors.toSet());
     }
 
-    @SuppressWarnings("unchecked")
-    private Set<String> getClosedIndices(String dataStreamName) throws IOException {
-        Set<String> allIndices = getDataStreamIndices(dataStreamName);
-        Set<String> closedIndices = new HashSet<>();
-        Response response = client().performRequest(new Request("GET", "_cluster/state/blocks/indices"));
-        Map<String, Object> responseMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false);
-        Map<String, Object> blocks = (Map<String, Object>) responseMap.get("blocks");
-        Map<String, Object> indices = (Map<String, Object>) blocks.get("indices");
-        for (Map.Entry<String, Object> indexEntry : indices.entrySet()) {
-            String indexName = indexEntry.getKey();
-            if (allIndices.contains(indexName)) {
-                Map<String, Object> blocksForIndex = (Map<String, Object>) indexEntry.getValue();
-                for (Map.Entry<String, Object> blockEntry : blocksForIndex.entrySet()) {
-                    Map<String, Object> block = (Map<String, Object>) blockEntry.getValue();
-                    if ("index closed".equals(block.get("description"))) {
-                        closedIndices.add(indexName);
-                    }
-                }
-            }
-        }
-        return closedIndices;
-    }
-
     /*
      * Similar to isOriginalClusterCurrent, but returns true if the major versions of the clusters are the same. So true
      * for 8.6 and 8.17, but false for 7.17 and 8.18.
@@ -416,6 +658,26 @@ private static void bulkLoadData(String dataStreamName) throws IOException {
         assertOK(response);
     }
 
+    /*
+     * This bulk-loads data, where some documents have no @timestamp field and some do.
+     */
+    private static void bulkLoadDataMissingTimestamp(String dataStreamName) throws IOException {
+        final String bulk = """
+            {"create": {}}
+            {"metricset": "pod", "k8s": {"pod": {"name": "cat", "network": {"tx": 2001818691, "rx": 802133794}}}}
+            {"create": {}}
+            {"metricset": "pod", "k8s": {"pod": {"name": "hamster", "network": {"tx": 2005177954, "rx": 801479970}}}}
+            {"create": {}}
+            {"metricset": "pod", "k8s": {"pod": {"name": "cow", "network": {"tx": 2006223737, "rx": 802337279}}}}
+            {"create": {}}
+            {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "rat", "network": {"tx": 2012916202, "rx": 803685721}}}}
+            """;
+        var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk");
+        bulkRequest.setJsonEntity(bulk.replace("$now", formatInstant(Instant.now())));
+        var response = client().performRequest(bulkRequest);
+        assertOK(response);
+    }
+
     static String formatInstant(Instant instant) {
         return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant);
     }
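Worth spelling out about bulkLoadDataMissingTimestamp: the three timestamp-less documents are only accepted because the target is still a plain index at that point in the test. After the migration to a data stream, the same bulk request would be rejected, since data streams require @timestamp in every document (the usual failure is a validation error along the lines of "data stream timestamp field [@timestamp] is missing" — illustrative, not a quoted log).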
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java
index f9d28670dab65..78f6bcd8ac9ab 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java
@@ -65,7 +65,6 @@ protected static void waitForPendingUpgraderTasks() throws Exception {
      * The purpose of this test is to ensure that when a job is open through a rolling upgrade we upgrade the results
      * index mappings when it is assigned to an upgraded node even if no other ML endpoint is called after the upgrade
      */
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98560")
     public void testSnapshotUpgrader() throws Exception {
         Request adjustLoggingLevels = new Request("PUT", "/_cluster/settings");
         adjustLoggingLevels.setJsonEntity("""
@@ -98,6 +97,13 @@ public void testSnapshotUpgrader() throws Exception {
 
     @SuppressWarnings("unchecked")
     private void testSnapshotUpgradeFailsOnMixedCluster() throws Exception {
+        // TODO the mixed cluster assertions sometimes fail because the code that
+        // detects the mixed cluster relies on the transport versions being different.
+        // This assumption does not hold immediately after a version bump and a new
+        // branch being cut, as the new branch will have the same transport version.
+        // See https://github.com/elastic/elasticsearch/issues/98560
+
+        assumeTrue("The mixed cluster is not always detected correctly, see https://github.com/elastic/elasticsearch/issues/98560", false);
         Map<String, Object> jobs = entityAsMap(getJob(JOB_ID));
         String currentSnapshot = ((List<String>) XContentMapValues.extractValue("jobs.model_snapshot_id", jobs)).get(0);
@@ -154,7 +160,7 @@ private void testSnapshotUpgrade() throws Exception {
         List<Map<String, Object>> upgradedSnapshot = (List<Map<String, Object>>) entityAsMap(getModelSnapshots(JOB_ID, snapshotToUpgradeId))
             .get("model_snapshots");
-        assertThat(upgradedSnapshot, hasSize(1));
+        assertThat(upgradedSnapshot.toString(), upgradedSnapshot, hasSize(1));
         assertThat(upgradedSnapshot.get(0).get("latest_record_time_stamp"), equalTo(snapshotToUpgrade.get("latest_record_time_stamp")));
 
         // Does the snapshot still work?
@@ -338,7 +344,23 @@ protected Response postData(String jobId, String data) throws IOException {
     }
 
     protected void flushJob(String jobId) throws IOException {
-        client().performRequest(new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_flush"));
+        // Flush job is deprecated, so a deprecation warning is possible (depending on the old version)
+        RequestOptions flushOptions = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> {
+            if (warnings.isEmpty()) {
+                // No warning is OK - it means we hit an old node where flush is not deprecated
+                return false;
+            } else if (warnings.size() > 1) {
+                return true;
+            }
+            return warnings.get(0)
+                .equals(
+                    "Forcing any buffered data to be processed is deprecated, "
+                        + "in a future major version it will be compulsory to use a datafeed"
+                ) == false;
+        }).build();
+        Request flushRequest = new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_flush");
+        flushRequest.setOptions(flushOptions);
+        client().performRequest(flushRequest);
     }
 
     private void closeJob(String jobId) throws IOException {
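For readers unfamiliar with the low-level client: WarningsHandler declares a single method, warningsShouldFailRequest(List<String>), and returning true fails the request. The lambda in flushJob therefore accepts either no warnings (an old node where _flush is not yet deprecated) or exactly the known deprecation message, and fails on anything else. A toy version of the same contract:

    // Assumption for illustration: tolerate at most one warning, whatever its text.
    RequestOptions lenient = RequestOptions.DEFAULT.toBuilder()
        .setWarningsHandler(warnings -> warnings.size() > 1)
        .build();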
Request("GET", "_alias/.ml-notifications-write"); + Response response = client().performRequest(getMappings); + Map responseMap = entityAsMap(response); + assertThat(responseMap.entrySet(), hasSize(1)); + var aliases = (Map) responseMap.get(".ml-notifications-000002"); + assertThat(aliases.entrySet(), hasSize(1)); + var allAliases = (Map) aliases.get("aliases"); + var writeAlias = (Map) allAliases.get(".ml-notifications-write"); + + assertThat(writeAlias, hasEntry("is_hidden", Boolean.TRUE)); + var isWriteIndex = (Boolean) writeAlias.get("is_write_index"); + assertThat(isWriteIndex, anyOf(is(Boolean.TRUE), nullValue())); + }); + } } diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java index 95c3fd4fde916..a5330d3daf92f 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java +++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/security/enrollment/tool/AutoConfigGenerateElasticPasswordHashTests.java @@ -97,18 +97,18 @@ protected Environment createEnv(OptionSet options, ProcessInfo processInfo) thro public void testSuccessfullyGenerateAndStoreHash() throws Exception { execute(); assertThat(terminal.getOutput(), hasLength(20)); - KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configFile()); + KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configDir()); assertNotNull(keyStoreWrapper); keyStoreWrapper.decrypt(new char[0]); assertThat(keyStoreWrapper.getSettingNames(), containsInAnyOrder(AUTOCONFIG_ELASTIC_PASSWORD_HASH.getKey(), "keystore.seed")); } public void testExistingKeystoreWithWrongPassword() throws Exception { - KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configFile()); + KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(env.configDir()); assertNotNull(keyStoreWrapper); keyStoreWrapper.decrypt(new char[0]); // set a random password so that we fail to decrypt it in GenerateElasticPasswordHash#execute - keyStoreWrapper.save(env.configFile(), randomAlphaOfLength(16).toCharArray()); + keyStoreWrapper.save(env.configDir(), randomAlphaOfLength(16).toCharArray()); UserException e = expectThrows(UserException.class, this::execute); assertThat(e.getMessage(), equalTo("Failed to generate a password for the elastic user")); assertThat(terminal.getOutput(), is(emptyString()));