diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index 0c0ede8c3a076..a886220c84cda 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -109,3 +109,11 @@ EOF Agent information from gobld EOF fi + +# Amazon Linux 2 has DNS resolution issues with resource-based hostnames in EC2. +# Many of our functional tests look up and resolve the local machine's hostname in a particular way, and they fail there. +# This sets up a manual entry for the hostname in dnsmasq. +if [[ -f /etc/os-release ]] && grep -q '"Amazon Linux 2"' /etc/os-release; then + echo "$(hostname -i | cut -d' ' -f 2) $(hostname -f)." | sudo tee /etc/dnsmasq.hosts + sudo systemctl restart dnsmasq.service +fi diff --git a/.buildkite/pipelines/dra-workflow.yml b/.buildkite/pipelines/dra-workflow.yml index bcc6c9c57d756..25477c8541fa9 100644 --- a/.buildkite/pipelines/dra-workflow.yml +++ b/.buildkite/pipelines/dra-workflow.yml @@ -6,7 +6,8 @@ steps: provider: gcp image: family/elasticsearch-ubuntu-2204 machineType: custom-32-98304 - buildDirectory: /dev/shm/bk + localSsds: 1 + localSsdInterface: nvme diskSizeGb: 350 - wait # The hadoop build depends on the ES artifact diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml index 1a513971b2c10..f530f237113a9 100644 --- a/.buildkite/pipelines/intake.template.yml +++ b/.buildkite/pipelines/intake.template.yml @@ -7,7 +7,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - wait - label: part1 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart1 @@ -17,7 +16,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part2 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2 timeout_in_minutes: 300 @@ -26,7 +24,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part3 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3 timeout_in_minutes: 300 @@ -35,7 +32,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part4 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4 timeout_in_minutes: 300 @@ -44,7 +40,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part5 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart5 timeout_in_minutes: 300 @@ -53,7 +48,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - group: bwc-snapshots steps: - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" @@ -67,7 +61,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: "{{matrix.BWC_VERSION}}"
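A note on the pre-command hook change at the top of this chunk: it works around functional tests that resolve the local machine's hostname and then fail on Amazon Linux 2. The sketch below is a hypothetical illustration of that kind of round-trip lookup, not the actual test code; it shows the pattern that the manual /etc/dnsmasq.hosts entry keeps working.

```java
import java.net.InetAddress;
import java.net.UnknownHostException;

// Hypothetical sketch: resolve the local hostname to an FQDN, then forward-resolve
// the FQDN back to an address. On Amazon Linux 2 with EC2 resource-based hostnames,
// this round trip can fail unless dnsmasq has a manual host entry like the one the
// pre-command hook writes.
public class HostnameLookupCheck {
    public static void main(String[] args) {
        try {
            String fqdn = InetAddress.getLocalHost().getCanonicalHostName();
            InetAddress resolved = InetAddress.getByName(fqdn);
            System.out.println(fqdn + " -> " + resolved.getHostAddress());
        } catch (UnknownHostException e) {
            System.err.println("local hostname did not resolve: " + e.getMessage());
        }
    }
}
```

With the dnsmasq entry in place the forward lookup of the FQDN succeeds; without it, the resolver on Amazon Linux 2 can fail for EC2 resource-based hostnames.

- label: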
rest-compat @@ -78,7 +71,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - wait - trigger: elasticsearch-dra-workflow label: Trigger DRA snapshot workflow diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index a6af8bd35c7a0..1bb13c4c10966 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -8,7 +8,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - wait - label: part1 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart1 @@ -18,7 +17,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part2 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2 timeout_in_minutes: 300 @@ -27,7 +25,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part3 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3 timeout_in_minutes: 300 @@ -36,7 +33,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part4 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4 timeout_in_minutes: 300 @@ -45,7 +41,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part5 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart5 timeout_in_minutes: 300 @@ -54,7 +49,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - group: bwc-snapshots steps: - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" @@ -68,7 +62,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: "{{matrix.BWC_VERSION}}" - label: rest-compat @@ -79,11 +72,11 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - wait - trigger: elasticsearch-dra-workflow label: Trigger DRA snapshot workflow async: true + branches: "main 8.* 7.17" build: branch: "$BUILDKITE_BRANCH" commit: "$BUILDKITE_COMMIT" diff --git a/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml b/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml index 1f69b8faa7ab4..8cf2a8aacbece 100644 --- a/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml +++ b/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml @@ -15,7 +15,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - wait - trigger: "elasticsearch-lucene-snapshot-tests" build: diff --git a/.buildkite/pipelines/lucene-snapshot/run-tests.yml b/.buildkite/pipelines/lucene-snapshot/run-tests.yml index 49c3396488d82..c76c54a56494e 100644 --- 
a/.buildkite/pipelines/lucene-snapshot/run-tests.yml +++ b/.buildkite/pipelines/lucene-snapshot/run-tests.yml @@ -7,7 +7,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - wait: null - label: part1 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart1 @@ -17,7 +16,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part2 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2 timeout_in_minutes: 300 @@ -26,7 +24,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part3 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3 timeout_in_minutes: 300 @@ -35,7 +32,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part4 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4 timeout_in_minutes: 300 @@ -44,7 +40,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: part5 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart5 timeout_in_minutes: 300 @@ -53,7 +48,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - group: bwc-snapshots steps: - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" @@ -70,7 +64,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: "{{matrix.BWC_VERSION}}" - label: rest-compat @@ -81,4 +74,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/periodic-packaging.bwc.template.yml b/.buildkite/pipelines/periodic-packaging.bwc.template.yml index 8a6fa2553b204..b06bc80d3535d 100644 --- a/.buildkite/pipelines/periodic-packaging.bwc.template.yml +++ b/.buildkite/pipelines/periodic-packaging.bwc.template.yml @@ -11,6 +11,5 @@ image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: $BWC_VERSION diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml index 14a2fd7ba1bc4..e0da1f46486ea 100644 --- a/.buildkite/pipelines/periodic-packaging.template.yml +++ b/.buildkite/pipelines/periodic-packaging.template.yml @@ -3,7 +3,7 @@ steps: steps: - label: "{{matrix.image}} / packaging-tests-unix" command: ./.ci/scripts/packaging-test.sh destructivePackagingTest - timeout_in_minutes: 300 + timeout_in_minutes: 420 matrix: setup: image: diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 115873552e056..5a05d75cf95ac 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ 
b/.buildkite/pipelines/periodic-packaging.yml @@ -4,7 +4,7 @@ steps: steps: - label: "{{matrix.image}} / packaging-tests-unix" command: ./.ci/scripts/packaging-test.sh destructivePackagingTest - timeout_in_minutes: 300 + timeout_in_minutes: 420 matrix: setup: image: @@ -44,7 +44,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.0.1 @@ -61,7 +60,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.1.3 @@ -78,7 +76,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.2.3 @@ -95,7 +92,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.3.3 @@ -112,7 +108,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.4.3 @@ -129,7 +124,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.5.3 @@ -146,7 +140,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.6.2 @@ -163,7 +156,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.7.1 @@ -180,7 +172,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.8.2 @@ -197,7 +188,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.9.2 @@ -214,7 +204,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.10.4 @@ -231,7 +220,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.11.4 @@ -248,7 +236,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.12.2 @@ -265,7 +252,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.13.4 @@ -282,7 +268,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.14.3 @@ -299,7 +284,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.15.3 @@ -316,7 +300,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 8.16.0 @@ -333,7 +316,6 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: 9.0.0 diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index 29feb5b832ee2..86e0623ba5b87 100644 --- 
a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -28,7 +28,6 @@ steps: localSsds: 1 localSsdInterface: nvme machineType: custom-32-98304 - diskSizeGb: 250 env: {} - group: platform-support-windows steps: diff --git a/.buildkite/pipelines/periodic.bwc.template.yml b/.buildkite/pipelines/periodic.bwc.template.yml index b22270dbf221c..43a0a7438d656 100644 --- a/.buildkite/pipelines/periodic.bwc.template.yml +++ b/.buildkite/pipelines/periodic.bwc.template.yml @@ -7,7 +7,6 @@ machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: $BWC_VERSION retry: diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 08648e2eedd3c..201c34058a409 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -25,7 +25,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: example-plugins command: |- cd $$WORKSPACE/plugins/examples @@ -37,7 +36,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - group: java-fips-matrix steps: - label: "{{matrix.ES_RUNTIME_JAVA}} / {{matrix.GRADLE_TASK}} / java-fips-matrix" @@ -59,7 +57,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" GRADLE_TASK: "{{matrix.GRADLE_TASK}}" @@ -76,7 +73,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" BWC_VERSION: "{{matrix.BWC_VERSION}}" @@ -102,7 +98,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" GRADLE_TASK: "{{matrix.GRADLE_TASK}}" @@ -120,7 +115,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" BWC_VERSION: "{{matrix.BWC_VERSION}}" @@ -156,7 +150,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: third-party / azure command: | export azure_storage_container=elasticsearch-ci-thirdparty @@ -171,7 +164,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: third-party / gcs command: | export google_storage_bucket=elasticsearch-ci-thirdparty @@ -186,7 +178,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: third-party / geoip command: | .ci/scripts/run-gradle.sh :modules:ingest-geoip:internalClusterTest -Dtests.jvm.argline="-Dgeoip_use_service=true" @@ -196,7 +187,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: third-party / s3 command: | export amazon_s3_bucket=elasticsearch-ci.us-west-2 @@ -211,7 +201,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: Upload Snyk Dependency Graph command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph 
-PsnykTargetReference=$BUILDKITE_BRANCH env: @@ -222,8 +211,7 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - if: build.branch == "main" || build.branch == "7.17" + if: build.branch == "main" || build.branch == "8.x" || build.branch == "7.17" - label: check-branch-consistency command: .ci/scripts/run-gradle.sh branchConsistency timeout_in_minutes: 15 @@ -231,7 +219,6 @@ steps: provider: gcp image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-2 - diskSizeGb: 250 - label: check-branch-protection-rules command: .buildkite/scripts/branch-protection.sh timeout_in_minutes: 5 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 8e35d1561b6d7..cbca7f820c7b7 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -11,7 +11,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.0.1 retry: @@ -31,7 +30,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.1.3 retry: @@ -51,7 +49,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.2.3 retry: @@ -71,7 +68,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.3.3 retry: @@ -91,7 +87,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.4.3 retry: @@ -111,7 +106,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.5.3 retry: @@ -131,7 +125,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.6.2 retry: @@ -151,7 +144,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.7.1 retry: @@ -171,7 +163,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.8.2 retry: @@ -191,7 +182,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.9.2 retry: @@ -211,7 +201,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.10.4 retry: @@ -231,7 +220,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.11.4 retry: @@ -251,7 +239,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.12.2 retry: @@ -271,7 +258,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.13.4 retry: @@ -291,7 +277,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.14.3 retry: @@ -311,7 +296,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.15.3 retry: @@ -331,7 +315,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 8.16.0 retry: @@ -351,7 +334,6 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk preemptible: true - diskSizeGb: 250 env: BWC_VERSION: 
9.0.0 retry: @@ -386,7 +368,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: example-plugins command: |- cd $$WORKSPACE/plugins/examples @@ -398,7 +379,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - group: java-fips-matrix steps: - label: "{{matrix.ES_RUNTIME_JAVA}} / {{matrix.GRADLE_TASK}} / java-fips-matrix" @@ -420,7 +400,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" GRADLE_TASK: "{{matrix.GRADLE_TASK}}" @@ -437,7 +416,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" BWC_VERSION: "{{matrix.BWC_VERSION}}" @@ -463,7 +441,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" GRADLE_TASK: "{{matrix.GRADLE_TASK}}" @@ -481,7 +458,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" BWC_VERSION: "{{matrix.BWC_VERSION}}" @@ -517,7 +493,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: third-party / azure command: | export azure_storage_container=elasticsearch-ci-thirdparty @@ -532,7 +507,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: third-party / gcs command: | export google_storage_bucket=elasticsearch-ci-thirdparty @@ -547,7 +521,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: third-party / geoip command: | .ci/scripts/run-gradle.sh :modules:ingest-geoip:internalClusterTest -Dtests.jvm.argline="-Dgeoip_use_service=true" @@ -557,7 +530,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: third-party / s3 command: | export amazon_s3_bucket=elasticsearch-ci.us-west-2 @@ -572,7 +544,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 - label: Upload Snyk Dependency Graph command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH env: @@ -583,7 +554,6 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk - diskSizeGb: 250 if: build.branch == "main" || build.branch == "7.17" - label: check-branch-consistency command: .ci/scripts/run-gradle.sh branchConsistency @@ -592,7 +562,6 @@ steps: provider: gcp image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-2 - diskSizeGb: 250 - label: check-branch-protection-rules command: .buildkite/scripts/branch-protection.sh timeout_in_minutes: 5 diff --git a/.buildkite/pipelines/pull-request/build-benchmark.yml b/.buildkite/pipelines/pull-request/build-benchmark.yml index 96330bee03638..8d3215b8393ce 100644 --- a/.buildkite/pipelines/pull-request/build-benchmark.yml +++ b/.buildkite/pipelines/pull-request/build-benchmark.yml @@ -22,4 +22,3 @@ steps: image: family/elasticsearch-ubuntu-2004 
machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/bwc-snapshots.yml b/.buildkite/pipelines/pull-request/bwc-snapshots.yml index 8f59e593b286f..5a9fc2d938ac0 100644 --- a/.buildkite/pipelines/pull-request/bwc-snapshots.yml +++ b/.buildkite/pipelines/pull-request/bwc-snapshots.yml @@ -18,4 +18,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/cloud-deploy.yml b/.buildkite/pipelines/pull-request/cloud-deploy.yml index 2932f874c5cf8..ce8e8206d51ff 100644 --- a/.buildkite/pipelines/pull-request/cloud-deploy.yml +++ b/.buildkite/pipelines/pull-request/cloud-deploy.yml @@ -11,4 +11,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/docs-check.yml b/.buildkite/pipelines/pull-request/docs-check.yml index 3bf1e43697a7c..2201eb2d1e4ea 100644 --- a/.buildkite/pipelines/pull-request/docs-check.yml +++ b/.buildkite/pipelines/pull-request/docs-check.yml @@ -12,4 +12,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/eql-correctness.yml b/.buildkite/pipelines/pull-request/eql-correctness.yml index d85827d10e886..8f7ca6942c0e9 100644 --- a/.buildkite/pipelines/pull-request/eql-correctness.yml +++ b/.buildkite/pipelines/pull-request/eql-correctness.yml @@ -7,4 +7,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/example-plugins.yml b/.buildkite/pipelines/pull-request/example-plugins.yml index fb4a17fb214cb..18d0de6594980 100644 --- a/.buildkite/pipelines/pull-request/example-plugins.yml +++ b/.buildkite/pipelines/pull-request/example-plugins.yml @@ -16,4 +16,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/full-bwc.yml b/.buildkite/pipelines/pull-request/full-bwc.yml index c404069bd0e60..d3fa8eccaf7d9 100644 --- a/.buildkite/pipelines/pull-request/full-bwc.yml +++ b/.buildkite/pipelines/pull-request/full-bwc.yml @@ -13,4 +13,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml b/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml index 970dafbb28647..c62cf23310422 100644 --- a/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml +++ b/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml @@ -18,6 +18,5 @@ steps: image: family/elasticsearch-{{matrix.image}} machineType: custom-16-32768 buildDirectory: /dev/shm/bk - diskSizeGb: 250 env: BWC_VERSION: $BWC_VERSION diff --git a/.buildkite/pipelines/pull-request/part-1-fips.yml b/.buildkite/pipelines/pull-request/part-1-fips.yml index 99544e7f5a80b..42f930c1bde9a 100644 --- a/.buildkite/pipelines/pull-request/part-1-fips.yml +++ b/.buildkite/pipelines/pull-request/part-1-fips.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-1.yml 
b/.buildkite/pipelines/pull-request/part-1.yml index b4b9d5469ec41..3d467c6c41e43 100644 --- a/.buildkite/pipelines/pull-request/part-1.yml +++ b/.buildkite/pipelines/pull-request/part-1.yml @@ -7,4 +7,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-2-fips.yml b/.buildkite/pipelines/pull-request/part-2-fips.yml index 36a9801547d78..6a3647ceb50ae 100644 --- a/.buildkite/pipelines/pull-request/part-2-fips.yml +++ b/.buildkite/pipelines/pull-request/part-2-fips.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-2.yml b/.buildkite/pipelines/pull-request/part-2.yml index 12bd78cf895fd..43de69bbcd945 100644 --- a/.buildkite/pipelines/pull-request/part-2.yml +++ b/.buildkite/pipelines/pull-request/part-2.yml @@ -7,4 +7,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-3-fips.yml b/.buildkite/pipelines/pull-request/part-3-fips.yml index 4a2df3026e782..cee3ea153acb9 100644 --- a/.buildkite/pipelines/pull-request/part-3-fips.yml +++ b/.buildkite/pipelines/pull-request/part-3-fips.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-3.yml b/.buildkite/pipelines/pull-request/part-3.yml index 6991c05da85c6..12abae7634822 100644 --- a/.buildkite/pipelines/pull-request/part-3.yml +++ b/.buildkite/pipelines/pull-request/part-3.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-4-fips.yml b/.buildkite/pipelines/pull-request/part-4-fips.yml index 734f8af816895..11a50456ca4c0 100644 --- a/.buildkite/pipelines/pull-request/part-4-fips.yml +++ b/.buildkite/pipelines/pull-request/part-4-fips.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-4.yml b/.buildkite/pipelines/pull-request/part-4.yml index 59f2f2898a590..af11f08953d07 100644 --- a/.buildkite/pipelines/pull-request/part-4.yml +++ b/.buildkite/pipelines/pull-request/part-4.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-5-fips.yml b/.buildkite/pipelines/pull-request/part-5-fips.yml index 801b812bb99c0..4e193ac751086 100644 --- a/.buildkite/pipelines/pull-request/part-5-fips.yml +++ b/.buildkite/pipelines/pull-request/part-5-fips.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/part-5.yml b/.buildkite/pipelines/pull-request/part-5.yml index c7e50631d1cdd..306ce7533d0ed 100644 --- a/.buildkite/pipelines/pull-request/part-5.yml +++ b/.buildkite/pipelines/pull-request/part-5.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git 
a/.buildkite/pipelines/pull-request/precommit.yml b/.buildkite/pipelines/pull-request/precommit.yml index 8d1458b1b60c8..f6548dfeed9b2 100644 --- a/.buildkite/pipelines/pull-request/precommit.yml +++ b/.buildkite/pipelines/pull-request/precommit.yml @@ -10,4 +10,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/rest-compatibility.yml b/.buildkite/pipelines/pull-request/rest-compatibility.yml index 16144a2a0780f..a69810e23d960 100644 --- a/.buildkite/pipelines/pull-request/rest-compatibility.yml +++ b/.buildkite/pipelines/pull-request/rest-compatibility.yml @@ -9,4 +9,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/.buildkite/pipelines/pull-request/validate-changelogs.yml b/.buildkite/pipelines/pull-request/validate-changelogs.yml index 296ef11637118..9451d321a9b39 100644 --- a/.buildkite/pipelines/pull-request/validate-changelogs.yml +++ b/.buildkite/pipelines/pull-request/validate-changelogs.yml @@ -7,4 +7,3 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk - diskSizeGb: 250 diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java index 284324b3d9206..b8f0a11e21c8f 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/codec/tsdb/internal/DecodeBenchmark.java @@ -12,6 +12,7 @@ import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.DataOutput; +import org.elasticsearch.index.codec.tsdb.DocValuesForUtil; import org.openjdk.jmh.infra.Blackhole; import java.io.IOException; @@ -43,7 +44,7 @@ public void setupInvocation(int bitsPerValue) { @Override public void benchmark(int bitsPerValue, Blackhole bh) throws IOException { - forUtil.decode(bitsPerValue, this.dataInput, this.output); + DocValuesForUtil.decode(bitsPerValue, this.dataInput, this.output); bh.consume(this.output); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index ed1689cfb0eb9..793ff6049e10e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -33,7 +33,11 @@ public enum DockerBase { "docker.elastic.co/wolfi/chainguard-base:latest@sha256:c16d3ad6cebf387e8dd2ad769f54320c4819fbbaa21e729fad087c7ae223b4d0", "-wolfi", "apk" - ); + ), + + // Based on WOLFI above, with more extras. We don't set a base image because + // we programmatically extend from the Wolfi image. 
+ WOLFI_ESS(null, "-wolfi-ess", "apk"); private final String image; private final String suffix; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java index 19309fe2da8a3..6b93ea10283ae 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -181,6 +181,9 @@ private static String distributionProjectName(ElasticsearchDistribution distribu if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_WOLFI) { return projectName + "wolfi-docker" + archString + "-export"; } + if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_WOLFI_ESS) { + return projectName + "wolfi-ess-docker" + archString + "-export"; + } return projectName + distribution.getType().getName(); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerWolfiEssElasticsearchDistributionType.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerWolfiEssElasticsearchDistributionType.java new file mode 100644 index 0000000000000..550c43d43a536 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerWolfiEssElasticsearchDistributionType.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.internal.distribution; + +import org.elasticsearch.gradle.ElasticsearchDistributionType; + +public class DockerWolfiEssElasticsearchDistributionType implements ElasticsearchDistributionType { + + DockerWolfiEssElasticsearchDistributionType() {} + + @Override + public String getName() { + return "dockerWolfiEss"; + } + + @Override + public boolean isDocker() { + return true; + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java index ba0e76b3f5b99..077a47041861f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java @@ -22,6 +22,7 @@ public class InternalElasticsearchDistributionTypes { public static ElasticsearchDistributionType DOCKER_CLOUD = new DockerCloudElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType(); + public static ElasticsearchDistributionType DOCKER_WOLFI_ESS = new DockerWolfiEssElasticsearchDistributionType(); public static List ALL_INTERNAL = List.of( DEB, @@ -31,6 +32,7 @@ public class InternalElasticsearchDistributionTypes { DOCKER_IRONBANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, - DOCKER_WOLFI + DOCKER_WOLFI, + DOCKER_WOLFI_ESS ); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index 77ab9557eac33..cc852e615726a 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -54,6 +54,7 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI; +import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI_ESS; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM; /** @@ -152,6 +153,7 @@ private static Map> lifecycleTask lifecyleTasks.put(DOCKER_CLOUD, project.getTasks().register(taskPrefix + ".docker-cloud")); lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess")); lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi")); + lifecyleTasks.put(DOCKER_WOLFI_ESS, project.getTasks().register(taskPrefix + ".docker-wolfi-ess")); lifecyleTasks.put(ARCHIVE, project.getTasks().register(taskPrefix + ".archives")); lifecyleTasks.put(DEB, project.getTasks().register(taskPrefix + ".packages")); lifecyleTasks.put(RPM, lifecyleTasks.get(DEB)); diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 
ac75a3a968ed1..169c187ef115a 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -30,7 +30,7 @@ httpcore = 4.4.13 httpasyncclient = 4.1.5 commonslogging = 1.2 commonscodec = 1.15 -protobuf = 3.21.9 +protobuf = 3.25.5 # test dependencies randomizedrunner = 2.8.0 diff --git a/distribution/docker/README.md b/distribution/docker/README.md index eb0e7b296097d..28e6ff314d91a 100644 --- a/distribution/docker/README.md +++ b/distribution/docker/README.md @@ -7,6 +7,17 @@ the [DockerBase] enum. * UBI - the same as the default image, but based upon [RedHat's UBI images][ubi], specifically their minimal flavour. * Wolfi - the same as the default image, but based upon [Wolfi](https://github.com/wolfi-dev) + * Wolfi ESS - this directly extends the Wolfi image, and adds all ES plugins + that the ES build generates in an archive directory. It also sets an + environment variable that points at this directory. This allows plugins to + be installed from the archive instead of the internet, speeding up + deployment times. Furthermore this image has + * `filebeat` and `metricbeat` included + * `wget` included + * The `ENTRYPOINT` is just `/sbin/tini`, and the `CMD` is + `/app/elasticsearch.sh`. In normal use this file would be bind-mounted + in, but the image ships a stub version of this file so that the image + can still be tested. * Iron Bank - this is the US Department of Defence's repository of digitally signed, binary container images including both Free and Open-Source software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is @@ -17,7 +28,7 @@ the [DockerBase] enum. * `filebeat` and `metricbeat` are included * `wget` is included * The `ENTRYPOINT` is just `/bin/tini`, and the `CMD` is - `/app/elasticsearc.sh`. In normal use this file would be bind-mounted + `/app/elasticsearch.sh`. In normal use this file would be bind-mounted in, but the image ships a stub version of this file so that the image can still be tested. * Cloud ESS - this directly extends the Cloud image, and adds all ES plugins diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 30974ed2396a8..99c482d91085a 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.DockerBase @@ -8,8 +7,10 @@ import org.elasticsearch.gradle.internal.docker.DockerSupportPlugin import org.elasticsearch.gradle.internal.docker.DockerSupportService import org.elasticsearch.gradle.internal.docker.ShellRetry import org.elasticsearch.gradle.internal.docker.TransformLog4jConfigFilter +import org.elasticsearch.gradle.internal.docker.* import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.util.GradleUtils +import org.elasticsearch.gradle.Architecture import java.nio.file.Path import java.time.temporal.ChronoUnit @@ -99,9 +100,9 @@ String tiniArch = Architecture.current() == Architecture.AARCH64 ? 'arm64' : 'amd64'
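The README section above describes how the ESS images ship ES plugin zips in an archive directory and set an environment variable pointing at it, so plugins install from disk instead of the internet. A minimal sketch of consuming that layout, as a hypothetical helper rather than anything in the build:

```java
import java.io.File;

// Hypothetical sketch of the archive layout the README above describes:
// the ESS images set ES_PLUGIN_ARCHIVE_DIR (/opt/plugins/archive in the
// Dockerfile) and ship plugin zips there, so plugins can be installed from
// local files instead of being downloaded.
public class PluginArchiveListing {
    public static void main(String[] args) {
        String dir = System.getenv().getOrDefault("ES_PLUGIN_ARCHIVE_DIR", "/opt/plugins/archive");
        File[] zips = new File(dir).listFiles((d, name) -> name.endsWith(".zip"));
        if (zips == null || zips.length == 0) {
            System.out.println("no plugin archives found under " + dir);
            return;
        }
        for (File zip : zips) {
            // each printed URL is suitable for an offline install, e.g.
            // bin/elasticsearch-plugin install file:///opt/plugins/archive/<plugin>.zip
            System.out.println("file://" + zip.getAbsolutePath());
        }
    }
}
```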
'arm64' : 'am dependencies { aarch64DockerSource project(":distribution:archives:linux-aarch64-tar") - aarch64DockerSourceTar project(path: ":distribution:archives:linux-aarch64-tar", configuration:"default") + aarch64DockerSourceTar project(path: ":distribution:archives:linux-aarch64-tar", configuration: "default") dockerSource project(":distribution:archives:linux-tar") - dockerSourceTar project(path: ":distribution:archives:linux-tar", configuration:"default") + dockerSourceTar project(path: ":distribution:archives:linux-tar", configuration: "default") log4jConfig project(path: ":distribution", configuration: 'log4jConfig') tini "krallin:tini:0.19.0:${tiniArch}" allPlugins project(path: ':plugins', configuration: 'allPlugins') @@ -112,7 +113,7 @@ dependencies { } ext.expansions = { Architecture architecture, DockerBase base -> - def (major,minor) = VersionProperties.elasticsearch.split("\\.") + def (major, minor) = VersionProperties.elasticsearch.split("\\.") // We tag our Docker images with various pieces of information, including a timestamp // for when the image was built. However, this makes it impossible completely cache @@ -216,7 +217,8 @@ elasticsearch_distributions { } interface Injected { - @Inject FileSystemOperations getFs() + @Inject + FileSystemOperations getFs() } tasks.named("preProcessFixture").configure { @@ -300,7 +302,10 @@ void addBuildDockerContextTask(Architecture architecture, DockerBase base) { // For some reason, the artifact name can differ depending on what repository we used. rename ~/((?:file|metric)beat)-.*\.tar\.gz$/, "\$1-${VersionProperties.elasticsearch}.tar.gz" } - Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ) onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } } @@ -337,9 +342,9 @@ void addTransformDockerContextTask(Architecture architecture, DockerBase base) { into "${project.buildDir}/docker-context/${archiveName}" // Since we replaced the remote URL in the Dockerfile, copy in the required file - if(base == DockerBase.IRON_BANK) { + if (base == DockerBase.IRON_BANK) { from(architecture == Architecture.AARCH64 ? 
configurations.aarch64DockerSourceTar : configurations.dockerSourceTar) - from (configurations.tini) { + from(configurations.tini) { rename { _ -> 'tini' } } } else { @@ -349,7 +354,10 @@ void addTransformDockerContextTask(Architecture architecture, DockerBase base) { expansions(architecture, base).findAll { it.key != 'build_date' }.each { k, v -> inputs.property(k, { v.toString() }) } - Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ) onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } } @@ -373,7 +381,7 @@ private static List generateTags(DockerBase base, Architecture architect String image = "elasticsearch${base.suffix}" String namespace = 'elasticsearch' - if (base == DockerBase.CLOUD || base == DockerBase.CLOUD_ESS) { + if (base == DockerBase.CLOUD || base == DockerBase.CLOUD_ESS || base == DockerBase.WOLFI_ESS) { namespace += '-ci' } @@ -423,7 +431,10 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { baseImages = [base.image] } - Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ) onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } } @@ -435,13 +446,12 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { } } -void addBuildEssDockerImageTask(Architecture architecture) { - DockerBase base = DockerBase.CLOUD_ESS +void addBuildEssDockerImageTask(Architecture architecture, DockerBase dockerBase) { String arch = architecture == Architecture.AARCH64 ? '-aarch64' : '' - String contextDir = "${project.buildDir}/docker-context/elasticsearch${base.suffix}-${VersionProperties.elasticsearch}-docker-build-context${arch}" + String contextDir = "${project.buildDir}/docker-context/elasticsearch${dockerBase.suffix}-${VersionProperties.elasticsearch}-docker-build-context${arch}" final TaskProvider buildContextTask = - tasks.register(taskName('build', architecture, base, 'DockerContext'), Sync) { + tasks.register(taskName('build', architecture, dockerBase, 'DockerContext'), Sync) { into contextDir final Path projectDir = project.projectDir.toPath() @@ -450,28 +460,54 @@ void addBuildEssDockerImageTask(Architecture architecture) { from configurations.allPlugins } - from(projectDir.resolve("src/docker/Dockerfile.cloud-ess")) { - expand([ - base_image: "elasticsearch${DockerBase.CLOUD.suffix}:${architecture.classifier}" - ]) + if (dockerBase == DockerBase.WOLFI_ESS) { + // If we're performing a release build, but `build.id` hasn't been set, we can + // infer that we're not at the Docker building stage of the build, and therefore + // we should skip the beats part of the build. + String buildId = providers.systemProperty('build.id').getOrNull() + boolean includeBeats = VersionProperties.isElasticsearchSnapshot() == true || buildId != null || useDra + + if (includeBeats) { + from configurations.getByName("filebeat_${architecture.classifier}") + from configurations.getByName("metricbeat_${architecture.classifier}") + } + // For some reason, the artifact name can differ depending on what repository we used. 
+ rename ~/((?:file|metric)beat)-.*\.tar\.gz$/, "\$1-${VersionProperties.elasticsearch}.tar.gz" + } + + String baseSuffix = dockerBase == DockerBase.CLOUD_ESS ? DockerBase.CLOUD.suffix : DockerBase.WOLFI.suffix + from(projectDir.resolve("src/docker/Dockerfile.ess")) { + expand( + [ + base_image: "elasticsearch${baseSuffix}:${architecture.classifier}", + docker_base: "${dockerBase.name().toLowerCase()}", + version: "${VersionProperties.elasticsearch}", + retry: ShellRetry + ] + ) filter SquashNewlinesFilter - rename ~/Dockerfile\.cloud-ess$/, 'Dockerfile' + rename ~/Dockerfile\.ess$/, 'Dockerfile' } } final TaskProvider buildDockerImageTask = - tasks.register(taskName("build", architecture, base, "DockerImage"), DockerBuildTask) { + tasks.register(taskName("build", architecture, dockerBase, "DockerImage"), DockerBuildTask) { - TaskProvider buildCloudTask = tasks.named(taskName("build", architecture, DockerBase.CLOUD, "DockerImage")) - inputs.files(buildCloudTask) + DockerBase base = dockerBase == DockerBase.CLOUD_ESS ? DockerBase.CLOUD : DockerBase.WOLFI + + TaskProvider buildBaseTask = tasks.named(taskName("build", architecture, base, "DockerImage")) + inputs.files(buildBaseTask) dockerContext.fileProvider(buildContextTask.map { it.getDestinationDir() }) noCache = BuildParams.isCi() baseImages = [] - tags = generateTags(base, architecture) + tags = generateTags(dockerBase, architecture) platforms.add(architecture.dockerPlatform) - Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ) onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } } @@ -483,7 +519,7 @@ void addBuildEssDockerImageTask(Architecture architecture) { for (final Architecture architecture : Architecture.values()) { for (final DockerBase base : DockerBase.values()) { - if (base == DockerBase.CLOUD_ESS) { + if (base == DockerBase.CLOUD_ESS || base == DockerBase.WOLFI_ESS) { continue } addBuildDockerContextTask(architecture, base) @@ -491,7 +527,8 @@ for (final Architecture architecture : Architecture.values()) { addBuildDockerImageTask(architecture, base) } - addBuildEssDockerImageTask(architecture) + addBuildEssDockerImageTask(architecture, DockerBase.CLOUD_ESS) + addBuildEssDockerImageTask(architecture, DockerBase.WOLFI_ESS) } def exportDockerImages = tasks.register("exportDockerImages") @@ -515,6 +552,8 @@ subprojects { Project subProject -> base = DockerBase.CLOUD_ESS } else if (subProject.name.contains('cloud-')) { base = DockerBase.CLOUD + } else if (subProject.name.contains('wolfi-ess')) { + base = DockerBase.WOLFI_ESS } else if (subProject.name.contains('wolfi-')) { base = DockerBase.WOLFI } @@ -525,7 +564,8 @@ subprojects { Project subProject -> (base == DockerBase.CLOUD ? 'cloud.tar' : (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' : (base == DockerBase.WOLFI ? 'wolfi.tar' : - 'docker.tar')))) + (base == DockerBase.WOLFI_ESS ? 
'wolfi-ess.tar' : + 'docker.tar'))))) final String artifactName = "elasticsearch${arch}${base.suffix}_test" final String exportTaskName = taskName("export", architecture, base, 'DockerImage') @@ -541,7 +581,10 @@ subprojects { Project subProject -> tarFile, "elasticsearch${base.suffix}:${architecture.classifier}" dependsOn(parent.path + ":" + buildTaskName) - Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ) onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } } diff --git a/distribution/docker/src/docker/Dockerfile.cloud-ess b/distribution/docker/src/docker/Dockerfile.cloud-ess deleted file mode 100644 index f82752d67a284..0000000000000 --- a/distribution/docker/src/docker/Dockerfile.cloud-ess +++ /dev/null @@ -1,13 +0,0 @@ -FROM ${base_image} AS builder - -USER root - -COPY plugins/*.zip /opt/plugins/archive/ - -RUN chown root.root /opt/plugins/archive/* -RUN chmod 0444 /opt/plugins/archive/* - -FROM ${base_image} - -COPY --from=builder /opt/plugins /opt/plugins -ENV ES_PLUGIN_ARCHIVE_DIR /opt/plugins/archive diff --git a/distribution/docker/src/docker/Dockerfile.ess b/distribution/docker/src/docker/Dockerfile.ess new file mode 100644 index 0000000000000..4a230bb562786 --- /dev/null +++ b/distribution/docker/src/docker/Dockerfile.ess @@ -0,0 +1,51 @@ +FROM ${base_image} AS builder + +USER root + +<% if (docker_base == "wolfi_ess") { %> + # Add plugins infrastructure + RUN mkdir -p /opt/plugins/archive + RUN chmod -R 0555 /opt/plugins + + COPY filebeat-${version}.tar.gz metricbeat-${version}.tar.gz /tmp/ + RUN set -eux ; \\ + for beat in filebeat metricbeat ; do \\ + if [ ! -s /tmp/\$beat-${version}.tar.gz ]; then \\ + echo "/tmp/\$beat-${version}.tar.gz is empty - cannot uncompress" 2>&1 ; \\ + exit 1 ; \\ + fi ; \\ + if ! tar tf /tmp/\$beat-${version}.tar.gz >/dev/null; then \\ + echo "/tmp/\$beat-${version}.tar.gz is corrupt - cannot uncompress" 2>&1 ; \\ + exit 1 ; \\ + fi ; \\ + mkdir -p /opt/\$beat ; \\ + tar xf /tmp/\$beat-${version}.tar.gz -C /opt/\$beat --strip-components=1 ; \\ + done +<% } %> + +COPY plugins/*.zip /opt/plugins/archive/ + +RUN chown root.root /opt/plugins/archive/* +RUN chmod 0444 /opt/plugins/archive/* + +FROM ${base_image} +<% if (docker_base == "wolfi_ess") { %> +USER root + +RUN <%= retry.loop("apk", "export DEBIAN_FRONTEND=noninteractive && apk update && apk update && apk add --no-cache wget") %> + +# tweak entry point for ESS specific wolfi image +ENTRYPOINT ["/sbin/tini", "--"] +CMD ["/app/elasticsearch.sh"] +# Generate a stub command that will be overwritten at runtime +RUN mkdir /app && \\ + echo -e '#!/bin/bash\\nexec /usr/local/bin/docker-entrypoint.sh eswrapper' > /app/elasticsearch.sh && \\ + chmod 0555 /app/elasticsearch.sh + +COPY --from=builder --chown=0:0 /opt /opt +USER 1000:0 +<% } else { %> +COPY --from=builder /opt/plugins /opt/plugins +<% } %> + +ENV ES_PLUGIN_ARCHIVE_DIR /opt/plugins/archive diff --git a/distribution/docker/wolfi-ess-docker-aarch64-export/build.gradle b/distribution/docker/wolfi-ess-docker-aarch64-export/build.gradle new file mode 100644 index 0000000000000..537b5a093683e --- /dev/null +++ b/distribution/docker/wolfi-ess-docker-aarch64-export/build.gradle @@ -0,0 +1,2 @@ +// This file is intentionally blank. 
All configuration of the +// export is done in the parent project. diff --git a/distribution/docker/wolfi-ess-docker-export/build.gradle b/distribution/docker/wolfi-ess-docker-export/build.gradle new file mode 100644 index 0000000000000..537b5a093683e --- /dev/null +++ b/distribution/docker/wolfi-ess-docker-export/build.gradle @@ -0,0 +1,2 @@ +// This file is intentionally blank. All configuration of the +// export is done in the parent project. diff --git a/docs/changelog/112933.yaml b/docs/changelog/112933.yaml new file mode 100644 index 0000000000000..222cd5aadf739 --- /dev/null +++ b/docs/changelog/112933.yaml @@ -0,0 +1,5 @@ +pr: 112933 +summary: "Allow incubating Panama Vector in simdvec, and add vectorized `ipByteBin`" +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/113251.yaml b/docs/changelog/113251.yaml new file mode 100644 index 0000000000000..49167e6e4c915 --- /dev/null +++ b/docs/changelog/113251.yaml @@ -0,0 +1,5 @@ +pr: 113251 +summary: Span term query to convert to match no docs when unmapped field is targeted +area: Search +type: bug +issues: [] diff --git a/docs/changelog/113297.yaml b/docs/changelog/113297.yaml new file mode 100644 index 0000000000000..476619f432639 --- /dev/null +++ b/docs/changelog/113297.yaml @@ -0,0 +1,5 @@ +pr: 113297 +summary: "[ES|QL] add reverse function" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/113498.yaml b/docs/changelog/113498.yaml new file mode 100644 index 0000000000000..93b21a1d171eb --- /dev/null +++ b/docs/changelog/113498.yaml @@ -0,0 +1,5 @@ +pr: 113498 +summary: Listing all available databases in the _ingest/geoip/database API +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/113561.yaml b/docs/changelog/113561.yaml new file mode 100644 index 0000000000000..d00eac7685bcc --- /dev/null +++ b/docs/changelog/113561.yaml @@ -0,0 +1,5 @@ +pr: 113561 +summary: Add link to Circuit Breaker "Data too large" exception message +area: Infra/Circuit Breakers +type: enhancement +issues: [] diff --git a/docs/changelog/113690.yaml b/docs/changelog/113690.yaml new file mode 100644 index 0000000000000..bd5f1245f471e --- /dev/null +++ b/docs/changelog/113690.yaml @@ -0,0 +1,5 @@ +pr: 113690 +summary: Add object param for keeping synthetic source +area: Mapping +type: enhancement +issues: [] diff --git a/docs/changelog/113846.yaml b/docs/changelog/113846.yaml new file mode 100644 index 0000000000000..5fdd56e98d706 --- /dev/null +++ b/docs/changelog/113846.yaml @@ -0,0 +1,6 @@ +pr: 113846 +summary: Don't validate internal stats if they are empty +area: Aggregations +type: bug +issues: + - 113811 diff --git a/docs/changelog/113869.yaml b/docs/changelog/113869.yaml new file mode 100644 index 0000000000000..f1cd1ec423966 --- /dev/null +++ b/docs/changelog/113869.yaml @@ -0,0 +1,5 @@ +pr: 113869 +summary: Upgrade protobuf to 3.25.5 +area: Snapshot/Restore +type: upgrade +issues: [] diff --git a/docs/changelog/113873.yaml b/docs/changelog/113873.yaml new file mode 100644 index 0000000000000..ac52aaf94d518 --- /dev/null +++ b/docs/changelog/113873.yaml @@ -0,0 +1,5 @@ +pr: 113873 +summary: Default inference endpoint for ELSER +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/113900.yaml b/docs/changelog/113900.yaml new file mode 100644 index 0000000000000..25f833d251784 --- /dev/null +++ b/docs/changelog/113900.yaml @@ -0,0 +1,5 @@ +pr: 113900 +summary: Fix BWC for file-settings based role mappings +area: Authentication +type: bug +issues: []
diff --git a/docs/changelog/113910.yaml b/docs/changelog/113910.yaml new file mode 100644 index 0000000000000..aa9d3b61fe768 --- /dev/null +++ b/docs/changelog/113910.yaml @@ -0,0 +1,5 @@ +pr: 113910 +summary: Do not expand dots when storing objects in ignored source +area: Logs +type: bug +issues: [] diff --git a/docs/changelog/113961.yaml b/docs/changelog/113961.yaml new file mode 100644 index 0000000000000..24cb1f45f029e --- /dev/null +++ b/docs/changelog/113961.yaml @@ -0,0 +1,5 @@ +pr: 113961 +summary: "[ESQL] Support datetime data type in Least and Greatest functions" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/113989.yaml b/docs/changelog/113989.yaml new file mode 100644 index 0000000000000..7bf50b52d9e07 --- /dev/null +++ b/docs/changelog/113989.yaml @@ -0,0 +1,5 @@ +pr: 113989 +summary: Add `max_multipart_parts` setting to S3 repository +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/changelog/114116.yaml b/docs/changelog/114116.yaml new file mode 100644 index 0000000000000..8d1c9e162ae23 --- /dev/null +++ b/docs/changelog/114116.yaml @@ -0,0 +1,5 @@ +pr: 114116 +summary: "ES|QL: Ensure minimum capacity for `PlanStreamInput` caches" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/114177.yaml b/docs/changelog/114177.yaml new file mode 100644 index 0000000000000..d68486469d797 --- /dev/null +++ b/docs/changelog/114177.yaml @@ -0,0 +1,5 @@ +pr: 114177 +summary: "Make `randomInstantBetween` always return a value in range [`minInstant`, `maxInstant`]" +area: Infra/Metrics +type: bug +issues: [] diff --git a/docs/reference/connector/apis/create-connector-api.asciidoc b/docs/reference/connector/apis/create-connector-api.asciidoc index a115eab8853c0..3ecef6d302732 100644 --- a/docs/reference/connector/apis/create-connector-api.asciidoc +++ b/docs/reference/connector/apis/create-connector-api.asciidoc @@ -116,7 +116,7 @@ PUT _connector/my-connector "name": "My Connector", "description": "My Connector to sync data to Elastic index from Google Drive", "service_type": "google_drive", - "language": "english" + "language": "en" } ---- diff --git a/docs/reference/connector/docs/connectors-zoom.asciidoc b/docs/reference/connector/docs/connectors-zoom.asciidoc index d01b9c2be0368..d945a0aec3da1 100644 --- a/docs/reference/connector/docs/connectors-zoom.asciidoc +++ b/docs/reference/connector/docs/connectors-zoom.asciidoc @@ -63,18 +63,22 @@ To connect to Zoom you need to https://developers.zoom.us/docs/internal-apps/s2s 6. Click on the "Create" button to create the app registration. 7. After the registration is complete, you will be redirected to the app's overview page. Take note of the "App Credentials" value, as you'll need it later. 8. Navigate to the "Scopes" section and click on the "Add Scopes" button. -9. The following scopes need to be added to the app. +9. The following granular scopes need to be added to the app. + [source,bash] ---- -user:read:admin -meeting:read:admin -chat_channel:read:admin -recording:read:admin -chat_message:read:admin -report:read:admin +user:read:list_users:admin +meeting:read:list_meetings:admin +meeting:read:list_past_participants:admin +cloud_recording:read:list_user_recordings:admin +team_chat:read:list_user_channels:admin +team_chat:read:list_user_messages:admin ---- - +[NOTE] +==== +The connector requires a minimum scope of `user:read:list_users:admin` to ingest data into Elasticsearch. +==== ++ 10. Click on the "Done" button to add the selected scopes to your app. 11.
Navigate to the "Activation" section and input the necessary information to activate the app. @@ -220,18 +224,22 @@ To connect to Zoom you need to https://developers.zoom.us/docs/internal-apps/s2s 6. Click on the "Create" button to create the app registration. 7. After the registration is complete, you will be redirected to the app's overview page. Take note of the "App Credentials" value, as you'll need it later. 8. Navigate to the "Scopes" section and click on the "Add Scopes" button. -9. The following scopes need to be added to the app. +9. The following granular scopes need to be added to the app. + [source,bash] ---- -user:read:admin -meeting:read:admin -chat_channel:read:admin -recording:read:admin -chat_message:read:admin -report:read:admin +user:read:list_users:admin +meeting:read:list_meetings:admin +meeting:read:list_past_participants:admin +cloud_recording:read:list_user_recordings:admin +team_chat:read:list_user_channels:admin +team_chat:read:list_user_messages:admin ---- - +[NOTE] +==== +The connector requires a minimum scope of `user:read:list_users:admin` to ingest data into Elasticsearch. +==== ++ 10. Click on the "Done" button to add the selected scopes to your app. 11. Navigate to the "Activation" section and input the necessary information to activate the app. diff --git a/docs/reference/esql/functions/description/reverse.asciidoc b/docs/reference/esql/functions/description/reverse.asciidoc new file mode 100644 index 0000000000000..fbb3f3f6b4d54 --- /dev/null +++ b/docs/reference/esql/functions/description/reverse.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a new string representing the input string in reverse order. diff --git a/docs/reference/esql/functions/examples/reverse.asciidoc b/docs/reference/esql/functions/examples/reverse.asciidoc new file mode 100644 index 0000000000000..67c8af077b174 --- /dev/null +++ b/docs/reference/esql/functions/examples/reverse.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=reverse] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=reverse-result] +|=== +`REVERSE` works with Unicode, too! It keeps Unicode grapheme clusters together during reversal. +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=reverseEmoji] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=reverseEmoji-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index 0e32fca5b4ca1..2818a5ac56339 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -35,6 +35,24 @@ "variadic" : true, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "first", + "type" : "date", + "optional" : false, + "description" : "First of the columns to evaluate." + }, + { + "name" : "rest", + "type" : "date", + "optional" : true, + "description" : "The rest of the columns to evaluate."
+ } + ], + "variadic" : true, + "returnType" : "date" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 0ba34cf3cc9a2..7b545896f4ddc 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -34,6 +34,24 @@ "variadic" : true, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "first", + "type" : "date", + "optional" : false, + "description" : "First of the columns to evaluate." + }, + { + "name" : "rest", + "type" : "date", + "optional" : true, + "description" : "The rest of the columns to evaluate." + } + ], + "variadic" : true, + "returnType" : "date" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/reverse.json b/docs/reference/esql/functions/kibana/definition/reverse.json new file mode 100644 index 0000000000000..1b222691530f2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/reverse.json @@ -0,0 +1,38 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "reverse", + "description" : "Returns a new string representing the input string in reverse order.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "String expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "String expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "text" + } + ], + "examples" : [ + "ROW message = \"Some Text\" | EVAL message_reversed = REVERSE(message);", + "ROW bending_arts = \"💧🪨🔥💨\" | EVAL bending_arts_reversed = REVERSE(bending_arts);" + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/docs/reverse.md b/docs/reference/esql/functions/kibana/docs/reverse.md new file mode 100644 index 0000000000000..cbeade9189d80 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/reverse.md @@ -0,0 +1,10 @@ +<!-- +This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. +--> + +### REVERSE +Returns a new string representing the input string in reverse order. + +``` +ROW message = "Some Text" | EVAL message_reversed = REVERSE(message); +``` diff --git a/docs/reference/esql/functions/layout/reverse.asciidoc b/docs/reference/esql/functions/layout/reverse.asciidoc new file mode 100644 index 0000000000000..99c236d63492e --- /dev/null +++ b/docs/reference/esql/functions/layout/reverse.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-reverse]] +=== `REVERSE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/reverse.svg[Embedded,opts=inline] + +include::../parameters/reverse.asciidoc[] +include::../description/reverse.asciidoc[] +include::../types/reverse.asciidoc[] +include::../examples/reverse.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/reverse.asciidoc b/docs/reference/esql/functions/parameters/reverse.asciidoc new file mode 100644 index 0000000000000..d56d115662491 --- /dev/null +++ b/docs/reference/esql/functions/parameters/reverse.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it.
See ../README.md for how to regenerate it. + +*Parameters* + +`str`:: +String expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/signature/reverse.svg b/docs/reference/esql/functions/signature/reverse.svg new file mode 100644 index 0000000000000..c23ce5583a8c0 --- /dev/null +++ b/docs/reference/esql/functions/signature/reverse.svg @@ -0,0 +1 @@ +REVERSE(str) \ No newline at end of file diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index ed97769b900e7..f5222330d579d 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -17,6 +17,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -38,6 +39,7 @@ include::layout/locate.asciidoc[] include::layout/ltrim.asciidoc[] include::layout/repeat.asciidoc[] include::layout/replace.asciidoc[] +include::layout/reverse.asciidoc[] include::layout/right.asciidoc[] include::layout/rtrim.asciidoc[] include::layout/space.asciidoc[] diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc index 537be55cd17ef..1454bbb6f81c1 100644 --- a/docs/reference/esql/functions/types/greatest.asciidoc +++ b/docs/reference/esql/functions/types/greatest.asciidoc @@ -7,6 +7,7 @@ first | rest | result boolean | boolean | boolean boolean | | boolean +date | date | date double | double | double integer | integer | integer integer | | integer diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc index 537be55cd17ef..1454bbb6f81c1 100644 --- a/docs/reference/esql/functions/types/least.asciidoc +++ b/docs/reference/esql/functions/types/least.asciidoc @@ -7,6 +7,7 @@ first | rest | result boolean | boolean | boolean boolean | | boolean +date | date | date double | double | double integer | integer | integer integer | | integer diff --git a/docs/reference/esql/functions/types/reverse.asciidoc b/docs/reference/esql/functions/types/reverse.asciidoc new file mode 100644 index 0000000000000..974066d225bca --- /dev/null +++ b/docs/reference/esql/functions/types/reverse.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +str | result +keyword | keyword +text | text +|=== diff --git a/docs/reference/ingest/processors/inference.asciidoc b/docs/reference/ingest/processors/inference.asciidoc index fa4f246cdd7c8..4699f634afe37 100644 --- a/docs/reference/ingest/processors/inference.asciidoc +++ b/docs/reference/ingest/processors/inference.asciidoc @@ -169,6 +169,18 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= + `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -224,6 +236,18 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= + `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -304,6 +328,23 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`span`:::: +(Optional, integer) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] + +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= + + `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -363,6 +404,18 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= + `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -424,6 +477,22 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ 
+.Properties of deberta_v2 +[%collapsible%open] +======= +`span`:::: +(Optional, integer) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] + +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= + `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -515,6 +584,18 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= + `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] diff --git a/docs/reference/mapping/fields/synthetic-source.asciidoc b/docs/reference/mapping/fields/synthetic-source.asciidoc index ccea38cf602da..902b6c26611e5 100644 --- a/docs/reference/mapping/fields/synthetic-source.asciidoc +++ b/docs/reference/mapping/fields/synthetic-source.asciidoc @@ -32,18 +32,25 @@ space. Additional latency can be avoided by not loading `_source` field in queri [[synthetic-source-fields]] ===== Supported fields -Synthetic `_source` is supported by all field types. Depending on implementation details, field types have different properties when used with synthetic `_source`. +Synthetic `_source` is supported by all field types. Depending on implementation details, field types have different +properties when used with synthetic `_source`. -<> construct synthetic `_source` using existing data, most commonly <> and <>. For these field types, no additional space is needed to store the contents of `_source` field. Due to the storage layout of <>, the generated `_source` field undergoes <> compared to original document. +<> construct synthetic `_source` using existing data, most +commonly <> and <>. For these field types, no additional space +is needed to store the contents of `_source` field. Due to the storage layout of <>, the +generated `_source` field undergoes <> compared to original document. -For all other field types, the original value of the field is stored as is, in the same way as the `_source` field in non-synthetic mode. In this case there are no modifications and field data in `_source` is the same as in the original document. Similarly, malformed values of fields that use <> or <> need to be stored as is. This approach is less storage efficient since data needed for `_source` reconstruction is stored in addition to other data required to index the field (like `doc_values`). +For all other field types, the original value of the field is stored as is, in the same way as the `_source` field in +non-synthetic mode. In this case there are no modifications and field data in `_source` is the same as in the original +document. Similarly, malformed values of fields that use <> or +<> need to be stored as is. This approach is less storage efficient since data needed for +`_source` reconstruction is stored in addition to other data required to index the field (like `doc_values`). 
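To make the storage trade-off above concrete, here is a toy Java sketch (hypothetical, not the actual Lucene machinery): sorted-set doc values retain each field's values sorted and deduplicated, so a `_source` synthesized from them inherits that shape at no extra storage cost, while preserving the exact original form requires storing it separately.

[source,java]
----
import java.util.List;
import java.util.TreeSet;

// Toy model of SORTED_SET doc values: the index keeps values sorted and
// deduplicated, so a _source synthesized from them loses order and duplicates.
public class SyntheticSourceSketch {
    public static void main(String[] args) {
        List<String> original = List.of("b", "a", "b", "c"); // values as written in the document
        TreeSet<String> docValues = new TreeSet<>(original); // what the index retains
        System.out.println(docValues); // prints [a, b, c] - the synthesized shape
    }
}
----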
[[synthetic-source-restrictions]] ===== Synthetic `_source` restrictions -Synthetic `_source` cannot be used together with field mappings that use <>. - -Some field types have additional restrictions. These restrictions are documented in the **synthetic `_source`** section of the field type's <>. +Some field types have additional restrictions. These restrictions are documented in the **synthetic `_source`** section +of the field type's <>. [[synthetic-source-modifications]] ===== Synthetic `_source` modifications @@ -144,6 +151,42 @@ Will become: ---- // TEST[s/^/{"_source":/ s/\n$/}/] +This impacts how source contents can be referenced in <>. For instance, referencing +a field by its original source path will return null: + +[source,js] +---- +"script": { "source": """ emit(params._source['foo.bar.baz']) """ } +---- +// NOTCONSOLE + +Instead, source references need to be in line with the mapping structure: + +[source,js] +---- +"script": { "source": """ emit(params._source['foo']['bar']['baz']) """ } +---- +// NOTCONSOLE + +or simply + +[source,js] +---- +"script": { "source": """ emit(params._source.foo.bar.baz) """ } +---- +// NOTCONSOLE + +The following <> are preferable as, in addition to being agnostic to the +mapping structure, they make use of doc values if available and fall back to synthetic source only when needed. This +reduces source synthesis, a slow and costly operation. + +[source,js] +---- +"script": { "source": """ emit(field('foo.bar.baz').get(null)) """ } +"script": { "source": """ emit($('foo.bar.baz', null)) """ } +---- +// NOTCONSOLE + [[synthetic-source-modifications-alphabetical]] ====== Alphabetical sorting Synthetic `_source` fields are sorted alphabetically. The @@ -155,18 +198,99 @@ that ordering. [[synthetic-source-modifications-ranges]] ====== Representation of ranges -Range field values (e.g. `long_range`) are always represented as inclusive on both sides with bounds adjusted accordingly. See <>. +Range field values (e.g. `long_range`) are always represented as inclusive on both sides with bounds adjusted +accordingly. See <>. [[synthetic-source-precision-loss-for-point-types]] ====== Reduced precision of `geo_point` values -Values of `geo_point` fields are represented in synthetic `_source` with reduced precision. See <>. +Values of `geo_point` fields are represented in synthetic `_source` with reduced precision. See +<>. + +[[synthetic-source-keep]] +====== Minimizing source modifications + +It is possible to avoid synthetic source modifications for a particular object or field, at extra storage cost. +This is controlled through the `synthetic_source_keep` param, with the following options: + + - `none`: synthetic source diverges from the original source as described above (default). + - `arrays`: arrays of the corresponding field or object preserve the original element ordering and duplicate elements. +The synthetic source fragment for such arrays is not guaranteed to match the original source exactly, e.g. array +`[1, 2, [5], [[4, [3]]], 5]` may appear as-is or in an equivalent format like `[1, 2, 5, 4, 3, 5]`. The exact format +may change in the future, in an effort to reduce the storage overhead of this option. +- `all`: the source for both singleton instances and arrays of the corresponding field or object gets recorded. When +applied to objects, the source of all sub-objects and sub-fields gets captured. Furthermore, the original source of +arrays gets captured and appears in synthetic source with no modifications.
+ +For instance: + +[source,console,id=create-index-with-synthetic-source-keep] +---- +PUT idx_keep +{ + "mappings": { + "_source": { + "mode": "synthetic" + }, + "properties": { + "path": { + "type": "object", + "synthetic_source_keep": "all" + }, + "ids": { + "type": "integer", + "synthetic_source_keep": "arrays" + } + } + } +} +---- +// TEST + +[source,console,id=synthetic-source-keep-example] +---- +PUT idx_keep/_doc/1 +{ + "path": { + "to": [ + { "foo": [3, 2, 1] }, + { "foo": [30, 20, 10] } + ], + "bar": "baz" + }, + "ids": [ 200, 100, 300, 100 ] +} +---- +// TEST[s/$/\nGET idx_keep\/_doc\/1?filter_path=_source\n/] + +returns the original source, with no array deduplication or sorting: + +[source,console-result] +---- +{ + "path": { + "to": [ + { "foo": [3, 2, 1] }, + { "foo": [30, 20, 10] } + ], + "bar": "baz" + }, + "ids": [ 200, 100, 300, 100 ] +} +---- +// TEST[s/^/{"_source":/ s/\n$/}/] +The option for capturing the source of arrays can be applied at the index level, by setting +`index.mapping.synthetic_source_keep` to `arrays`. This applies to all objects and fields in the index, except for +the ones with explicit overrides of `synthetic_source_keep` set to `none`. In this case, the storage overhead grows +with the number and sizes of arrays present in the source of each document. [[synthetic-source-fields-native-list]] ===== Field types that support synthetic source with no storage overhead -The following field types support synthetic source using data from <> or <>, and require no additional storage space to construct the `_source` field. +The following field types support synthetic source using data from <> or +<>, and require no additional storage space to construct the `_source` field. -NOTE: If you enable the <> or <> settings, then additional storage is required to store ignored field values for these types. +NOTE: If you enable the <> or <> settings, then +additional storage is required to store ignored field values for these types. ** <> ** {plugins}/mapper-annotated-text-usage.html#annotated-text-synthetic-source[`annotated-text`] diff --git a/docs/reference/mapping/runtime.asciidoc b/docs/reference/mapping/runtime.asciidoc index 190081fa801b4..1ee1194279061 100644 --- a/docs/reference/mapping/runtime.asciidoc +++ b/docs/reference/mapping/runtime.asciidoc @@ -821,8 +821,6 @@ address. [[lookup-runtime-fields]] ==== Retrieve fields from related indices -experimental[] - The <> parameter on the `_search` API can also be used to retrieve fields from the related indices via runtime fields with a type of `lookup`. diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index 44e1c2949775e..ca2c23f932fc3 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -125,8 +125,7 @@ The following parameters are accepted by `date` fields: `locale`:: The locale to use when parsing dates since months do not have the same names - and/or abbreviations in all languages. The default is the - https://docs.oracle.com/javase/8/docs/api/java/util/Locale.html#ROOT[`ROOT` locale]. + and/or abbreviations in all languages. The default is `ENGLISH`.
<>:: diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index d0fdf0145aa58..07abbff986643 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -63,12 +63,14 @@ PUT my-index-000002 `inference_id`:: (Required, string) {infer-cap} endpoint that will be used to generate the embeddings for the field. +This parameter cannot be updated. Use the <> to create the endpoint. If `search_inference_id` is specified, the {infer} endpoint defined by `inference_id` will only be used at index time. `search_inference_id`:: (Optional, string) {infer-cap} endpoint that will be used to generate embeddings at query time. +You can update this parameter by using the <>. Use the <> to create the endpoint. If not specified, the {infer} endpoint defined by `inference_id` will be used at both index and query time. diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index 97122141d7558..ef19fbf4e267d 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -988,6 +988,7 @@ values are + -- * `bert`: Use for BERT-style models +* `deberta_v2`: Use for DeBERTa v2 and v3-style models * `mpnet`: Use for MPNet-style models * `roberta`: Use for RoBERTa-style and BART-style models * experimental:[] `xlm_roberta`: Use for XLMRoBERTa-style models @@ -1037,6 +1038,19 @@ sequence. Therefore, do not use `second` in this case. end::inference-config-nlp-tokenization-truncate[] +tag::inference-config-nlp-tokenization-truncate-deberta-v2[] +Indicates how tokens are truncated when they exceed `max_sequence_length`. +The default value is `first`. ++ +-- +* `balanced`: One or both of the first and second sequences may be truncated so as to balance the tokens included from both sequences. +* `none`: No truncation occurs; the inference request receives an error. +* `first`: Only the first sequence is truncated. +* `second`: Only the second sequence is truncated. If there is just one sequence, that sequence is truncated. +-- + +end::inference-config-nlp-tokenization-truncate-deberta-v2[] + tag::inference-config-nlp-tokenization-bert-with-special-tokens[] Tokenize with special tokens. The tokens typically included in BERT-style tokenization are: + @@ -1050,10 +1064,23 @@ tag::inference-config-nlp-tokenization-bert-ja-with-special-tokens[] Tokenize with special tokens if `true`. end::inference-config-nlp-tokenization-bert-ja-with-special-tokens[] +tag::inference-config-nlp-tokenization-deberta-v2[] +DeBERTa-style tokenization is to be performed with the enclosed settings. +end::inference-config-nlp-tokenization-deberta-v2[] + tag::inference-config-nlp-tokenization-max-sequence-length[] Specifies the maximum number of tokens allowed to be output by the tokenizer. end::inference-config-nlp-tokenization-max-sequence-length[] +tag::inference-config-nlp-tokenization-deberta-v2-with-special-tokens[] +Tokenize with special tokens. The tokens typically included in DeBERTa-style tokenization are: ++ +-- +* `[CLS]`: The first token of the sequence being classified. +* `[SEP]`: Indicates sequence separation and sequence end. +-- +end::inference-config-nlp-tokenization-deberta-v2-with-special-tokens[] + tag::inference-config-nlp-tokenization-roberta[] RoBERTa-style tokenization is to be performed with the enclosed settings. 
end::inference-config-nlp-tokenization-roberta[] diff --git a/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc b/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc index 9aac913e7559f..99c3ecad03a9d 100644 --- a/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc +++ b/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc @@ -137,6 +137,18 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, string) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= + `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc index e29bc8823ab29..32265af5f795b 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc @@ -773,6 +773,37 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, boolean) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ==== +`deberta_v2`:: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +==== +`do_lower_case`::: +(Optional, boolean) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] ++ +-- +Defaults to `false`. +-- + +`max_sequence_length`::: +(Optional, integer) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] + +`span`::: +(Optional, integer) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] + +`truncate`::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] + +`with_special_tokens`::: +(Optional, boolean) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2-with-special-tokens] +==== `roberta`:: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] diff --git a/docs/reference/release-notes/8.15.2.asciidoc b/docs/reference/release-notes/8.15.2.asciidoc new file mode 100644 index 0000000000000..7dfd8690109b2 --- /dev/null +++ b/docs/reference/release-notes/8.15.2.asciidoc @@ -0,0 +1,42 @@ +[[release-notes-8.15.2]] +== {es} version 8.15.2 + +Also see <>. 
+ +[[bug-8.15.2]] +[float] +=== Bug fixes + +Authorization:: +* Fix remote cluster credential secure settings reload {es-pull}111535[#111535] + +ES|QL:: +* ESQL: Don't mutate the `BoolQueryBuilder` in plan {es-pull}111519[#111519] +* ES|QL: Fix `ResolvedEnrichPolicy` serialization (bwc) in v 8.15 {es-pull}112985[#112985] (issue: {es-issue}112968[#112968]) +* Fix union-types where one index is missing the field {es-pull}111932[#111932] (issue: {es-issue}111912[#111912]) +* Support widening of numeric types in union-types {es-pull}112610[#112610] (issue: {es-issue}111277[#111277]) + +Infra/Core:: +* JSON parse failures should be 4xx codes {es-pull}112703[#112703] +* Json parsing exceptions should not cause 500 errors {es-pull}111548[#111548] (issue: {es-issue}111542[#111542]) +* Make sure file accesses in `DnRoleMapper` are done in stack frames with permissions {es-pull}112400[#112400] + +Ingest Node:: +* Fix missing header in `put_geoip_database` JSON spec {es-pull}112581[#112581] + +Logs:: +* Fix encoding of dynamic arrays in ignored source {es-pull}112713[#112713] + +Mapping:: +* Full coverage of ECS by ecs@mappings when `date_detection` is disabled {es-pull}112444[#112444] (issue: {es-issue}112398[#112398]) + +Search:: +* Fix parsing error in `_terms_enum` API {es-pull}112872[#112872] (issue: {es-issue}94378[#94378]) + +Security:: +* Allowlist `tracestate` header on remote server port {es-pull}112649[#112649] + +Vector Search:: +* Fix NPE in `dense_vector` stats {es-pull}112720[#112720] + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index bf5260928797c..1e0018f590ac0 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -72,16 +72,54 @@ version 8.16 `allow_rebalance` setting defaults to `always` unless the legacy al [discrete] [[add_global_retention_in_data_stream_lifecycle]] === Add global retention in data stream lifecycle -Data stream lifecycle now supports configuring retention on a cluster level, namely global retention. Global retention -allows us to configure two different retentions: +Data stream lifecycle now supports configuring retention on a cluster level, +namely global retention. Global retention allows us to configure two different +retentions: -- `data_streams.lifecycle.retention.default` is applied to all data streams managed by the data stream lifecycle that do not have retention -defined on the data stream level. -- `data_streams.lifecycle.retention.max` is applied to all data streams managed by the data stream lifecycle and it allows any data stream -data to be deleted after the `max_retention` has passed. +- `data_streams.lifecycle.retention.default` is applied to all data streams managed +by the data stream lifecycle that do not have retention defined on the data stream level. +- `data_streams.lifecycle.retention.max` is applied to all data streams managed by the +data stream lifecycle and it allows any data stream data to be deleted after the `max_retention` has passed.
{es-pull}111972[#111972] +[discrete] +[[enable_zstandard_compression_for_indices_with_index_codec_set_to_best_compression]] +=== Enable ZStandard compression for indices with index.codec set to best_compression +Previously, DEFLATE compression was used to compress stored fields in indices with the index.codec index setting set to +best_compression. With this change, ZStandard is used as the compression algorithm for stored fields in indices with the +index.codec index setting set to best_compression. Using ZStandard results in less storage usage with +similar indexing throughput, depending on what options are used. Experiments with indexing logs have shown that +ZStandard offers ~12% lower storage usage and a ~14% higher indexing throughput compared to DEFLATE. + +{es-pull}112665[#112665] + // end::notable-highlights[] +[discrete] +[[esql_multi_value_fields_supported_in_geospatial_predicates]] +=== ESQL: Multi-value fields supported in Geospatial predicates +Supporting multi-value fields in `WHERE` predicates is a challenge due to not knowing whether `ALL` or `ANY` +of the values in the field should pass the predicate. +For example, should the field `age:[10,30]` pass the predicate `WHERE age>20` or not? +This ambiguity does not exist with the spatial predicates +`ST_INTERSECTS` and `ST_DISJOINT`, because the choice between `ANY` or `ALL` +is implied by the predicate itself. +Consider a predicate checking a field named `location` against a test geometry named `shape`: + +* `ST_INTERSECTS(field, shape)` - true if `ANY` value can intersect the shape +* `ST_DISJOINT(field, shape)` - true only if `ALL` values are disjoint from the shape + +This works even if the shape argument is itself a complex or compound geometry. + +Similar logic exists for `ST_CONTAINS` and `ST_WITHIN` predicates, but these are not as easily solved +with `ANY` or `ALL`, because a collection of geometries contains another collection if each of the contained +geometries is within at least one of the containing geometries. Evaluating this requires that the multi-value +field is first combined into a single geometry before performing the predicate check. + +* `ST_CONTAINS(field, shape)` - true if the combined geometry contains the shape +* `ST_WITHIN(field, shape)` - true if the combined geometry is within the shape + +{es-pull}112063[#112063] + diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 4a8895807f2fa..4dcf0d328e4f1 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -206,7 +206,12 @@ GET /_xpack/usage "inference": { "available" : true, "enabled" : true, - "models" : [] + "models" : [{ + "service": "elasticsearch", + "task_type": "SPARSE_EMBEDDING", + "count": 1 + } + ] }, "logstash" : { "available" : true, diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 89373d0ce8d44..a38fdcfc36fd5 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -76,27 +76,29 @@ Docker container images may be downloaded from the Elastic Docker Registry. [[jvm-version]] === Java (JVM) Version -{es} is built using Java, and includes a bundled version of -https://openjdk.java.net[OpenJDK] from the JDK maintainers (GPLv2+CE) within -each distribution. The bundled JVM is the recommended JVM. - -To use your own version of Java, set the `ES_JAVA_HOME` environment variable.
-If you must use a version of Java that is different from the bundled JVM, it is -best to use the latest release of a link:/support/matrix[supported] -https://www.oracle.com/technetwork/java/eol-135779.html[LTS version of Java]. -{es} is closely coupled to certain OpenJDK-specific features, so it may not -work correctly with other JVMs. {es} will refuse to start if a known-bad -version of Java is used. - -If you use a JVM other than the bundled one, you are responsible for reacting -to announcements related to its security issues and bug fixes, and must -yourself determine whether each update is necessary or not. In contrast, the -bundled JVM is treated as an integral part of {es}, which means that Elastic -takes responsibility for keeping it up to date. Security issues and bugs within -the bundled JVM are treated as if they were within {es} itself. - -The bundled JVM is located within the `jdk` subdirectory of the {es} home -directory. You may remove this directory if using your own JVM. +{es} is built using Java, and includes a bundled version of https://openjdk.java.net[OpenJDK] within each distribution. We strongly +recommend using the bundled JVM in all installations of {es}. + +The bundled JVM is treated the same as any other dependency of {es} in terms of support and maintenance. This means that Elastic takes +responsibility for keeping it up to date, and reacts to security issues and bug reports as needed to address vulnerabilities and other bugs +in {es}. Elastic's support of the bundled JVM is subject to Elastic's https://www.elastic.co/support_policy[support policy] and +https://www.elastic.co/support/eol[end-of-life schedule] and is independent of the support policy and end-of-life schedule offered by the +original supplier of the JVM. Elastic does not support using the bundled JVM for purposes other than running {es}. + +TIP: {es} uses only a subset of the features offered by the JVM. Bugs and security issues in the bundled JVM often relate to features that +{es} does not use. Such issues do not apply to {es}. Elastic analyzes reports of security vulnerabilities in all its dependencies, including +in the bundled JVM, and will issue an https://www.elastic.co/community/security[Elastic Security Advisory] if such an advisory is needed. + +If you decide to run {es} using a version of Java that is different from the bundled one, prefer to use the latest release of an +https://www.oracle.com/technetwork/java/eol-135779.html[LTS version of Java], which is link:/support/matrix[listed in the support matrix]. +Although such a configuration is supported, if you encounter a security issue or other bug in your chosen JVM then Elastic may not be able +to help unless the issue is also present in the bundled JVM. Instead, you must seek assistance directly from the supplier of your chosen +JVM. You must also take responsibility for reacting to security and bug announcements from the supplier of your chosen JVM. {es} may not +perform optimally if using a JVM other than the bundled one. {es} is closely coupled to certain OpenJDK-specific features, so it may not +work correctly with JVMs that are not OpenJDK. {es} will refuse to start if you attempt to use a known-bad JVM version. + +To use your own version of Java, set the `ES_JAVA_HOME` environment variable to the path to your own JVM installation. The bundled JVM is +located within the `jdk` subdirectory of the {es} home directory. You may remove this directory if using your own JVM.
[discrete] [[jvm-agents]] diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index a75a1a3ce1042..71a9fd8b87c96 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -261,9 +261,11 @@ multiple deployments may share the same bucket. `chunk_size`:: - (<>) Big files can be broken down into chunks during snapshotting if needed. - Specify the chunk size as a value and unit, for example: - `1TB`, `1GB`, `10MB`. Defaults to the maximum size of a blob in the S3 which is `5TB`. + (<>) The maximum size of an object that {es} will write to the repository + when creating a snapshot. Files which are larger than `chunk_size` will be chunked into several + smaller objects. {es} may also split a file across multiple objects to satisfy other constraints + such as the `max_multipart_parts` limit. Defaults to `5TB` which is the + https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html[maximum size of an object in AWS S3]. `compress`:: @@ -292,6 +294,13 @@ include::repository-shared-settings.asciidoc[] size allowed by S3. Defaults to `100mb` or `5%` of JVM heap, whichever is smaller. +`max_multipart_parts`:: + + (integer) The maximum number of parts that {es} will write during a multipart upload of a single object. Files which are larger than + `buffer_size × max_multipart_parts` will be chunked into several smaller objects. {es} may also split a file across multiple objects to + satisfy other constraints such as the `chunk_size` limit. Defaults to `10000` which is the + https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html[maximum number of parts in a multipart upload in AWS S3]. + `canned_acl`:: The S3 repository supports all @@ -311,20 +320,14 @@ include::repository-shared-settings.asciidoc[] `delete_objects_max_size`:: - (<>) Sets the maxmimum batch size, betewen 1 and 1000, used - for `DeleteObjects` requests. Defaults to 1000 which is the maximum number - supported by the - https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html[AWS - DeleteObjects API]. + (integer) Sets the maximum batch size, between 1 and 1000, used for `DeleteObjects` requests. Defaults to 1000 which is the maximum + number supported by the https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html[AWS DeleteObjects API]. `max_multipart_upload_cleanup_size`:: - (<>) Sets the maximum number of possibly-dangling multipart - uploads to clean up in each batch of snapshot deletions. Defaults to `1000` - which is the maximum number supported by the - https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListMultipartUploads.html[AWS - ListMultipartUploads API]. If set to `0`, {es} will not attempt to clean up - dangling multipart uploads. + (integer) Sets the maximum number of possibly-dangling multipart uploads to clean up in each batch of snapshot deletions. Defaults to + `1000` which is the maximum number supported by the https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListMultipartUploads.html[AWS + ListMultipartUploads API]. If set to `0`, {es} will not attempt to clean up dangling multipart uploads.
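As a rough sizing aid, and assuming the defaults quoted above (`chunk_size: 5TB`, `buffer_size: 100mb`, `max_multipart_parts: 10000`), the largest object actually written is bounded by the smaller of `chunk_size` and `buffer_size × max_multipart_parts`. The following is an illustrative back-of-the-envelope sketch, not {es} code:

[source,java]
----
// Illustrative only: estimates when a snapshot file is split across objects,
// assuming the documented default settings for the three limits involved.
public class S3SplitSketch {
    public static void main(String[] args) {
        long mib = 1024L * 1024L;
        long chunkSize = 5L * 1024L * 1024L * mib; // chunk_size default: 5TB
        long multipartCap = 100L * mib * 10_000L;  // buffer_size * max_multipart_parts
        long maxObjectBytes = Math.min(chunkSize, multipartCap);
        System.out.println(maxObjectBytes / (1024L * mib) + " GiB per object"); // 976 GiB
    }
}
----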
NOTE: The option of defining client settings in the repository settings as documented below is considered deprecated, and will be removed in a future diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f1c4b15ea5702..53a65e217ed18 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -749,9 +749,9 @@ - - - + + + @@ -759,9 +759,9 @@ - - - + + + diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/CellBoundary.java b/libs/h3/src/main/java/org/elasticsearch/h3/CellBoundary.java index 74115d5a002d6..e0f9df174c2b5 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/CellBoundary.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/CellBoundary.java @@ -22,36 +22,52 @@ */ package org.elasticsearch.h3; +import java.util.Arrays; +import java.util.Objects; + /** * cell boundary points as {@link LatLng} */ public final class CellBoundary { - /** Maximum number of cell boundary vertices; worst case is pentagon: * 5 original verts + 5 edge crossings */ - private static final int MAX_CELL_BNDRY_VERTS = 10; + static final int MAX_CELL_BNDRY_VERTS = 10; /** How many points it holds */ - private int numVertext; + private final int numPoints; /** The actual points */ - private final LatLng[] points = new LatLng[MAX_CELL_BNDRY_VERTS]; - - CellBoundary() {} + private final LatLng[] points; - void add(LatLng point) { - points[numVertext++] = point; + CellBoundary(LatLng[] points, int numPoints) { + this.points = points; + this.numPoints = numPoints; } /** Number of points in this boundary */ public int numPoints() { - return numVertext; + return numPoints; } /** Return the point at the given position*/ public LatLng getLatLon(int i) { - if (i >= numVertext) { - throw new IndexOutOfBoundsException(); - } + assert i >= 0 && i < numPoints; return points[i]; } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final CellBoundary that = (CellBoundary) o; + return numPoints == that.numPoints && Arrays.equals(points, that.points); + } + + @Override + public int hashCode() { + return Objects.hash(numPoints, Arrays.hashCode(points)); + } } diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/Constants.java b/libs/h3/src/main/java/org/elasticsearch/h3/Constants.java index 570052700615f..3b3f760c0534f 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/Constants.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/Constants.java @@ -34,10 +34,6 @@ final class Constants { * 2.0 * PI */ public static final double M_2PI = 2.0 * Math.PI; - /** - * max H3 resolution; H3 version 1 has 16 resolutions, numbered 0 through 15 - */ - public static int MAX_H3_RES = 15; /** * The number of H3 base cells */ diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/FaceIJK.java b/libs/h3/src/main/java/org/elasticsearch/h3/FaceIJK.java index ae59ff359d1f8..866fdfe8a7f8b 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/FaceIJK.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/FaceIJK.java @@ -439,7 +439,8 @@ public CellBoundary faceIjkPentToCellBoundary(int res, int start, int length) { // convert each vertex to lat/lng // adjust the face of each vertex as appropriate and introduce // edge-crossing vertices as needed - final CellBoundary boundary = new CellBoundary(); + final LatLng[] points = new LatLng[CellBoundary.MAX_CELL_BNDRY_VERTS]; + int numPoints = 0; final CoordIJK scratch = new CoordIJK(0, 0, 0); final FaceIJK fijk = new 
FaceIJK(this.face, scratch); final int[][] coord = isResolutionClassIII ? VERTEX_CLASSIII : VERTEX_CLASSII; @@ -501,21 +502,19 @@ public CellBoundary faceIjkPentToCellBoundary(int res, int start, int length) { // find the intersection and add the lat/lng point to the result final Vec2d inter = Vec2d.v2dIntersect(orig2d0, orig2d1, edge0, edge1); - final LatLng point = inter.hex2dToGeo(fijkOrient.face, adjRes, true); - boundary.add(point); + points[numPoints++] = inter.hex2dToGeo(fijkOrient.face, adjRes, true); } // convert vertex to lat/lng and add to the result // vert == start + NUM_PENT_VERTS is only used to test for possible // intersection on last edge if (vert < start + Constants.NUM_PENT_VERTS) { - final LatLng point = fijk.coord.ijkToGeo(fijk.face, adjRes, true); - boundary.add(point); + points[numPoints++] = fijk.coord.ijkToGeo(fijk.face, adjRes, true); } lastFace = fijk.face; lastCoord.reset(fijk.coord.i, fijk.coord.j, fijk.coord.k); } - return boundary; + return new CellBoundary(points, numPoints); } /** @@ -547,7 +546,8 @@ public CellBoundary faceIjkToCellBoundary(final int res, final int start, final // convert each vertex to lat/lng // adjust the face of each vertex as appropriate and introduce // edge-crossing vertices as needed - final CellBoundary boundary = new CellBoundary(); + final LatLng[] points = new LatLng[CellBoundary.MAX_CELL_BNDRY_VERTS]; + int numPoints = 0; final CoordIJK scratch1 = new CoordIJK(0, 0, 0); final FaceIJK fijk = new FaceIJK(this.face, scratch1); final CoordIJK scratch2 = isResolutionClassIII ? new CoordIJK(0, 0, 0) : null; @@ -616,8 +616,7 @@ public CellBoundary faceIjkToCellBoundary(final int res, final int start, final */ final boolean isIntersectionAtVertex = orig2d0.numericallyIdentical(inter) || orig2d1.numericallyIdentical(inter); if (isIntersectionAtVertex == false) { - final LatLng point = inter.hex2dToGeo(this.face, adjRes, true); - boundary.add(point); + points[numPoints++] = inter.hex2dToGeo(this.face, adjRes, true); } } @@ -625,13 +624,12 @@ public CellBoundary faceIjkToCellBoundary(final int res, final int start, final // vert == start + NUM_HEX_VERTS is only used to test for possible // intersection on last edge if (vert < start + Constants.NUM_HEX_VERTS) { - final LatLng point = fijk.coord.ijkToGeo(fijk.face, adjRes, true); - boundary.add(point); + points[numPoints++] = fijk.coord.ijkToGeo(fijk.face, adjRes, true); } lastFace = fijk.face; lastOverage = overage; } - return boundary; + return new CellBoundary(points, numPoints); } /** diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/H3.java b/libs/h3/src/main/java/org/elasticsearch/h3/H3.java index 46bcc3f141dde..8c0bba62cecdb 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/H3.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/H3.java @@ -30,8 +30,10 @@ * Defines the public API of the H3 library. 
*/ public final class H3 { - - public static int MAX_H3_RES = Constants.MAX_H3_RES; + /** + * max H3 resolution; H3 version 1 has 16 resolutions, numbered 0 through 15 + */ + public static int MAX_H3_RES = 15; private static final long[] NORTH = new long[MAX_H3_RES + 1]; private static final long[] SOUTH = new long[MAX_H3_RES + 1]; @@ -97,7 +99,7 @@ public static boolean h3IsValid(long h3) { } int res = H3Index.H3_get_resolution(h3); - if (res < 0 || res > Constants.MAX_H3_RES) { // LCOV_EXCL_BR_LINE + if (res < 0 || res > MAX_H3_RES) { // LCOV_EXCL_BR_LINE // Resolutions less than zero can not be represented in an index return false; } @@ -118,7 +120,7 @@ public static boolean h3IsValid(long h3) { } } - for (int r = res + 1; r <= Constants.MAX_H3_RES; r++) { + for (int r = res + 1; r <= MAX_H3_RES; r++) { int digit = H3Index.H3_get_index_digit(h3, r); if (digit != CoordIJK.Direction.INVALID_DIGIT.digit()) { return false; @@ -601,7 +603,7 @@ private static String[] h3ToStringList(long[] h3s) { * @throws IllegalArgumentException res is not a valid H3 resolution. */ private static void checkResolution(int res) { - if (res < 0 || res > Constants.MAX_H3_RES) { + if (res < 0 || res > MAX_H3_RES) { throw new IllegalArgumentException("resolution [" + res + "] is out of range (must be 0 <= res <= 15)"); } } diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/H3Index.java b/libs/h3/src/main/java/org/elasticsearch/h3/H3Index.java index 7babedc55eb0e..2b1b9cade21a4 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/H3Index.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/H3Index.java @@ -160,14 +160,14 @@ public static int H3_get_resolution(long h3) { * Gets the resolution res integer digit (0-7) of h3. */ public static int H3_get_index_digit(long h3, int res) { - return ((int) ((((h3) >> ((Constants.MAX_H3_RES - (res)) * H3_PER_DIGIT_OFFSET)) & H3_DIGIT_MASK))); + return ((int) ((((h3) >> ((H3.MAX_H3_RES - (res)) * H3_PER_DIGIT_OFFSET)) & H3_DIGIT_MASK))); } /** * Sets the resolution res digit of h3 to the integer digit (0-7) */ public static long H3_set_index_digit(long h3, int res, long digit) { - int x = (Constants.MAX_H3_RES - res) * H3_PER_DIGIT_OFFSET; + int x = (H3.MAX_H3_RES - res) * H3_PER_DIGIT_OFFSET; return (((h3) & ~((H3_DIGIT_MASK << (x)))) | (((digit)) << x)); } diff --git a/libs/h3/src/test/java/org/elasticsearch/h3/CellBoundaryTests.java b/libs/h3/src/test/java/org/elasticsearch/h3/CellBoundaryTests.java index 903e4ed40ec16..00ca6f7021e3d 100644 --- a/libs/h3/src/test/java/org/elasticsearch/h3/CellBoundaryTests.java +++ b/libs/h3/src/test/java/org/elasticsearch/h3/CellBoundaryTests.java @@ -218,4 +218,22 @@ private boolean isSharedBoundary(int clon1, int clat1, int clon2, int clat2, Cel } return false; } + + public void testEqualsAndHashCode() { + final long h3 = H3.geoToH3(GeoTestUtil.nextLatitude(), GeoTestUtil.nextLongitude(), randomIntBetween(0, 15)); + final CellBoundary boundary1 = H3.h3ToGeoBoundary(h3); + final CellBoundary boundary2 = H3.h3ToGeoBoundary(h3); + assertEquals(boundary1, boundary2); + assertEquals(boundary1.hashCode(), boundary2.hashCode()); + + final long otherH3 = H3.geoToH3(GeoTestUtil.nextLatitude(), GeoTestUtil.nextLongitude(), randomIntBetween(0, 15)); + final CellBoundary otherCellBoundary = H3.h3ToGeoBoundary(otherH3); + if (otherH3 != h3) { + assertNotEquals(boundary1, otherCellBoundary); + assertNotEquals(boundary1.hashCode(), otherCellBoundary.hashCode()); + } else { + assertEquals(boundary1, otherCellBoundary); + 
assertEquals(boundary1.hashCode(), otherCellBoundary.hashCode()); + } + } } diff --git a/libs/h3/src/test/java/org/elasticsearch/h3/GeoToH3Tests.java b/libs/h3/src/test/java/org/elasticsearch/h3/GeoToH3Tests.java index cb7d416a5a9d3..3f2c329d9ff3c 100644 --- a/libs/h3/src/test/java/org/elasticsearch/h3/GeoToH3Tests.java +++ b/libs/h3/src/test/java/org/elasticsearch/h3/GeoToH3Tests.java @@ -38,7 +38,7 @@ public void testRandomPoints() { private void testPoint(double lat, double lon) { GeoPoint point = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat), Math.toRadians(lon)); - for (int res = 0; res < Constants.MAX_H3_RES; res++) { + for (int res = 0; res < H3.MAX_H3_RES; res++) { String h3Address = H3.geoToH3Address(lat, lon, res); assertEquals(res, H3.getResolution(h3Address)); GeoPolygon polygon = getGeoPolygon(h3Address); diff --git a/libs/h3/src/test/java/org/elasticsearch/h3/HexRingTests.java b/libs/h3/src/test/java/org/elasticsearch/h3/HexRingTests.java index 8fe5c6206fff8..864c0322cac90 100644 --- a/libs/h3/src/test/java/org/elasticsearch/h3/HexRingTests.java +++ b/libs/h3/src/test/java/org/elasticsearch/h3/HexRingTests.java @@ -38,7 +38,7 @@ public void testHexRing() { for (int i = 0; i < 500; i++) { double lat = GeoTestUtil.nextLatitude(); double lon = GeoTestUtil.nextLongitude(); - for (int res = 0; res <= Constants.MAX_H3_RES; res++) { + for (int res = 0; res <= H3.MAX_H3_RES; res++) { String origin = H3.geoToH3Address(lat, lon, res); assertFalse(H3.areNeighborCells(origin, origin)); String[] ring = H3.hexRing(origin); diff --git a/libs/simdvec/build.gradle b/libs/simdvec/build.gradle index 5a523a19d4b68..8b676a15038c1 100644 --- a/libs/simdvec/build.gradle +++ b/libs/simdvec/build.gradle @@ -23,6 +23,19 @@ dependencies { } } +tasks.named("compileMain21Java").configure { + options.compilerArgs << '--add-modules=jdk.incubator.vector' + // we remove Werror, since incubating suppression (-Xlint:-incubating) + // is only support since JDK 22 + options.compilerArgs -= '-Werror' +} + +test { + if (JavaVersion.current().majorVersion.toInteger() >= 21) { + jvmArgs '--add-modules=jdk.incubator.vector' + } +} + tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/simdvec/src/main/java/module-info.java b/libs/simdvec/src/main/java/module-info.java index 64e685ba3cbb5..44f6e39d5dbab 100644 --- a/libs/simdvec/src/main/java/module-info.java +++ b/libs/simdvec/src/main/java/module-info.java @@ -10,6 +10,7 @@ module org.elasticsearch.simdvec { requires org.elasticsearch.nativeaccess; requires org.apache.lucene.core; + requires org.elasticsearch.logging; exports org.elasticsearch.simdvec to org.elasticsearch.server; } diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java new file mode 100644 index 0000000000000..91193d5fa6eaf --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.simdvec; + +import org.elasticsearch.simdvec.internal.vectorization.ESVectorUtilSupport; +import org.elasticsearch.simdvec.internal.vectorization.ESVectorizationProvider; + +import static org.elasticsearch.simdvec.internal.vectorization.ESVectorUtilSupport.B_QUERY; + +public class ESVectorUtil { + + private static final ESVectorUtilSupport IMPL = ESVectorizationProvider.getInstance().getVectorUtilSupport(); + + public static long ipByteBinByte(byte[] q, byte[] d) { + if (q.length != d.length * B_QUERY) { + throw new IllegalArgumentException("vector dimensions incompatible: " + q.length + " != " + B_QUERY + " x " + d.length); + } + return IMPL.ipByteBinByte(q, d); + } +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java new file mode 100644 index 0000000000000..4a08096119d6a --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import org.apache.lucene.util.BitUtil; + +final class DefaultESVectorUtilSupport implements ESVectorUtilSupport { + + DefaultESVectorUtilSupport() {} + + @Override + public long ipByteBinByte(byte[] q, byte[] d) { + return ipByteBinByteImpl(q, d); + } + + public static long ipByteBinByteImpl(byte[] q, byte[] d) { + long ret = 0; + int size = d.length; + for (int i = 0; i < B_QUERY; i++) { + int r = 0; + long subRet = 0; + for (final int upperBound = d.length & -Integer.BYTES; r < upperBound; r += Integer.BYTES) { + subRet += Integer.bitCount((int) BitUtil.VH_NATIVE_INT.get(q, i * size + r) & (int) BitUtil.VH_NATIVE_INT.get(d, r)); + } + for (; r < d.length; r++) { + subRet += Integer.bitCount((q[i * size + r] & d[r]) & 0xFF); + } + ret += subRet << i; + } + return ret; + } +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java new file mode 100644 index 0000000000000..6c0f7ed146b86 --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.simdvec.internal.vectorization; + +final class DefaultESVectorizationProvider extends ESVectorizationProvider { + private final ESVectorUtilSupport vectorUtilSupport; + + DefaultESVectorizationProvider() { + vectorUtilSupport = new DefaultESVectorUtilSupport(); + } + + @Override + public ESVectorUtilSupport getVectorUtilSupport() { + return vectorUtilSupport; + } +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java new file mode 100644 index 0000000000000..d7611173ca693 --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +public interface ESVectorUtilSupport { + + short B_QUERY = 4; + + long ipByteBinByte(byte[] q, byte[] d); +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java new file mode 100644 index 0000000000000..e541c10e145bf --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import java.util.Objects; + +public abstract class ESVectorizationProvider { + + public static ESVectorizationProvider getInstance() { + return Objects.requireNonNull( + ESVectorizationProvider.Holder.INSTANCE, + "call to getInstance() from subclass of VectorizationProvider" + ); + } + + ESVectorizationProvider() {} + + public abstract ESVectorUtilSupport getVectorUtilSupport(); + + // visible for tests + static ESVectorizationProvider lookup(boolean testMode) { + return new DefaultESVectorizationProvider(); + } + + /** This static holder class prevents classloading deadlock. 
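Provider lookup runs only when {@code Holder} is first initialized, i.e. on the first call to {@link #getInstance()}.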
*/ + private static final class Holder { + private Holder() {} + + static final ESVectorizationProvider INSTANCE = lookup(false); + } +} diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java new file mode 100644 index 0000000000000..5b7aab7ddfa48 --- /dev/null +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import org.apache.lucene.util.Constants; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.util.Locale; +import java.util.Objects; +import java.util.Optional; + +public abstract class ESVectorizationProvider { + + protected static final Logger logger = LogManager.getLogger(ESVectorizationProvider.class); + + public static ESVectorizationProvider getInstance() { + return Objects.requireNonNull( + ESVectorizationProvider.Holder.INSTANCE, + "call to getInstance() from subclass of VectorizationProvider" + ); + } + + ESVectorizationProvider() {} + + public abstract ESVectorUtilSupport getVectorUtilSupport(); + + // visible for tests + static ESVectorizationProvider lookup(boolean testMode) { + final int runtimeVersion = Runtime.version().feature(); + assert runtimeVersion >= 21; + if (runtimeVersion <= 23) { + // only use vector module with Hotspot VM + if (Constants.IS_HOTSPOT_VM == false) { + logger.warn("Java runtime is not using Hotspot VM; Java vector incubator API can't be enabled."); + return new DefaultESVectorizationProvider(); + } + // is the incubator module present and readable (JVM providers may exclude it, or the runtime may be + // built without it via jlink) + final var vectorMod = lookupVectorModule(); + if (vectorMod.isEmpty()) { + logger.warn( + "Java vector incubator module is not readable. " + + "For optimal vector performance, pass '--add-modules jdk.incubator.vector' to enable Vector API." + ); + return new DefaultESVectorizationProvider(); + } + vectorMod.ifPresent(ESVectorizationProvider.class.getModule()::addReads); + var impl = new PanamaESVectorizationProvider(); + logger.info( + String.format( + Locale.ENGLISH, + "Java vector incubator API enabled; uses preferredBitSize=%d", + PanamaESVectorUtilSupport.VECTOR_BITSIZE + ) + ); + return impl; + } else { + logger.warn( + "You are running with unsupported Java " + + runtimeVersion + + ". To make full use of the Vector API, please update Elasticsearch." + ); + } + return new DefaultESVectorizationProvider(); + } + + private static Optional<Module> lookupVectorModule() { + return Optional.ofNullable(ESVectorizationProvider.class.getModule().getLayer()) + .orElse(ModuleLayer.boot()) + .findModule("jdk.incubator.vector"); + } + + /** This static holder class prevents classloading deadlock.
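The {@code lookup} logic above therefore runs at most once, when {@code Holder} is initialized by the first {@link #getInstance()} call.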
*/ + private static final class Holder { + private Holder() {} + + static final ESVectorizationProvider INSTANCE = lookup(false); + } +} diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java new file mode 100644 index 0000000000000..0e5827d046736 --- /dev/null +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import jdk.incubator.vector.ByteVector; +import jdk.incubator.vector.IntVector; +import jdk.incubator.vector.LongVector; +import jdk.incubator.vector.VectorOperators; +import jdk.incubator.vector.VectorShape; +import jdk.incubator.vector.VectorSpecies; + +import org.apache.lucene.util.Constants; + +public final class PanamaESVectorUtilSupport implements ESVectorUtilSupport { + + static final int VECTOR_BITSIZE; + + /** Whether integer vectors can be trusted to actually be fast. */ + static final boolean HAS_FAST_INTEGER_VECTORS; + + static { + // default to platform supported bitsize + VECTOR_BITSIZE = VectorShape.preferredShape().vectorBitSize(); + + // hotspot misses some SSE intrinsics, work around it + // to be fair, they do document this thing only works well with AVX2/AVX3 and Neon + boolean isAMD64withoutAVX2 = Constants.OS_ARCH.equals("amd64") && VECTOR_BITSIZE < 256; + HAS_FAST_INTEGER_VECTORS = isAMD64withoutAVX2 == false; + } + + @Override + public long ipByteBinByte(byte[] q, byte[] d) { + // 128 / 8 == 16 + if (d.length >= 16 && HAS_FAST_INTEGER_VECTORS) { + if (VECTOR_BITSIZE >= 256) { + return ipByteBin256(q, d); + } else if (VECTOR_BITSIZE == 128) { + return ipByteBin128(q, d); + } + } + return DefaultESVectorUtilSupport.ipByteBinByteImpl(q, d); + } + + private static final VectorSpecies<Byte> BYTE_SPECIES_128 = ByteVector.SPECIES_128; + private static final VectorSpecies<Byte> BYTE_SPECIES_256 = ByteVector.SPECIES_256; + + static long ipByteBin256(byte[] q, byte[] d) { + long subRet0 = 0; + long subRet1 = 0; + long subRet2 = 0; + long subRet3 = 0; + int i = 0; + + if (d.length >= ByteVector.SPECIES_256.vectorByteSize() * 2) { + int limit = ByteVector.SPECIES_256.loopBound(d.length); + var sum0 = LongVector.zero(LongVector.SPECIES_256); + var sum1 = LongVector.zero(LongVector.SPECIES_256); + var sum2 = LongVector.zero(LongVector.SPECIES_256); + var sum3 = LongVector.zero(LongVector.SPECIES_256); + for (; i < limit; i += ByteVector.SPECIES_256.length()) { + var vq0 = ByteVector.fromArray(BYTE_SPECIES_256, q, i).reinterpretAsLongs(); + var vq1 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + d.length).reinterpretAsLongs(); + var vq2 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + d.length * 2).reinterpretAsLongs(); + var vq3 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + d.length * 3).reinterpretAsLongs(); + var vd = ByteVector.fromArray(BYTE_SPECIES_256, d, i).reinterpretAsLongs(); + sum0 =
sum0.add(vq0.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum1 = sum1.add(vq1.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum2 = sum2.add(vq2.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum3 = sum3.add(vq3.and(vd).lanewise(VectorOperators.BIT_COUNT)); + } + subRet0 += sum0.reduceLanes(VectorOperators.ADD); + subRet1 += sum1.reduceLanes(VectorOperators.ADD); + subRet2 += sum2.reduceLanes(VectorOperators.ADD); + subRet3 += sum3.reduceLanes(VectorOperators.ADD); + } + + if (d.length - i >= ByteVector.SPECIES_128.vectorByteSize()) { + var sum0 = LongVector.zero(LongVector.SPECIES_128); + var sum1 = LongVector.zero(LongVector.SPECIES_128); + var sum2 = LongVector.zero(LongVector.SPECIES_128); + var sum3 = LongVector.zero(LongVector.SPECIES_128); + int limit = ByteVector.SPECIES_128.loopBound(d.length); + for (; i < limit; i += ByteVector.SPECIES_128.length()) { + var vq0 = ByteVector.fromArray(BYTE_SPECIES_128, q, i).reinterpretAsLongs(); + var vq1 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length).reinterpretAsLongs(); + var vq2 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length * 2).reinterpretAsLongs(); + var vq3 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length * 3).reinterpretAsLongs(); + var vd = ByteVector.fromArray(BYTE_SPECIES_128, d, i).reinterpretAsLongs(); + sum0 = sum0.add(vq0.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum1 = sum1.add(vq1.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum2 = sum2.add(vq2.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum3 = sum3.add(vq3.and(vd).lanewise(VectorOperators.BIT_COUNT)); + } + subRet0 += sum0.reduceLanes(VectorOperators.ADD); + subRet1 += sum1.reduceLanes(VectorOperators.ADD); + subRet2 += sum2.reduceLanes(VectorOperators.ADD); + subRet3 += sum3.reduceLanes(VectorOperators.ADD); + } + // tail as bytes + for (; i < d.length; i++) { + subRet0 += Integer.bitCount((q[i] & d[i]) & 0xFF); + subRet1 += Integer.bitCount((q[i + d.length] & d[i]) & 0xFF); + subRet2 += Integer.bitCount((q[i + 2 * d.length] & d[i]) & 0xFF); + subRet3 += Integer.bitCount((q[i + 3 * d.length] & d[i]) & 0xFF); + } + return subRet0 + (subRet1 << 1) + (subRet2 << 2) + (subRet3 << 3); + } + + public static long ipByteBin128(byte[] q, byte[] d) { + long subRet0 = 0; + long subRet1 = 0; + long subRet2 = 0; + long subRet3 = 0; + int i = 0; + + var sum0 = IntVector.zero(IntVector.SPECIES_128); + var sum1 = IntVector.zero(IntVector.SPECIES_128); + var sum2 = IntVector.zero(IntVector.SPECIES_128); + var sum3 = IntVector.zero(IntVector.SPECIES_128); + int limit = ByteVector.SPECIES_128.loopBound(d.length); + for (; i < limit; i += ByteVector.SPECIES_128.length()) { + var vd = ByteVector.fromArray(BYTE_SPECIES_128, d, i).reinterpretAsInts(); + var vq0 = ByteVector.fromArray(BYTE_SPECIES_128, q, i).reinterpretAsInts(); + var vq1 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length).reinterpretAsInts(); + var vq2 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length * 2).reinterpretAsInts(); + var vq3 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length * 3).reinterpretAsInts(); + sum0 = sum0.add(vd.and(vq0).lanewise(VectorOperators.BIT_COUNT)); + sum1 = sum1.add(vd.and(vq1).lanewise(VectorOperators.BIT_COUNT)); + sum2 = sum2.add(vd.and(vq2).lanewise(VectorOperators.BIT_COUNT)); + sum3 = sum3.add(vd.and(vq3).lanewise(VectorOperators.BIT_COUNT)); + } + subRet0 += sum0.reduceLanes(VectorOperators.ADD); + subRet1 += sum1.reduceLanes(VectorOperators.ADD); + subRet2 += sum2.reduceLanes(VectorOperators.ADD); + subRet3 += 
sum3.reduceLanes(VectorOperators.ADD); + // tail as bytes + for (; i < d.length; i++) { + int dValue = d[i]; + subRet0 += Integer.bitCount((dValue & q[i]) & 0xFF); + subRet1 += Integer.bitCount((dValue & q[i + d.length]) & 0xFF); + subRet2 += Integer.bitCount((dValue & q[i + 2 * d.length]) & 0xFF); + subRet3 += Integer.bitCount((dValue & q[i + 3 * d.length]) & 0xFF); + } + return subRet0 + (subRet1 << 1) + (subRet2 << 2) + (subRet3 << 3); + } +} diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java new file mode 100644 index 0000000000000..62d25d79487ed --- /dev/null +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +final class PanamaESVectorizationProvider extends ESVectorizationProvider { + + private final ESVectorUtilSupport vectorUtilSupport; + + PanamaESVectorizationProvider() { + vectorUtilSupport = new PanamaESVectorUtilSupport(); + } + + @Override + public ESVectorUtilSupport getVectorUtilSupport() { + return vectorUtilSupport; + } +} diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java new file mode 100644 index 0000000000000..0dbc41c0c1055 --- /dev/null +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.simdvec; + +import org.elasticsearch.simdvec.internal.vectorization.BaseVectorizationTests; +import org.elasticsearch.simdvec.internal.vectorization.ESVectorizationProvider; + +import java.util.Arrays; + +import static org.elasticsearch.simdvec.internal.vectorization.ESVectorUtilSupport.B_QUERY; + +public class ESVectorUtilTests extends BaseVectorizationTests { + + static final ESVectorizationProvider defaultedProvider = BaseVectorizationTests.defaultProvider(); + static final ESVectorizationProvider defOrPanamaProvider = BaseVectorizationTests.maybePanamaProvider(); + + public void testIpByteBinInvariants() { + int iterations = atLeast(10); + for (int i = 0; i < iterations; i++) { + int size = randomIntBetween(1, 10); + var d = new byte[size]; + var q = new byte[size * B_QUERY - 1]; + expectThrows(IllegalArgumentException.class, () -> ESVectorUtil.ipByteBinByte(q, d)); + } + } + + public void testBasicIpByteBin() { + testBasicIpByteBinImpl(ESVectorUtil::ipByteBinByte); + testBasicIpByteBinImpl(defaultedProvider.getVectorUtilSupport()::ipByteBinByte); + testBasicIpByteBinImpl(defOrPanamaProvider.getVectorUtilSupport()::ipByteBinByte); + } + + interface IpByteBin { + long apply(byte[] q, byte[] d); + } + + void testBasicIpByteBinImpl(IpByteBin ipByteBinFunc) { + assertEquals(15L, ipByteBinFunc.apply(new byte[] { 1, 1, 1, 1 }, new byte[] { 1 })); + assertEquals(30L, ipByteBinFunc.apply(new byte[] { 1, 2, 1, 2, 1, 2, 1, 2 }, new byte[] { 1, 2 })); + + var d = new byte[] { 1, 2, 3 }; + var q = new byte[] { 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3 }; + assert scalarIpByteBin(q, d) == 60L; // 4 + 8 + 16 + 32 + assertEquals(60L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4 }; + q = new byte[] { 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4 }; + assert scalarIpByteBin(q, d) == 75L; // 5 + 10 + 20 + 40 + assertEquals(75L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5 }; + q = new byte[] { 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4, 5 }; + assert scalarIpByteBin(q, d) == 105L; // 7 + 14 + 28 + 56 + assertEquals(105L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5, 6 }; + q = new byte[] { 1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6 }; + assert scalarIpByteBin(q, d) == 135L; // 9 + 18 + 36 + 72 + assertEquals(135L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5, 6, 7 }; + q = new byte[] { 1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 5, 6, 7 }; + assert scalarIpByteBin(q, d) == 180L; // 12 + 24 + 48 + 96 + assertEquals(180L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }; + q = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8 }; + assert scalarIpByteBin(q, d) == 195L; // 13 + 26 + 52 + 104 + assertEquals(195L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }; + q = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9 }; + assert scalarIpByteBin(q, d) == 225L; // 15 + 30 + 60 + 120 + assertEquals(225L, ipByteBinFunc.apply(q, d)); + } + + public void testIpByteBin() { + testIpByteBinImpl(ESVectorUtil::ipByteBinByte); + testIpByteBinImpl(defaultedProvider.getVectorUtilSupport()::ipByteBinByte); + testIpByteBinImpl(defOrPanamaProvider.getVectorUtilSupport()::ipByteBinByte); + } + + void testIpByteBinImpl(IpByteBin ipByteBinFunc) { + int iterations = atLeast(50); + for 
(int i = 0; i < iterations; i++) { + int size = random().nextInt(5000); + var d = new byte[size]; + var q = new byte[size * B_QUERY]; + random().nextBytes(d); + random().nextBytes(q); + assertEquals(scalarIpByteBin(q, d), ipByteBinFunc.apply(q, d)); + + Arrays.fill(d, Byte.MAX_VALUE); + Arrays.fill(q, Byte.MAX_VALUE); + assertEquals(scalarIpByteBin(q, d), ipByteBinFunc.apply(q, d)); + + Arrays.fill(d, Byte.MIN_VALUE); + Arrays.fill(q, Byte.MIN_VALUE); + assertEquals(scalarIpByteBin(q, d), ipByteBinFunc.apply(q, d)); + } + } + + static int scalarIpByteBin(byte[] q, byte[] d) { + int res = 0; + for (int i = 0; i < B_QUERY; i++) { + res += (popcount(q, i * d.length, d, d.length) << i); + } + return res; + } + + public static int popcount(byte[] a, int aOffset, byte[] b, int length) { + int res = 0; + for (int j = 0; j < length; j++) { + int value = (a[aOffset + j] & b[j]) & 0xFF; + for (int k = 0; k < Byte.SIZE; k++) { + if ((value & (1 << k)) != 0) { + ++res; + } + } + } + return res; + } +} diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/BaseVectorizationTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/BaseVectorizationTests.java new file mode 100644 index 0000000000000..f2bc8a11b04aa --- /dev/null +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/BaseVectorizationTests.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +public class BaseVectorizationTests extends ESTestCase { + + @Before + public void sanity() { + assert Runtime.version().feature() < 21 || ModuleLayer.boot().findModule("jdk.incubator.vector").isPresent(); + } + + public static ESVectorizationProvider defaultProvider() { + return new DefaultESVectorizationProvider(); + } + + public static ESVectorizationProvider maybePanamaProvider() { + return ESVectorizationProvider.lookup(true); + } +} diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 5455daf0a79ec..227557590731e 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.aggregations.bucket.histogram; -import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.aggregations.bucket.AggregationMultiBucketAggregationTestCase; import org.elasticsearch.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; @@ -28,7 +27,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.test.TransportVersionUtils; import java.io.IOException; import java.time.Instant; @@ -459,33 +457,6 @@ public void testCreateWithReplacementBuckets() { assertThat(copy.getInterval(), equalTo(orig.getInterval())); } - public void testSerializationPre830() throws IOException { - // we need to test without sub-aggregations, otherwise we need to also update the interval within the inner aggs - InternalAutoDateHistogram instance = createTestInstance( - randomAlphaOfLengthBetween(3, 7), - createTestMetadata(), - InternalAggregations.EMPTY - ); - TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_3_0) - ); - InternalAutoDateHistogram deserialized = copyInstance(instance, version); - assertEquals(1, deserialized.getBucketInnerInterval()); - - InternalAutoDateHistogram modified = new InternalAutoDateHistogram( - deserialized.getName(), - deserialized.getBuckets(), - deserialized.getTargetBuckets(), - deserialized.getBucketInfo(), - deserialized.getFormatter(), - deserialized.getMetadata(), - instance.getBucketInnerInterval() - ); - assertEqualInstances(instance, modified); - } - public void testReadFromPre830() throws IOException { byte[] bytes = Base64.getDecoder() .decode( diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml index d9298a832e650..82371c973407c 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml +++ 
b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml @@ -30,7 +30,7 @@ setup: cluster_features: "gte_v8.15.0" reason: fixed in 8.15.0 - do: - catch: /Cannot format stat \[sum\] with format \[DocValueFormat.DateTime\(format\[date_hour_minute_second_millis\] locale\[\], Z, MILLISECONDS\)\]/ + catch: /Cannot format stat \[sum\] with format \[DocValueFormat.DateTime\(format\[date_hour_minute_second_millis\] locale\[(en)?\], Z, MILLISECONDS\)\]/ search: index: test_date body: diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java index 515d07103bff8..8b29b1609711f 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java @@ -12,25 +12,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.logsdb.datageneration.DataGenerator; import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification; import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceHandler; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; -import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse; import org.elasticsearch.logsdb.datageneration.fields.PredefinedField; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.function.Consumer; public class DataGenerationHelper { - private final ObjectMapper.Subobjects subobjects; private final boolean keepArraySource; private final DataGenerator dataGenerator; @@ -40,44 +33,10 @@ public DataGenerationHelper() { } public DataGenerationHelper(Consumer builderConfigurator) { - // TODO enable subobjects: auto - // It is disabled because it currently does not have auto flattening and that results in asserts being triggered when using copy_to. 
- this.subobjects = ESTestCase.randomValueOtherThan( - ObjectMapper.Subobjects.AUTO, - () -> ESTestCase.randomFrom(ObjectMapper.Subobjects.values()) - ); this.keepArraySource = ESTestCase.randomBoolean(); - var specificationBuilder = DataGeneratorSpecification.builder().withFullyDynamicMapping(ESTestCase.randomBoolean()); - if (subobjects != ObjectMapper.Subobjects.ENABLED) { - specificationBuilder = specificationBuilder.withNestedFieldsLimit(0); - } - - specificationBuilder.withDataSourceHandlers(List.of(new DataSourceHandler() { - @Override - public DataSourceResponse.ObjectMappingParametersGenerator handle(DataSourceRequest.ObjectMappingParametersGenerator request) { - if (subobjects == ObjectMapper.Subobjects.ENABLED) { - // Use default behavior - return null; - } - - assert request.isNested() == false; - - // "enabled: false" is not compatible with subobjects: false - // "dynamic: false/strict/runtime" is not compatible with subobjects: false - return new DataSourceResponse.ObjectMappingParametersGenerator(() -> { - var parameters = new HashMap(); - parameters.put("subobjects", subobjects.toString()); - if (ESTestCase.randomBoolean()) { - parameters.put("dynamic", "true"); - } - if (ESTestCase.randomBoolean()) { - parameters.put("enabled", "true"); - } - return parameters; - }); - } - })) + var specificationBuilder = DataGeneratorSpecification.builder() + .withFullyDynamicMapping(ESTestCase.randomBoolean()) .withPredefinedFields( List.of( // Customized because it always needs doc_values for aggregations. @@ -136,11 +95,7 @@ void logsDbMapping(XContentBuilder builder) throws IOException { } void standardMapping(XContentBuilder builder) throws IOException { - if (subobjects != ObjectMapper.Subobjects.ENABLED) { - dataGenerator.writeMapping(builder, Map.of("subobjects", subobjects.toString())); - } else { - dataGenerator.writeMapping(builder); - } + dataGenerator.writeMapping(builder); } void logsDbSettings(Settings.Builder builder) { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java index da55376fb403b..3456f4b679474 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.datastreams.GetDataStreamAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.set.Sets; @@ -35,14 +36,14 @@ public class RestGetDataStreamsAction extends BaseRestHandler { Set.of( "name", "include_defaults", - "timeout", "master_timeout", IndicesOptions.WildcardOptions.EXPAND_WILDCARDS, IndicesOptions.ConcreteTargetOptions.IGNORE_UNAVAILABLE, IndicesOptions.WildcardOptions.ALLOW_NO_INDICES, IndicesOptions.GatekeeperOptions.IGNORE_THROTTLED, "verbose" - ) + ), + DataStream.isFailureStoreFeatureFlagEnabled() ? 
Set.of(IndicesOptions.FailureStoreOptions.FAILURE_STORE) : Set.of() ) ); diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index b50fc86282d1f..4312221b33937 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -88,3 +88,8 @@ tasks.named("dependencyLicenses").configure { artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } + +tasks.named("yamlRestCompatTestTransform").configure({ task -> + task.skipTest("ingest_geoip/40_geoip_databases/Test adding, getting, and removing geoip databases", + "get databases behavior began returning more results in 8.16") +}) diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java index 73d8976c3a4b7..786f091e0c024 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java @@ -84,7 +84,7 @@ private void assertValidDatabase(DatabaseNodeService databaseNodeService, String IpDatabase database = databaseNodeService.getDatabase(databaseFileName); assertNotNull(database); assertThat(database.getDatabaseType(), equalTo(databaseType)); - CountryResponse countryResponse = database.getCountry("89.160.20.128"); + CountryResponse countryResponse = database.getResponse("89.160.20.128", GeoIpTestUtils::getCountry); assertNotNull(countryResponse); Country country = countryResponse.getCountry(); assertNotNull(country); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java index 2c7d5fbcc56b7..b28926673069d 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java @@ -205,10 +205,10 @@ private static DatabaseNodeService createRegistry(Path geoIpConfigDir, Path geoI private static void lazyLoadReaders(DatabaseNodeService databaseNodeService) throws IOException { if (databaseNodeService.get("GeoLite2-City.mmdb") != null) { databaseNodeService.get("GeoLite2-City.mmdb").getDatabaseType(); - databaseNodeService.get("GeoLite2-City.mmdb").getCity("2.125.160.216"); + databaseNodeService.get("GeoLite2-City.mmdb").getResponse("2.125.160.216", GeoIpTestUtils::getCity); } databaseNodeService.get("GeoLite2-City-Test.mmdb").getDatabaseType(); - databaseNodeService.get("GeoLite2-City-Test.mmdb").getCity("2.125.160.216"); + databaseNodeService.get("GeoLite2-City-Test.mmdb").getResponse("2.125.160.216", GeoIpTestUtils::getCity); } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java index dccda0d58cfbf..52ca5eea52c1a 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java @@ -9,7 +9,6 @@ package org.elasticsearch.ingest.geoip; -import 
org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import java.util.Arrays; @@ -19,12 +18,10 @@ import java.util.Set; /** - * A high-level representation of a kind of geoip database that is supported by the {@link GeoIpProcessor}. + * A high-level representation of a kind of ip location database that is supported by the {@link GeoIpProcessor}. * <p> * A database has a set of properties that are valid to use with it (see {@link Database#properties()}), * as well as a list of default properties to use if no properties are specified (see {@link Database#defaultProperties()}). - * <p> - * See especially {@link Database#getDatabase(String, String)} which is used to obtain instances of this class. */ enum Database { @@ -142,61 +139,6 @@ enum Database { ) ); - private static final String CITY_DB_SUFFIX = "-City"; - private static final String COUNTRY_DB_SUFFIX = "-Country"; - private static final String ASN_DB_SUFFIX = "-ASN"; - private static final String ANONYMOUS_IP_DB_SUFFIX = "-Anonymous-IP"; - private static final String CONNECTION_TYPE_DB_SUFFIX = "-Connection-Type"; - private static final String DOMAIN_DB_SUFFIX = "-Domain"; - private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise"; - private static final String ISP_DB_SUFFIX = "-ISP"; - - @Nullable - private static Database getMaxmindDatabase(final String databaseType) { - if (databaseType.endsWith(Database.CITY_DB_SUFFIX)) { - return Database.City; - } else if (databaseType.endsWith(Database.COUNTRY_DB_SUFFIX)) { - return Database.Country; - } else if (databaseType.endsWith(Database.ASN_DB_SUFFIX)) { - return Database.Asn; - } else if (databaseType.endsWith(Database.ANONYMOUS_IP_DB_SUFFIX)) { - return Database.AnonymousIp; - } else if (databaseType.endsWith(Database.CONNECTION_TYPE_DB_SUFFIX)) { - return Database.ConnectionType; - } else if (databaseType.endsWith(Database.DOMAIN_DB_SUFFIX)) { - return Database.Domain; - } else if (databaseType.endsWith(Database.ENTERPRISE_DB_SUFFIX)) { - return Database.Enterprise; - } else if (databaseType.endsWith(Database.ISP_DB_SUFFIX)) { - return Database.Isp; - } else { - return null; // no match was found - } - } - - /** - * Parses the passed-in databaseType (presumably from the passed-in databaseFile) and return the Database instance that is - * associated with that databaseType. - * - * @param databaseType the database type String from the metadata of the database file - * @param databaseFile the database file from which the database type was obtained - * @throws IllegalArgumentException if the databaseType is not associated with a Database instance - * @return the Database instance that is associated with the databaseType - */ - public static Database getDatabase(final String databaseType, final String databaseFile) { - Database database = null; - - if (Strings.hasText(databaseType)) { - database = getMaxmindDatabase(databaseType); - } - - if (database == null) { - throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]"); - } - - return database; - } - private final Set<Property> properties; private final Set<Property> defaultProperties; diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java index ce15e02e6efcc..940231b12c894 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java @@ -20,11 +20,13 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import
org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; @@ -37,6 +39,7 @@ import org.elasticsearch.watcher.ResourceWatcherService; import java.io.Closeable; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.FileAlreadyExistsException; @@ -51,8 +54,10 @@ import java.security.MessageDigest; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -541,6 +546,35 @@ public Set getConfigDatabases() { return configDatabases.getConfigDatabases().keySet(); } + public Map getConfigDatabasesDetail() { + Map allDatabases = new HashMap<>(); + for (Map.Entry entry : configDatabases.getConfigDatabases().entrySet()) { + DatabaseReaderLazyLoader databaseReaderLazyLoader = entry.getValue(); + try { + allDatabases.put( + entry.getKey(), + new ConfigDatabaseDetail( + entry.getKey(), + databaseReaderLazyLoader.getMd5(), + databaseReaderLazyLoader.getBuildDateMillis(), + databaseReaderLazyLoader.getDatabaseType() + ) + ); + } catch (FileNotFoundException e) { + /* + * Since there is nothing to prevent a database from being deleted while this method is running, it is possible we get an + * exception here because the file no longer exists. We just log it and move on -- it's preferable to synchronization. + */ + logger.trace(Strings.format("Unable to get metadata for config database %s", entry.getKey()), e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + return allDatabases; + } + + public record ConfigDatabaseDetail(String name, @Nullable String md5, @Nullable Long buildDateInMillis, @Nullable String type) {} + public Set getFilesInTemp() { try (Stream files = Files.list(geoipTmpDirectory)) { return files.map(Path::getFileName).map(Path::toString).collect(Collectors.toSet()); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java index dff083ea0cde8..120afe0e9e815 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java @@ -9,18 +9,8 @@ package org.elasticsearch.ingest.geoip; -import com.maxmind.db.DatabaseRecord; -import com.maxmind.db.Network; import com.maxmind.db.NoCache; import com.maxmind.db.Reader; -import com.maxmind.geoip2.model.AnonymousIpResponse; -import com.maxmind.geoip2.model.AsnResponse; -import com.maxmind.geoip2.model.CityResponse; -import com.maxmind.geoip2.model.ConnectionTypeResponse; -import com.maxmind.geoip2.model.CountryResponse; -import com.maxmind.geoip2.model.DomainResponse; -import com.maxmind.geoip2.model.EnterpriseResponse; -import com.maxmind.geoip2.model.IspResponse; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -28,8 +18,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; @@ -37,19 +25,16 @@ import 
java.io.File; import java.io.IOException; -import java.net.InetAddress; import java.nio.file.Files; import java.nio.file.Path; -import java.util.List; import java.util.Objects; -import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; /** * Facilitates lazy loading of the database reader, so that when the geoip plugin is installed, but not used, * no memory is being wasted on the database reader. */ -class DatabaseReaderLazyLoader implements IpDatabase { +public class DatabaseReaderLazyLoader implements IpDatabase { private static final boolean LOAD_DATABASE_ON_HEAP = Booleans.parseBoolean(System.getProperty("es.geoip.load_db_on_heap", "false")); @@ -63,6 +48,7 @@ class DatabaseReaderLazyLoader implements IpDatabase { // cache the database type so that we do not re-read it on every pipeline execution final SetOnce databaseType; + final SetOnce buildDate; private volatile boolean deleteDatabaseFileOnShutdown; private final AtomicInteger currentUsages = new AtomicInteger(0); @@ -74,6 +60,7 @@ class DatabaseReaderLazyLoader implements IpDatabase { this.loader = createDatabaseLoader(databasePath); this.databaseReader = new SetOnce<>(); this.databaseType = new SetOnce<>(); + this.buildDate = new SetOnce<>(); } /** @@ -94,94 +81,6 @@ public final String getDatabaseType() throws IOException { return databaseType.get(); } - @Nullable - @Override - public CityResponse getCity(String ipAddress) { - return getResponse(ipAddress, (reader, ip) -> lookup(reader, ip, CityResponse.class, CityResponse::new)); - } - - @Nullable - @Override - public CountryResponse getCountry(String ipAddress) { - return getResponse(ipAddress, (reader, ip) -> lookup(reader, ip, CountryResponse.class, CountryResponse::new)); - } - - @Nullable - @Override - public AsnResponse getAsn(String ipAddress) { - return getResponse( - ipAddress, - (reader, ip) -> lookup( - reader, - ip, - AsnResponse.class, - (response, responseIp, network, locales) -> new AsnResponse(response, responseIp, network) - ) - ); - } - - @Nullable - @Override - public AnonymousIpResponse getAnonymousIp(String ipAddress) { - return getResponse( - ipAddress, - (reader, ip) -> lookup( - reader, - ip, - AnonymousIpResponse.class, - (response, responseIp, network, locales) -> new AnonymousIpResponse(response, responseIp, network) - ) - ); - } - - @Nullable - @Override - public ConnectionTypeResponse getConnectionType(String ipAddress) { - return getResponse( - ipAddress, - (reader, ip) -> lookup( - reader, - ip, - ConnectionTypeResponse.class, - (response, responseIp, network, locales) -> new ConnectionTypeResponse(response, responseIp, network) - ) - ); - } - - @Nullable - @Override - public DomainResponse getDomain(String ipAddress) { - return getResponse( - ipAddress, - (reader, ip) -> lookup( - reader, - ip, - DomainResponse.class, - (response, responseIp, network, locales) -> new DomainResponse(response, responseIp, network) - ) - ); - } - - @Nullable - @Override - public EnterpriseResponse getEnterprise(String ipAddress) { - return getResponse(ipAddress, (reader, ip) -> lookup(reader, ip, EnterpriseResponse.class, EnterpriseResponse::new)); - } - - @Nullable - @Override - public IspResponse getIsp(String ipAddress) { - return getResponse( - ipAddress, - (reader, ip) -> lookup( - reader, - ip, - IspResponse.class, - (response, responseIp, network, locales) -> new IspResponse(response, responseIp, network) - ) - ); - } - boolean preLookup() { return currentUsages.updateAndGet(current -> current < 0 ? 
current : current + 1) > 0; } @@ -197,14 +96,12 @@ int current() { return currentUsages.get(); } + @Override @Nullable - private <RESPONSE> RESPONSE getResponse( - String ipAddress, - CheckedBiFunction<Reader, String, Optional<RESPONSE>, Exception> responseProvider - ) { + public <RESPONSE> RESPONSE getResponse(String ipAddress, CheckedBiFunction<Reader, String, RESPONSE, Exception> responseProvider) { return cache.putIfAbsent(ipAddress, databasePath.toString(), ip -> { try { - return responseProvider.apply(get(), ipAddress).orElse(null); + return responseProvider.apply(get(), ipAddress); } catch (Exception e) { throw ExceptionsHelper.convertToRuntime(e); } @@ -261,20 +158,14 @@ private static File pathToFile(Path databasePath) { return databasePath.toFile(); } - @FunctionalInterface - private interface ResponseBuilder<RESPONSE> { - RESPONSE build(RESPONSE response, String responseIp, Network network, List<String> locales); - } - - private <RESPONSE> Optional<RESPONSE> lookup(Reader reader, String ip, Class<RESPONSE> clazz, ResponseBuilder<RESPONSE> builder) - throws IOException { - InetAddress inetAddress = InetAddresses.forString(ip); - DatabaseRecord<RESPONSE> record = reader.getRecord(inetAddress, clazz); - RESPONSE result = record.getData(); - if (result == null) { - return Optional.empty(); - } else { - return Optional.of(builder.build(result, NetworkAddress.format(inetAddress), record.getNetwork(), List.of("en"))); + long getBuildDateMillis() throws IOException { + if (buildDate.get() == null) { + synchronized (buildDate) { + if (buildDate.get() == null) { + buildDate.set(loader.get().getMetadata().getBuildDate().getTime()); + } + } } + return buildDate.get(); } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java index acc51c1bb0b53..fa46540e29f7a 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java @@ -444,7 +444,9 @@ private void scheduleNextRun(TimeValue time) { } private ProviderDownload downloaderFor(DatabaseConfiguration database) { - return new MaxmindDownload(database.name(), database.maxmind()); + assert database.provider() instanceof DatabaseConfiguration.Maxmind : "Attempt to use maxmind downloader with a provider of type " + database.provider().getClass(); + return new MaxmindDownload(database.name(), (DatabaseConfiguration.Maxmind) database.provider()); } class MaxmindDownload implements ProviderDownload { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java index 335331ac0ab9d..d9c9c3aaf3266 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java @@ -26,7 +26,7 @@ * cost of deserialization for each lookup (cached or not). This comes at slight expense of higher memory usage, but significant * reduction of CPU usage. */ -final class GeoIpCache { +public final class GeoIpCache { /** * Internal-only sentinel object for recording that a result from the geoip database was null (i.e. there was no result).
By caching diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index ce160b060ae4c..e2b516bf5b943 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -9,23 +9,6 @@ package org.elasticsearch.ingest.geoip; -import com.maxmind.db.Network; -import com.maxmind.geoip2.model.AnonymousIpResponse; -import com.maxmind.geoip2.model.AsnResponse; -import com.maxmind.geoip2.model.CityResponse; -import com.maxmind.geoip2.model.ConnectionTypeResponse; -import com.maxmind.geoip2.model.ConnectionTypeResponse.ConnectionType; -import com.maxmind.geoip2.model.CountryResponse; -import com.maxmind.geoip2.model.DomainResponse; -import com.maxmind.geoip2.model.EnterpriseResponse; -import com.maxmind.geoip2.model.IspResponse; -import com.maxmind.geoip2.record.City; -import com.maxmind.geoip2.record.Continent; -import com.maxmind.geoip2.record.Country; -import com.maxmind.geoip2.record.Location; -import com.maxmind.geoip2.record.Subdivision; - -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; @@ -34,10 +17,10 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.geoip.Database.Property; +import org.elasticsearch.ingest.geoip.IpDataLookupFactories.IpDataLookupFactory; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -60,7 +43,7 @@ public final class GeoIpProcessor extends AbstractProcessor { private final Supplier isValid; private final String targetField; private final CheckedSupplier supplier; - private final Set properties; + private final IpDataLookup ipDataLookup; private final boolean ignoreMissing; private final boolean firstOnly; private final String databaseFile; @@ -73,7 +56,7 @@ public final class GeoIpProcessor extends AbstractProcessor { * @param supplier a supplier of a geo-IP database reader; ideally this is lazily-loaded once on first use * @param isValid a supplier that determines if the available database files are up-to-date and license compliant * @param targetField the target field - * @param properties the properties; ideally this is lazily-loaded once on first use + * @param ipDataLookup a lookup capable of retrieving a result from an available geo-IP database reader * @param ignoreMissing true if documents with a missing value for the field should be ignored * @param firstOnly true if only first result should be returned in case of array * @param databaseFile the name of the database file being queried; used only for tagging documents if the database is unavailable @@ -85,7 +68,7 @@ public final class GeoIpProcessor extends AbstractProcessor { final CheckedSupplier supplier, final Supplier isValid, final String targetField, - final Set properties, + final IpDataLookup ipDataLookup, final boolean ignoreMissing, final boolean firstOnly, final String databaseFile @@ -95,7 +78,7 @@ public final class GeoIpProcessor extends AbstractProcessor { this.isValid = isValid; this.targetField = targetField; this.supplier = supplier; - this.properties = properties; + this.ipDataLookup = 
ipDataLookup; this.ignoreMissing = ignoreMissing; this.firstOnly = firstOnly; this.databaseFile = databaseFile; @@ -127,7 +110,7 @@ public IngestDocument execute(IngestDocument document) throws IOException { } if (ip instanceof String ipString) { - Map data = getGeoData(ipDatabase, ipString); + Map data = ipDataLookup.getData(ipDatabase, ipString); if (data.isEmpty() == false) { document.setFieldValue(targetField, data); } @@ -138,7 +121,7 @@ public IngestDocument execute(IngestDocument document) throws IOException { if (ipAddr instanceof String == false) { throw new IllegalArgumentException("array in field [" + field + "] should only contain strings"); } - Map data = getGeoData(ipDatabase, (String) ipAddr); + Map data = ipDataLookup.getData(ipDatabase, (String) ipAddr); if (data.isEmpty()) { dataList.add(null); continue; @@ -161,26 +144,6 @@ public IngestDocument execute(IngestDocument document) throws IOException { return document; } - private Map getGeoData(IpDatabase ipDatabase, String ipAddress) throws IOException { - final String databaseType = ipDatabase.getDatabaseType(); - final Database database; - try { - database = Database.getDatabase(databaseType, databaseFile); - } catch (IllegalArgumentException e) { - throw new ElasticsearchParseException(e.getMessage(), e); - } - return switch (database) { - case City -> retrieveCityGeoData(ipDatabase, ipAddress); - case Country -> retrieveCountryGeoData(ipDatabase, ipAddress); - case Asn -> retrieveAsnGeoData(ipDatabase, ipAddress); - case AnonymousIp -> retrieveAnonymousIpGeoData(ipDatabase, ipAddress); - case ConnectionType -> retrieveConnectionTypeGeoData(ipDatabase, ipAddress); - case Domain -> retrieveDomainGeoData(ipDatabase, ipAddress); - case Enterprise -> retrieveEnterpriseGeoData(ipDatabase, ipAddress); - case Isp -> retrieveIspGeoData(ipDatabase, ipAddress); - }; - } - @Override public String getType() { return TYPE; @@ -199,478 +162,7 @@ String getDatabaseType() throws IOException { } Set getProperties() { - return properties; - } - - private Map retrieveCityGeoData(IpDatabase ipDatabase, String ipAddress) { - CityResponse response = ipDatabase.getCity(ipAddress); - if (response == null) { - return Map.of(); - } - Country country = response.getCountry(); - City city = response.getCity(); - Location location = response.getLocation(); - Continent continent = response.getContinent(); - Subdivision subdivision = response.getMostSpecificSubdivision(); - - Map geoData = new HashMap<>(); - for (Property property : this.properties) { - switch (property) { - case IP -> geoData.put("ip", response.getTraits().getIpAddress()); - case COUNTRY_ISO_CODE -> { - String countryIsoCode = country.getIsoCode(); - if (countryIsoCode != null) { - geoData.put("country_iso_code", countryIsoCode); - } - } - case COUNTRY_NAME -> { - String countryName = country.getName(); - if (countryName != null) { - geoData.put("country_name", countryName); - } - } - case CONTINENT_CODE -> { - String continentCode = continent.getCode(); - if (continentCode != null) { - geoData.put("continent_code", continentCode); - } - } - case CONTINENT_NAME -> { - String continentName = continent.getName(); - if (continentName != null) { - geoData.put("continent_name", continentName); - } - } - case REGION_ISO_CODE -> { - // ISO 3166-2 code for country subdivisions. 
- // See iso.org/iso-3166-country-codes.html - String countryIso = country.getIsoCode(); - String subdivisionIso = subdivision.getIsoCode(); - if (countryIso != null && subdivisionIso != null) { - String regionIsoCode = countryIso + "-" + subdivisionIso; - geoData.put("region_iso_code", regionIsoCode); - } - } - case REGION_NAME -> { - String subdivisionName = subdivision.getName(); - if (subdivisionName != null) { - geoData.put("region_name", subdivisionName); - } - } - case CITY_NAME -> { - String cityName = city.getName(); - if (cityName != null) { - geoData.put("city_name", cityName); - } - } - case TIMEZONE -> { - String locationTimeZone = location.getTimeZone(); - if (locationTimeZone != null) { - geoData.put("timezone", locationTimeZone); - } - } - case LOCATION -> { - Double latitude = location.getLatitude(); - Double longitude = location.getLongitude(); - if (latitude != null && longitude != null) { - Map locationObject = new HashMap<>(); - locationObject.put("lat", latitude); - locationObject.put("lon", longitude); - geoData.put("location", locationObject); - } - } - } - } - return geoData; - } - - private Map retrieveCountryGeoData(IpDatabase ipDatabase, String ipAddress) { - CountryResponse response = ipDatabase.getCountry(ipAddress); - if (response == null) { - return Map.of(); - } - Country country = response.getCountry(); - Continent continent = response.getContinent(); - - Map geoData = new HashMap<>(); - for (Property property : this.properties) { - switch (property) { - case IP -> geoData.put("ip", response.getTraits().getIpAddress()); - case COUNTRY_ISO_CODE -> { - String countryIsoCode = country.getIsoCode(); - if (countryIsoCode != null) { - geoData.put("country_iso_code", countryIsoCode); - } - } - case COUNTRY_NAME -> { - String countryName = country.getName(); - if (countryName != null) { - geoData.put("country_name", countryName); - } - } - case CONTINENT_CODE -> { - String continentCode = continent.getCode(); - if (continentCode != null) { - geoData.put("continent_code", continentCode); - } - } - case CONTINENT_NAME -> { - String continentName = continent.getName(); - if (continentName != null) { - geoData.put("continent_name", continentName); - } - } - } - } - return geoData; - } - - private Map retrieveAsnGeoData(IpDatabase ipDatabase, String ipAddress) { - AsnResponse response = ipDatabase.getAsn(ipAddress); - if (response == null) { - return Map.of(); - } - Long asn = response.getAutonomousSystemNumber(); - String organizationName = response.getAutonomousSystemOrganization(); - Network network = response.getNetwork(); - - Map geoData = new HashMap<>(); - for (Property property : this.properties) { - switch (property) { - case IP -> geoData.put("ip", response.getIpAddress()); - case ASN -> { - if (asn != null) { - geoData.put("asn", asn); - } - } - case ORGANIZATION_NAME -> { - if (organizationName != null) { - geoData.put("organization_name", organizationName); - } - } - case NETWORK -> { - if (network != null) { - geoData.put("network", network.toString()); - } - } - } - } - return geoData; - } - - private Map retrieveAnonymousIpGeoData(IpDatabase ipDatabase, String ipAddress) { - AnonymousIpResponse response = ipDatabase.getAnonymousIp(ipAddress); - if (response == null) { - return Map.of(); - } - - boolean isHostingProvider = response.isHostingProvider(); - boolean isTorExitNode = response.isTorExitNode(); - boolean isAnonymousVpn = response.isAnonymousVpn(); - boolean isAnonymous = response.isAnonymous(); - boolean isPublicProxy = response.isPublicProxy(); 
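Each of the retrieve* helpers deleted in this stretch reappears, nearly verbatim, as a transform method of the MaxmindIpDataLookups class introduced later in this diff; the processor itself now depends only on the narrow IpDataLookup seam. A minimal sketch of that seam (FixedIpDataLookup is hypothetical, invented here for illustration, e.g. as a test double):

```java
package org.elasticsearch.ingest.geoip;

import java.util.Map;
import java.util.Set;

// Hypothetical stand-in for a real database lookup (not part of this change):
// it ignores the database argument and returns a fixed result, which is enough
// for GeoIpProcessor.execute() to exercise its single-value and array handling.
class FixedIpDataLookup implements IpDataLookup {
    @Override
    public Map<String, Object> getData(IpDatabase ipDatabase, String ip) {
        return Map.of("ip", ip, "country_iso_code", "US");
    }

    @Override
    public Set<Database.Property> getProperties() {
        return Set.of(Database.Property.IP, Database.Property.COUNTRY_ISO_CODE);
    }
}
```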
- boolean isResidentialProxy = response.isResidentialProxy(); - - Map geoData = new HashMap<>(); - for (Property property : this.properties) { - switch (property) { - case IP -> geoData.put("ip", response.getIpAddress()); - case HOSTING_PROVIDER -> { - geoData.put("hosting_provider", isHostingProvider); - } - case TOR_EXIT_NODE -> { - geoData.put("tor_exit_node", isTorExitNode); - } - case ANONYMOUS_VPN -> { - geoData.put("anonymous_vpn", isAnonymousVpn); - } - case ANONYMOUS -> { - geoData.put("anonymous", isAnonymous); - } - case PUBLIC_PROXY -> { - geoData.put("public_proxy", isPublicProxy); - } - case RESIDENTIAL_PROXY -> { - geoData.put("residential_proxy", isResidentialProxy); - } - } - } - return geoData; - } - - private Map retrieveConnectionTypeGeoData(IpDatabase ipDatabase, String ipAddress) { - ConnectionTypeResponse response = ipDatabase.getConnectionType(ipAddress); - if (response == null) { - return Map.of(); - } - - ConnectionType connectionType = response.getConnectionType(); - - Map geoData = new HashMap<>(); - for (Property property : this.properties) { - switch (property) { - case IP -> geoData.put("ip", response.getIpAddress()); - case CONNECTION_TYPE -> { - if (connectionType != null) { - geoData.put("connection_type", connectionType.toString()); - } - } - } - } - return geoData; - } - - private Map retrieveDomainGeoData(IpDatabase ipDatabase, String ipAddress) { - DomainResponse response = ipDatabase.getDomain(ipAddress); - if (response == null) { - return Map.of(); - } - - String domain = response.getDomain(); - - Map geoData = new HashMap<>(); - for (Property property : this.properties) { - switch (property) { - case IP -> geoData.put("ip", response.getIpAddress()); - case DOMAIN -> { - if (domain != null) { - geoData.put("domain", domain); - } - } - } - } - return geoData; - } - - private Map retrieveEnterpriseGeoData(IpDatabase ipDatabase, String ipAddress) { - EnterpriseResponse response = ipDatabase.getEnterprise(ipAddress); - if (response == null) { - return Map.of(); - } - - Country country = response.getCountry(); - City city = response.getCity(); - Location location = response.getLocation(); - Continent continent = response.getContinent(); - Subdivision subdivision = response.getMostSpecificSubdivision(); - - Long asn = response.getTraits().getAutonomousSystemNumber(); - String organizationName = response.getTraits().getAutonomousSystemOrganization(); - Network network = response.getTraits().getNetwork(); - - String isp = response.getTraits().getIsp(); - String ispOrganization = response.getTraits().getOrganization(); - String mobileCountryCode = response.getTraits().getMobileCountryCode(); - String mobileNetworkCode = response.getTraits().getMobileNetworkCode(); - - boolean isHostingProvider = response.getTraits().isHostingProvider(); - boolean isTorExitNode = response.getTraits().isTorExitNode(); - boolean isAnonymousVpn = response.getTraits().isAnonymousVpn(); - boolean isAnonymous = response.getTraits().isAnonymous(); - boolean isPublicProxy = response.getTraits().isPublicProxy(); - boolean isResidentialProxy = response.getTraits().isResidentialProxy(); - - String userType = response.getTraits().getUserType(); - - String domain = response.getTraits().getDomain(); - - ConnectionType connectionType = response.getTraits().getConnectionType(); - - Map geoData = new HashMap<>(); - for (Property property : this.properties) { - switch (property) { - case IP -> geoData.put("ip", response.getTraits().getIpAddress()); - case COUNTRY_ISO_CODE -> { - String 
countryIsoCode = country.getIsoCode(); - if (countryIsoCode != null) { - geoData.put("country_iso_code", countryIsoCode); - } - } - case COUNTRY_NAME -> { - String countryName = country.getName(); - if (countryName != null) { - geoData.put("country_name", countryName); - } - } - case CONTINENT_CODE -> { - String continentCode = continent.getCode(); - if (continentCode != null) { - geoData.put("continent_code", continentCode); - } - } - case CONTINENT_NAME -> { - String continentName = continent.getName(); - if (continentName != null) { - geoData.put("continent_name", continentName); - } - } - case REGION_ISO_CODE -> { - // ISO 3166-2 code for country subdivisions. - // See iso.org/iso-3166-country-codes.html - String countryIso = country.getIsoCode(); - String subdivisionIso = subdivision.getIsoCode(); - if (countryIso != null && subdivisionIso != null) { - String regionIsoCode = countryIso + "-" + subdivisionIso; - geoData.put("region_iso_code", regionIsoCode); - } - } - case REGION_NAME -> { - String subdivisionName = subdivision.getName(); - if (subdivisionName != null) { - geoData.put("region_name", subdivisionName); - } - } - case CITY_NAME -> { - String cityName = city.getName(); - if (cityName != null) { - geoData.put("city_name", cityName); - } - } - case TIMEZONE -> { - String locationTimeZone = location.getTimeZone(); - if (locationTimeZone != null) { - geoData.put("timezone", locationTimeZone); - } - } - case LOCATION -> { - Double latitude = location.getLatitude(); - Double longitude = location.getLongitude(); - if (latitude != null && longitude != null) { - Map locationObject = new HashMap<>(); - locationObject.put("lat", latitude); - locationObject.put("lon", longitude); - geoData.put("location", locationObject); - } - } - case ASN -> { - if (asn != null) { - geoData.put("asn", asn); - } - } - case ORGANIZATION_NAME -> { - if (organizationName != null) { - geoData.put("organization_name", organizationName); - } - } - case NETWORK -> { - if (network != null) { - geoData.put("network", network.toString()); - } - } - case HOSTING_PROVIDER -> { - geoData.put("hosting_provider", isHostingProvider); - } - case TOR_EXIT_NODE -> { - geoData.put("tor_exit_node", isTorExitNode); - } - case ANONYMOUS_VPN -> { - geoData.put("anonymous_vpn", isAnonymousVpn); - } - case ANONYMOUS -> { - geoData.put("anonymous", isAnonymous); - } - case PUBLIC_PROXY -> { - geoData.put("public_proxy", isPublicProxy); - } - case RESIDENTIAL_PROXY -> { - geoData.put("residential_proxy", isResidentialProxy); - } - case DOMAIN -> { - if (domain != null) { - geoData.put("domain", domain); - } - } - case ISP -> { - if (isp != null) { - geoData.put("isp", isp); - } - } - case ISP_ORGANIZATION_NAME -> { - if (ispOrganization != null) { - geoData.put("isp_organization_name", ispOrganization); - } - } - case MOBILE_COUNTRY_CODE -> { - if (mobileCountryCode != null) { - geoData.put("mobile_country_code", mobileCountryCode); - } - } - case MOBILE_NETWORK_CODE -> { - if (mobileNetworkCode != null) { - geoData.put("mobile_network_code", mobileNetworkCode); - } - } - case USER_TYPE -> { - if (userType != null) { - geoData.put("user_type", userType); - } - } - case CONNECTION_TYPE -> { - if (connectionType != null) { - geoData.put("connection_type", connectionType.toString()); - } - } - } - } - return geoData; - } - - private Map retrieveIspGeoData(IpDatabase ipDatabase, String ipAddress) { - IspResponse response = ipDatabase.getIsp(ipAddress); - if (response == null) { - return Map.of(); - } - - String isp = 
response.getIsp(); - String ispOrganization = response.getOrganization(); - String mobileNetworkCode = response.getMobileNetworkCode(); - String mobileCountryCode = response.getMobileCountryCode(); - Long asn = response.getAutonomousSystemNumber(); - String organizationName = response.getAutonomousSystemOrganization(); - Network network = response.getNetwork(); - - Map geoData = new HashMap<>(); - for (Property property : this.properties) { - switch (property) { - case IP -> geoData.put("ip", response.getIpAddress()); - case ASN -> { - if (asn != null) { - geoData.put("asn", asn); - } - } - case ORGANIZATION_NAME -> { - if (organizationName != null) { - geoData.put("organization_name", organizationName); - } - } - case NETWORK -> { - if (network != null) { - geoData.put("network", network.toString()); - } - } - case ISP -> { - if (isp != null) { - geoData.put("isp", isp); - } - } - case ISP_ORGANIZATION_NAME -> { - if (ispOrganization != null) { - geoData.put("isp_organization_name", ispOrganization); - } - } - case MOBILE_COUNTRY_CODE -> { - if (mobileCountryCode != null) { - geoData.put("mobile_country_code", mobileCountryCode); - } - } - case MOBILE_NETWORK_CODE -> { - if (mobileNetworkCode != null) { - geoData.put("mobile_network_code", mobileNetworkCode); - } - } - } - } - return geoData; + return ipDataLookup.getProperties(); } /** @@ -752,19 +244,20 @@ public Processor create( databaseType = ipDatabase.getDatabaseType(); } - final Database database; + final IpDataLookupFactory factory; try { - database = Database.getDatabase(databaseType, databaseFile); + factory = IpDataLookupFactories.get(databaseType, databaseFile); } catch (IllegalArgumentException e) { throw newConfigurationException(TYPE, processorTag, "database_file", e.getMessage()); } - final Set properties; + final IpDataLookup ipDataLookup; try { - properties = database.parseProperties(propertyNames); + ipDataLookup = factory.create(propertyNames); } catch (IllegalArgumentException e) { throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); } + return new GeoIpProcessor( processorTag, description, @@ -772,7 +265,7 @@ public Processor create( new DatabaseVerifyingSupplier(ipDatabaseProvider, databaseFile, databaseType), () -> ipDatabaseProvider.isValid(databaseFile), targetField, - properties, + ipDataLookup, ignoreMissing, firstOnly, databaseFile diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java index 721493ac5ee5d..09ed10568ce8d 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java @@ -41,7 +41,7 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -class GeoIpTaskState implements PersistentTaskState, VersionedNamedWriteable { +public class GeoIpTaskState implements PersistentTaskState, VersionedNamedWriteable { private static boolean includeSha256(TransportVersion version) { return version.isPatchFrom(TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER_BACKPORT_8_15) @@ -150,7 +150,7 @@ public void writeTo(StreamOutput out) throws IOException { }); } - record Metadata(long lastUpdate, int firstChunk, int lastChunk, String md5, long lastCheck, @Nullable String sha256) + public record Metadata(long lastUpdate, 
int firstChunk, int lastChunk, String md5, long lastCheck, @Nullable String sha256) implements ToXContentObject { @@ -198,7 +198,7 @@ public static Metadata fromXContent(XContentParser parser) { } } - Metadata { + public Metadata { Objects.requireNonNull(md5); } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java index c7d3db5f5b572..b6e73f3f33f7c 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java @@ -15,10 +15,9 @@ import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.ingest.geoip.direct.DatabaseConfigurationMetadata; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -91,8 +90,8 @@ public static IngestGeoIpMetadata fromXContent(XContentParser parser) throws IOE } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return Iterators.concat(ChunkedToXContentHelper.xContentValuesMap(DATABASES_FIELD.getPreferredName(), databases)); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).xContentObjectFields(DATABASES_FIELD.getPreferredName(), databases); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 9551ca29294ab..f5ae869841b82 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -31,6 +31,7 @@ import org.elasticsearch.ingest.EnterpriseGeoIpTask.EnterpriseGeoIpTaskParams; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration; import org.elasticsearch.ingest.geoip.direct.DeleteDatabaseConfigurationAction; import org.elasticsearch.ingest.geoip.direct.GetDatabaseConfigurationAction; import org.elasticsearch.ingest.geoip.direct.PutDatabaseConfigurationAction; @@ -232,7 +233,22 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(PersistentTaskParams.class, GEOIP_DOWNLOADER, GeoIpTaskParams::new), new NamedWriteableRegistry.Entry(PersistentTaskState.class, ENTERPRISE_GEOIP_DOWNLOADER, EnterpriseGeoIpTaskState::new), new NamedWriteableRegistry.Entry(PersistentTaskParams.class, ENTERPRISE_GEOIP_DOWNLOADER, EnterpriseGeoIpTaskParams::new), - new NamedWriteableRegistry.Entry(Task.Status.class, GEOIP_DOWNLOADER, GeoIpDownloaderStats::new) + new NamedWriteableRegistry.Entry(Task.Status.class, GEOIP_DOWNLOADER, GeoIpDownloaderStats::new), + new NamedWriteableRegistry.Entry( + DatabaseConfiguration.Provider.class, + DatabaseConfiguration.Maxmind.NAME, + DatabaseConfiguration.Maxmind::new + ), + new NamedWriteableRegistry.Entry( + 
DatabaseConfiguration.Provider.class, + DatabaseConfiguration.Local.NAME, + DatabaseConfiguration.Local::new + ), + new NamedWriteableRegistry.Entry( + DatabaseConfiguration.Provider.class, + DatabaseConfiguration.Web.NAME, + DatabaseConfiguration.Web::new + ) ); } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookup.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookup.java new file mode 100644 index 0000000000000..7442c8e930886 --- /dev/null +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookup.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.ingest.geoip; + +import java.io.IOException; +import java.util.Map; +import java.util.Set; + +interface IpDataLookup { + /** + * Gets data from the provided {@code ipDatabase} for the provided {@code ip} + * + * @param ipDatabase the database from which to lookup a result + * @param ip the ip address + * @return a map of data corresponding to the configured properties + * @throws IOException if the implementation encounters any problem while retrieving the response + */ + Map getData(IpDatabase ipDatabase, String ip) throws IOException; + + /** + * @return the set of properties this lookup will provide + */ + Set getProperties(); +} diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java new file mode 100644 index 0000000000000..990788978a0ca --- /dev/null +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.ingest.geoip; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; + +import java.util.List; +import java.util.Set; +import java.util.function.Function; + +final class IpDataLookupFactories { + + private IpDataLookupFactories() { + // utility class + } + + interface IpDataLookupFactory { + IpDataLookup create(List<String> properties); + } + + private static final String CITY_DB_SUFFIX = "-City"; + private static final String COUNTRY_DB_SUFFIX = "-Country"; + private static final String ASN_DB_SUFFIX = "-ASN"; + private static final String ANONYMOUS_IP_DB_SUFFIX = "-Anonymous-IP"; + private static final String CONNECTION_TYPE_DB_SUFFIX = "-Connection-Type"; + private static final String DOMAIN_DB_SUFFIX = "-Domain"; + private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise"; + private static final String ISP_DB_SUFFIX = "-ISP"; + + @Nullable + private static Database getMaxmindDatabase(final String databaseType) { + if (databaseType.endsWith(CITY_DB_SUFFIX)) { + return Database.City; + } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { + return Database.Country; + } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { + return Database.Asn; + } else if (databaseType.endsWith(ANONYMOUS_IP_DB_SUFFIX)) { + return Database.AnonymousIp; + } else if (databaseType.endsWith(CONNECTION_TYPE_DB_SUFFIX)) { + return Database.ConnectionType; + } else if (databaseType.endsWith(DOMAIN_DB_SUFFIX)) { + return Database.Domain; + } else if (databaseType.endsWith(ENTERPRISE_DB_SUFFIX)) { + return Database.Enterprise; + } else if (databaseType.endsWith(ISP_DB_SUFFIX)) { + return Database.Isp; + } else { + return null; // no match was found + } + }
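Because the match is on the suffix of the database-type metadata string, the free (GeoLite2) and commercial (GeoIP2) editions of a product resolve to the same Database constant. A hypothetical sanity check (not part of this change), assuming the standard MaxMind type strings:

```java
package org.elasticsearch.ingest.geoip;

// Hypothetical demonstration (not part of this change) of the suffix matching;
// run with assertions enabled (java -ea).
public class IpDataLookupFactoriesDemo {
    public static void main(String[] args) {
        assert IpDataLookupFactories.getDatabase("GeoLite2-City") == Database.City;
        assert IpDataLookupFactories.getDatabase("GeoIP2-City") == Database.City;
        assert IpDataLookupFactories.getDatabase("GeoLite2-ASN") == Database.Asn;
        // unmatched or blank types yield null, which get(...) below turns into
        // an IllegalArgumentException naming the offending file
        assert IpDataLookupFactories.getDatabase("GeoIP2-Unknown") == null;
    }
}
```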
+ + /** + * Parses the passed-in databaseType and returns the Database instance that is + * associated with that databaseType. + * + * @param databaseType the database type String from the metadata of the database file + * @return the Database instance that is associated with the databaseType + */ + @Nullable + static Database getDatabase(final String databaseType) { + Database database = null; + + if (Strings.hasText(databaseType)) { + database = getMaxmindDatabase(databaseType); + } + + return database; + } + + static Function<Set<Database.Property>, IpDataLookup> getMaxmindLookup(final Database database) { + return switch (database) { + case City -> MaxmindIpDataLookups.City::new; + case Country -> MaxmindIpDataLookups.Country::new; + case Asn -> MaxmindIpDataLookups.Asn::new; + case AnonymousIp -> MaxmindIpDataLookups.AnonymousIp::new; + case ConnectionType -> MaxmindIpDataLookups.ConnectionType::new; + case Domain -> MaxmindIpDataLookups.Domain::new; + case Enterprise -> MaxmindIpDataLookups.Enterprise::new; + case Isp -> MaxmindIpDataLookups.Isp::new; + }; + } + + static IpDataLookupFactory get(final String databaseType, final String databaseFile) { + final Database database = getDatabase(databaseType); + if (database == null) { + throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]"); + } + + final Function<Set<Database.Property>, IpDataLookup> factoryMethod = getMaxmindLookup(database); + + // note: this can't presently be null, but keep this check -- it will be useful in the near future + if (factoryMethod == null) { + throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]"); + } + + return (properties) -> factoryMethod.apply(database.parseProperties(properties)); + } +} diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabase.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabase.java index f416259a87d27..db1ffc1c682b8 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabase.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabase.java @@ -9,15 +9,9 @@ package org.elasticsearch.ingest.geoip; -import com.maxmind.geoip2.model.AnonymousIpResponse; -import com.maxmind.geoip2.model.AsnResponse; -import com.maxmind.geoip2.model.CityResponse; -import com.maxmind.geoip2.model.ConnectionTypeResponse; -import com.maxmind.geoip2.model.CountryResponse; -import com.maxmind.geoip2.model.DomainResponse; -import com.maxmind.geoip2.model.EnterpriseResponse; -import com.maxmind.geoip2.model.IspResponse; +import com.maxmind.db.Reader; + +import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.core.Nullable; import java.io.IOException; @@ -34,44 +28,15 @@ public interface IpDatabase extends AutoCloseable { String getDatabaseType() throws IOException; /** - * @param ipAddress the IP address to look up - * @return a response containing the city data for the given address if it exists, or null if it could not be found - * @throws UnsupportedOperationException may be thrown if the implementation does not support retrieving city data - */ - @Nullable - CityResponse getCity(String ipAddress); - - /** - * @param ipAddress the IP address to look up - * @return a response containing the country data for the given address if it exists, or null if it could not be found - * @throws UnsupportedOperationException may be thrown if the implementation does not support retrieving country data - */ - @Nullable - CountryResponse getCountry(String ipAddress); - - /** - * @param ipAddress the IP address to look up - * @return
a response containing the Autonomous System Number for the given address if it exists, or null if it could not - * be found - * @throws UnsupportedOperationException may be thrown if the implementation does not support retrieving ASN data + * Returns a response from this database's reader for the given IP address. + * + * @param ipAddress the address to lookup + * @param responseProvider a method for extracting a response from a {@link Reader}, usually this will be a method reference + * @return a possibly-null response + * @param the type of response that will be returned */ @Nullable - AsnResponse getAsn(String ipAddress); - - @Nullable - AnonymousIpResponse getAnonymousIp(String ipAddress); - - @Nullable - ConnectionTypeResponse getConnectionType(String ipAddress); - - @Nullable - DomainResponse getDomain(String ipAddress); - - @Nullable - EnterpriseResponse getEnterprise(String ipAddress); - - @Nullable - IspResponse getIsp(String ipAddress); + RESPONSE getResponse(String ipAddress, CheckedBiFunction responseProvider); /** * Releases the current database object. Called after processing a single document. Databases should be closed or returned to a diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java new file mode 100644 index 0000000000000..5b22b3f4005a9 --- /dev/null +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java @@ -0,0 +1,606 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.ingest.geoip; + +import com.maxmind.db.DatabaseRecord; +import com.maxmind.db.Network; +import com.maxmind.db.Reader; +import com.maxmind.geoip2.model.AbstractResponse; +import com.maxmind.geoip2.model.AnonymousIpResponse; +import com.maxmind.geoip2.model.AsnResponse; +import com.maxmind.geoip2.model.CityResponse; +import com.maxmind.geoip2.model.ConnectionTypeResponse; +import com.maxmind.geoip2.model.CountryResponse; +import com.maxmind.geoip2.model.DomainResponse; +import com.maxmind.geoip2.model.EnterpriseResponse; +import com.maxmind.geoip2.model.IspResponse; +import com.maxmind.geoip2.record.Continent; +import com.maxmind.geoip2.record.Location; +import com.maxmind.geoip2.record.Subdivision; + +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.core.Nullable; + +import java.io.IOException; +import java.net.InetAddress; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * A collection of {@link IpDataLookup} implementations for MaxMind databases + */ +final class MaxmindIpDataLookups { + + private MaxmindIpDataLookups() { + // utility class + } + + static class AnonymousIp extends AbstractBase { + AnonymousIp(final Set properties) { + super( + properties, + AnonymousIpResponse.class, + (response, ipAddress, network, locales) -> new AnonymousIpResponse(response, ipAddress, network) + ); + } + + @Override + protected Map transform(final AnonymousIpResponse response) { + boolean isHostingProvider = response.isHostingProvider(); + boolean isTorExitNode = response.isTorExitNode(); + boolean isAnonymousVpn = response.isAnonymousVpn(); + boolean isAnonymous = response.isAnonymous(); + boolean isPublicProxy = response.isPublicProxy(); + boolean isResidentialProxy = response.isResidentialProxy(); + + Map data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", response.getIpAddress()); + case HOSTING_PROVIDER -> { + data.put("hosting_provider", isHostingProvider); + } + case TOR_EXIT_NODE -> { + data.put("tor_exit_node", isTorExitNode); + } + case ANONYMOUS_VPN -> { + data.put("anonymous_vpn", isAnonymousVpn); + } + case ANONYMOUS -> { + data.put("anonymous", isAnonymous); + } + case PUBLIC_PROXY -> { + data.put("public_proxy", isPublicProxy); + } + case RESIDENTIAL_PROXY -> { + data.put("residential_proxy", isResidentialProxy); + } + } + } + return data; + } + } + + static class Asn extends AbstractBase { + Asn(Set properties) { + super(properties, AsnResponse.class, (response, ipAddress, network, locales) -> new AsnResponse(response, ipAddress, network)); + } + + @Override + protected Map transform(final AsnResponse response) { + Long asn = response.getAutonomousSystemNumber(); + String organizationName = response.getAutonomousSystemOrganization(); + Network network = response.getNetwork(); + + Map data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", response.getIpAddress()); + case ASN -> { + if (asn != null) { + data.put("asn", asn); + } + } + case ORGANIZATION_NAME -> { + if (organizationName != null) { + data.put("organization_name", organizationName); + } + } + case NETWORK -> { + if (network != null) { + data.put("network", network.toString()); + } + } + } + } + return data; + } + } + + static class City extends AbstractBase { + City(final Set properties) { + 
super(properties, CityResponse.class, CityResponse::new); + } + + @Override + protected Map transform(final CityResponse response) { + com.maxmind.geoip2.record.Country country = response.getCountry(); + com.maxmind.geoip2.record.City city = response.getCity(); + Location location = response.getLocation(); + Continent continent = response.getContinent(); + Subdivision subdivision = response.getMostSpecificSubdivision(); + + Map data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", response.getTraits().getIpAddress()); + case COUNTRY_ISO_CODE -> { + String countryIsoCode = country.getIsoCode(); + if (countryIsoCode != null) { + data.put("country_iso_code", countryIsoCode); + } + } + case COUNTRY_NAME -> { + String countryName = country.getName(); + if (countryName != null) { + data.put("country_name", countryName); + } + } + case CONTINENT_CODE -> { + String continentCode = continent.getCode(); + if (continentCode != null) { + data.put("continent_code", continentCode); + } + } + case CONTINENT_NAME -> { + String continentName = continent.getName(); + if (continentName != null) { + data.put("continent_name", continentName); + } + } + case REGION_ISO_CODE -> { + // ISO 3166-2 code for country subdivisions. + // See iso.org/iso-3166-country-codes.html + String countryIso = country.getIsoCode(); + String subdivisionIso = subdivision.getIsoCode(); + if (countryIso != null && subdivisionIso != null) { + String regionIsoCode = countryIso + "-" + subdivisionIso; + data.put("region_iso_code", regionIsoCode); + } + } + case REGION_NAME -> { + String subdivisionName = subdivision.getName(); + if (subdivisionName != null) { + data.put("region_name", subdivisionName); + } + } + case CITY_NAME -> { + String cityName = city.getName(); + if (cityName != null) { + data.put("city_name", cityName); + } + } + case TIMEZONE -> { + String locationTimeZone = location.getTimeZone(); + if (locationTimeZone != null) { + data.put("timezone", locationTimeZone); + } + } + case LOCATION -> { + Double latitude = location.getLatitude(); + Double longitude = location.getLongitude(); + if (latitude != null && longitude != null) { + Map locationObject = new HashMap<>(); + locationObject.put("lat", latitude); + locationObject.put("lon", longitude); + data.put("location", locationObject); + } + } + } + } + return data; + } + } + + static class ConnectionType extends AbstractBase { + ConnectionType(final Set properties) { + super( + properties, + ConnectionTypeResponse.class, + (response, ipAddress, network, locales) -> new ConnectionTypeResponse(response, ipAddress, network) + ); + } + + @Override + protected Map transform(final ConnectionTypeResponse response) { + ConnectionTypeResponse.ConnectionType connectionType = response.getConnectionType(); + + Map data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", response.getIpAddress()); + case CONNECTION_TYPE -> { + if (connectionType != null) { + data.put("connection_type", connectionType.toString()); + } + } + } + } + return data; + } + } + + static class Country extends AbstractBase { + Country(final Set properties) { + super(properties, CountryResponse.class, CountryResponse::new); + } + + @Override + protected Map transform(final CountryResponse response) { + com.maxmind.geoip2.record.Country country = response.getCountry(); + Continent continent = response.getContinent(); + + Map data = new HashMap<>(); + for 
(Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", response.getTraits().getIpAddress()); + case COUNTRY_ISO_CODE -> { + String countryIsoCode = country.getIsoCode(); + if (countryIsoCode != null) { + data.put("country_iso_code", countryIsoCode); + } + } + case COUNTRY_NAME -> { + String countryName = country.getName(); + if (countryName != null) { + data.put("country_name", countryName); + } + } + case CONTINENT_CODE -> { + String continentCode = continent.getCode(); + if (continentCode != null) { + data.put("continent_code", continentCode); + } + } + case CONTINENT_NAME -> { + String continentName = continent.getName(); + if (continentName != null) { + data.put("continent_name", continentName); + } + } + } + } + return data; + } + } + + static class Domain extends AbstractBase { + Domain(final Set properties) { + super( + properties, + DomainResponse.class, + (response, ipAddress, network, locales) -> new DomainResponse(response, ipAddress, network) + ); + } + + @Override + protected Map transform(final DomainResponse response) { + String domain = response.getDomain(); + + Map data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", response.getIpAddress()); + case DOMAIN -> { + if (domain != null) { + data.put("domain", domain); + } + } + } + } + return data; + } + } + + static class Enterprise extends AbstractBase { + Enterprise(final Set properties) { + super(properties, EnterpriseResponse.class, EnterpriseResponse::new); + } + + @Override + protected Map transform(final EnterpriseResponse response) { + com.maxmind.geoip2.record.Country country = response.getCountry(); + com.maxmind.geoip2.record.City city = response.getCity(); + Location location = response.getLocation(); + Continent continent = response.getContinent(); + Subdivision subdivision = response.getMostSpecificSubdivision(); + + Long asn = response.getTraits().getAutonomousSystemNumber(); + String organizationName = response.getTraits().getAutonomousSystemOrganization(); + Network network = response.getTraits().getNetwork(); + + String isp = response.getTraits().getIsp(); + String ispOrganization = response.getTraits().getOrganization(); + String mobileCountryCode = response.getTraits().getMobileCountryCode(); + String mobileNetworkCode = response.getTraits().getMobileNetworkCode(); + + boolean isHostingProvider = response.getTraits().isHostingProvider(); + boolean isTorExitNode = response.getTraits().isTorExitNode(); + boolean isAnonymousVpn = response.getTraits().isAnonymousVpn(); + boolean isAnonymous = response.getTraits().isAnonymous(); + boolean isPublicProxy = response.getTraits().isPublicProxy(); + boolean isResidentialProxy = response.getTraits().isResidentialProxy(); + + String userType = response.getTraits().getUserType(); + + String domain = response.getTraits().getDomain(); + + ConnectionTypeResponse.ConnectionType connectionType = response.getTraits().getConnectionType(); + + Map data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", response.getTraits().getIpAddress()); + case COUNTRY_ISO_CODE -> { + String countryIsoCode = country.getIsoCode(); + if (countryIsoCode != null) { + data.put("country_iso_code", countryIsoCode); + } + } + case COUNTRY_NAME -> { + String countryName = country.getName(); + if (countryName != null) { + data.put("country_name", countryName); + } + } + case CONTINENT_CODE -> { + String 
continentCode = continent.getCode(); + if (continentCode != null) { + data.put("continent_code", continentCode); + } + } + case CONTINENT_NAME -> { + String continentName = continent.getName(); + if (continentName != null) { + data.put("continent_name", continentName); + } + } + case REGION_ISO_CODE -> { + // ISO 3166-2 code for country subdivisions. + // See iso.org/iso-3166-country-codes.html + String countryIso = country.getIsoCode(); + String subdivisionIso = subdivision.getIsoCode(); + if (countryIso != null && subdivisionIso != null) { + String regionIsoCode = countryIso + "-" + subdivisionIso; + data.put("region_iso_code", regionIsoCode); + } + } + case REGION_NAME -> { + String subdivisionName = subdivision.getName(); + if (subdivisionName != null) { + data.put("region_name", subdivisionName); + } + } + case CITY_NAME -> { + String cityName = city.getName(); + if (cityName != null) { + data.put("city_name", cityName); + } + } + case TIMEZONE -> { + String locationTimeZone = location.getTimeZone(); + if (locationTimeZone != null) { + data.put("timezone", locationTimeZone); + } + } + case LOCATION -> { + Double latitude = location.getLatitude(); + Double longitude = location.getLongitude(); + if (latitude != null && longitude != null) { + Map locationObject = new HashMap<>(); + locationObject.put("lat", latitude); + locationObject.put("lon", longitude); + data.put("location", locationObject); + } + } + case ASN -> { + if (asn != null) { + data.put("asn", asn); + } + } + case ORGANIZATION_NAME -> { + if (organizationName != null) { + data.put("organization_name", organizationName); + } + } + case NETWORK -> { + if (network != null) { + data.put("network", network.toString()); + } + } + case HOSTING_PROVIDER -> { + data.put("hosting_provider", isHostingProvider); + } + case TOR_EXIT_NODE -> { + data.put("tor_exit_node", isTorExitNode); + } + case ANONYMOUS_VPN -> { + data.put("anonymous_vpn", isAnonymousVpn); + } + case ANONYMOUS -> { + data.put("anonymous", isAnonymous); + } + case PUBLIC_PROXY -> { + data.put("public_proxy", isPublicProxy); + } + case RESIDENTIAL_PROXY -> { + data.put("residential_proxy", isResidentialProxy); + } + case DOMAIN -> { + if (domain != null) { + data.put("domain", domain); + } + } + case ISP -> { + if (isp != null) { + data.put("isp", isp); + } + } + case ISP_ORGANIZATION_NAME -> { + if (ispOrganization != null) { + data.put("isp_organization_name", ispOrganization); + } + } + case MOBILE_COUNTRY_CODE -> { + if (mobileCountryCode != null) { + data.put("mobile_country_code", mobileCountryCode); + } + } + case MOBILE_NETWORK_CODE -> { + if (mobileNetworkCode != null) { + data.put("mobile_network_code", mobileNetworkCode); + } + } + case USER_TYPE -> { + if (userType != null) { + data.put("user_type", userType); + } + } + case CONNECTION_TYPE -> { + if (connectionType != null) { + data.put("connection_type", connectionType.toString()); + } + } + } + } + return data; + } + } + + static class Isp extends AbstractBase { + Isp(final Set properties) { + super(properties, IspResponse.class, (response, ipAddress, network, locales) -> new IspResponse(response, ipAddress, network)); + } + + @Override + protected Map transform(final IspResponse response) { + String isp = response.getIsp(); + String ispOrganization = response.getOrganization(); + String mobileNetworkCode = response.getMobileNetworkCode(); + String mobileCountryCode = response.getMobileCountryCode(); + Long asn = response.getAutonomousSystemNumber(); + String organizationName = 
response.getAutonomousSystemOrganization(); + Network network = response.getNetwork(); + + Map<String, Object> data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", response.getIpAddress()); + case ASN -> { + if (asn != null) { + data.put("asn", asn); + } + } + case ORGANIZATION_NAME -> { + if (organizationName != null) { + data.put("organization_name", organizationName); + } + } + case NETWORK -> { + if (network != null) { + data.put("network", network.toString()); + } + } + case ISP -> { + if (isp != null) { + data.put("isp", isp); + } + } + case ISP_ORGANIZATION_NAME -> { + if (ispOrganization != null) { + data.put("isp_organization_name", ispOrganization); + } + } + case MOBILE_COUNTRY_CODE -> { + if (mobileCountryCode != null) { + data.put("mobile_country_code", mobileCountryCode); + } + } + case MOBILE_NETWORK_CODE -> { + if (mobileNetworkCode != null) { + data.put("mobile_network_code", mobileNetworkCode); + } + } + } + } + return data; + } + } + + /** + * As an internal detail, the {@code com.maxmind.geoip2.model} classes that are populated by + * {@link Reader#getRecord(InetAddress, Class)} are kinda half-populated and need to go through a second round of construction + * with context from the querying caller. This method gives us a place to do that additional binding. Cleverly, the signature + * here matches the constructor for many of these model classes exactly, so an appropriate implementation can 'just' be a method + * reference in some cases (in other cases it needs to be a lambda). + */ + @FunctionalInterface + private interface ResponseBuilder<RESPONSE extends AbstractResponse> { + RESPONSE build(RESPONSE resp, String address, Network network, List<String> locales); + } + + /** + * The {@link MaxmindIpDataLookups.AbstractBase} is an abstract base implementation of {@link IpDataLookup} that + * provides common functionality for getting a specific kind of {@link AbstractResponse} from a {@link IpDatabase}. + * + * @param <RESPONSE> the intermediate type of {@link AbstractResponse} + */ + private abstract static class AbstractBase<RESPONSE extends AbstractResponse> implements IpDataLookup { + + protected final Set<Database.Property> properties; + protected final Class<RESPONSE> clazz; + protected final ResponseBuilder<RESPONSE> builder; + + AbstractBase(final Set<Database.Property> properties, final Class<RESPONSE> clazz, final ResponseBuilder<RESPONSE> builder) { + this.properties = Set.copyOf(properties); + this.clazz = clazz; + this.builder = builder; + } + + @Override + public Set<Database.Property> getProperties() { + return this.properties; + } + + @Override + public final Map<String, Object> getData(final IpDatabase ipDatabase, final String ipAddress) { + final RESPONSE response = ipDatabase.getResponse(ipAddress, this::lookup); + return (response == null) ? Map.of() : transform(response); + } + + @Nullable + private RESPONSE lookup(final Reader reader, final String ipAddress) throws IOException { + final InetAddress ip = InetAddresses.forString(ipAddress); + final DatabaseRecord<RESPONSE> record = reader.getRecord(ip, clazz); + final RESPONSE data = record.getData(); + return (data == null) ?
null : builder.build(data, NetworkAddress.format(ip), record.getNetwork(), List.of("en")); + } + + /** + * Extract the configured properties from the retrieved response + * @param response the non-null response that was retrieved + * @return a mapping of properties for the ip from the response + */ + protected abstract Map transform(RESPONSE response); + } +} diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java index b8b48e0f738a5..3399b71879e26 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java @@ -9,13 +9,16 @@ package org.elasticsearch.ingest.geoip.direct; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -34,19 +37,19 @@ * That is, it has an id e.g. "my_db_config_1" and it says "download the file named XXXX from SomeCompany, and here's the * magic token to use to do that." */ -public record DatabaseConfiguration(String id, String name, Maxmind maxmind) implements Writeable, ToXContentObject { +public record DatabaseConfiguration(String id, String name, Provider provider) implements Writeable, ToXContentObject { // id is a user selected signifier like 'my_domain_db' // name is the name of a file that can be downloaded (like 'GeoIP2-Domain') - // a configuration will have a 'type' like "maxmind", and that might have some more details, + // a configuration will have a 'provider' like "maxmind", and that might have some more details, // for now, though the important thing is that the json has to have it even though we don't model it meaningfully in this class public DatabaseConfiguration { // these are invariants, not actual validation Objects.requireNonNull(id); Objects.requireNonNull(name); - Objects.requireNonNull(maxmind); + Objects.requireNonNull(provider); } /** @@ -76,25 +79,49 @@ public record DatabaseConfiguration(String id, String name, Maxmind maxmind) imp ); private static final ParseField NAME = new ParseField("name"); - private static final ParseField MAXMIND = new ParseField("maxmind"); + private static final ParseField MAXMIND = new ParseField(Maxmind.NAME); + private static final ParseField WEB = new ParseField(Web.NAME); + private static final ParseField LOCAL = new ParseField(Local.NAME); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "database", false, (a, id) -> { String name = (String) a[0]; - Maxmind maxmind = (Maxmind) a[1]; - return new DatabaseConfiguration(id, name, maxmind); + Provider provider; + if (a[1] != null) { + provider = (Maxmind) a[1]; + } else if (a[2] != null) { + provider = (Web) a[2]; + } else { + provider = (Local) a[3]; + } + return new 
DatabaseConfiguration(id, name, provider); + } + ); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (parser, id) -> Maxmind.PARSER.apply(parser, null), MAXMIND); + PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (parser, id) -> Maxmind.PARSER.apply(parser, null), + MAXMIND + ); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (parser, id) -> Web.PARSER.apply(parser, null), WEB); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (parser, id) -> Local.PARSER.apply(parser, null), LOCAL); } public DatabaseConfiguration(StreamInput in) throws IOException { - this(in.readString(), in.readString(), new Maxmind(in)); + this(in.readString(), in.readString(), readProvider(in)); + } + + private static Provider readProvider(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_GEO_DATABASE_PROVIDERS)) { + return in.readNamedWriteable(Provider.class); + } else { + // prior to the above version, everything was always a maxmind, so this half of the if is logical + return new Maxmind(in.readString()); + } } public static DatabaseConfiguration parse(XContentParser parser, String id) { @@ -105,14 +132,27 @@ public static DatabaseConfiguration parse(XContentParser parser, String id) { public void writeTo(StreamOutput out) throws IOException { out.writeString(id); out.writeString(name); - maxmind.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_GEO_DATABASE_PROVIDERS)) { + out.writeNamedWriteable(provider); + } else { + if (provider instanceof Maxmind maxmind) { + out.writeString(maxmind.accountId); + } else { + /* + * The existence of non-Maxmind providers is gated on the feature get_database_configuration_action.multi_node, and + * get_database_configuration_action.multi_node is only available on or after + * TransportVersions.INGEST_GEO_DATABASE_PROVIDERS. + */ + assert false : "non-maxmind DatabaseConfiguration.Provider [" + provider.getWriteableName() + "]"; + } + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("name", name); - builder.field("maxmind", maxmind); + builder.field(provider.getWriteableName(), provider); builder.endObject(); return builder; } @@ -168,7 +208,24 @@ public ActionRequestValidationException validate() { return err.validationErrors().isEmpty() ? null : err; } - public record Maxmind(String accountId) implements Writeable, ToXContentObject { + public boolean isReadOnly() { + return provider.isReadOnly(); + } + + /** + * A marker interface that all providers need to implement.
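+ * <p>As an illustration (hypothetical, not part of this change), a configuration using the read-only
+ * "local" provider defined below could be constructed and serialized like so:
+ * <pre>{@code
+ * DatabaseConfiguration local = new DatabaseConfiguration(
+ *     "my_local_db",                                   // made-up id
+ *     "GeoLite2-City",                                 // database name
+ *     new DatabaseConfiguration.Local("GeoLite2-City") // provider, keyed as "local" in JSON
+ * );
+ * // toXContent renders: {"name":"GeoLite2-City","local":{"type":"GeoLite2-City"}},
+ * // a Web provider renders as "web":{} in the same position, and
+ * // local.isReadOnly() is true, so the put/delete transport actions below reject it
+ * }</pre>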
+ */ + public interface Provider extends NamedWriteable, ToXContentObject { + boolean isReadOnly(); + } + + public record Maxmind(String accountId) implements Provider { + public static final String NAME = "maxmind"; + + @Override + public String getWriteableName() { + return NAME; + } public Maxmind { // this is an invariant, not actual validation @@ -206,5 +263,90 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + @Override + public boolean isReadOnly() { + return false; + } + } + + public record Local(String type) implements Provider { + public static final String NAME = "local"; + + private static final ParseField TYPE = new ParseField("type"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("database", false, (a, id) -> { + String type = (String) a[0]; + return new Local(type); + }); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), TYPE); + } + + public Local(StreamInput in) throws IOException { + this(in.readString()); + } + + public static Local parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(type); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public boolean isReadOnly() { + return true; + } + } + + public record Web() implements Provider { + public static final String NAME = "web"; + + private static final ObjectParser PARSER = new ObjectParser<>("database", Web::new); + + public Web(StreamInput in) throws IOException { + this(); + } + + public static Web parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException {} + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public boolean isReadOnly() { + return true; + } } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/PutDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/PutDatabaseConfigurationAction.java index 41be25987a31b..b5343f17e47b6 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/PutDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/PutDatabaseConfigurationAction.java @@ -49,7 +49,12 @@ public DatabaseConfiguration getDatabase() { } public static Request parseRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout, String id, XContentParser parser) { - return new Request(masterNodeTimeout, ackTimeout, DatabaseConfiguration.parse(parser, id)); + DatabaseConfiguration database = DatabaseConfiguration.parse(parser, id); + if (database.isReadOnly()) { + throw new IllegalArgumentException("Database " + id + " is read only"); + } else { + return new Request(masterNodeTimeout, ackTimeout, database); + } } @Override diff --git 
a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportDeleteDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportDeleteDatabaseConfigurationAction.java index 088cea04cef87..b73b2fd4beb08 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportDeleteDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportDeleteDatabaseConfigurationAction.java @@ -91,6 +91,8 @@ protected void masterOperation(Task task, Request request, ClusterState state, A final IngestGeoIpMetadata geoIpMeta = state.metadata().custom(IngestGeoIpMetadata.TYPE, IngestGeoIpMetadata.EMPTY); if (geoIpMeta.getDatabases().containsKey(id) == false) { throw new ResourceNotFoundException("Database configuration not found: {}", id); + } else if (geoIpMeta.getDatabases().get(id).database().isReadOnly()) { + throw new IllegalArgumentException("Database " + id + " is read only"); } deleteDatabaseConfigurationTaskQueue.submitTask( Strings.format("delete-geoip-database-configuration-[%s]", id), diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java index 0660a9ff0491d..c83c40e56b749 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.ingest.geoip.direct; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -19,19 +18,28 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.features.FeatureService; +import org.elasticsearch.ingest.geoip.DatabaseNodeService; +import org.elasticsearch.ingest.geoip.GeoIpTaskState; import org.elasticsearch.ingest.geoip.IngestGeoIpMetadata; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; +import java.util.Collection; +import java.util.Comparator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.ingest.IngestGeoIpFeatures.GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE; @@ -43,6 +51,7 @@ public class TransportGetDatabaseConfigurationAction extends TransportNodesActio List> { private final FeatureService featureService; + private final DatabaseNodeService databaseNodeService; @Inject public TransportGetDatabaseConfigurationAction( @@ -50,7 +59,8 @@ public TransportGetDatabaseConfigurationAction( ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - FeatureService featureService + FeatureService featureService, + DatabaseNodeService 
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java
index 0660a9ff0491d..c83c40e56b749 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java
@@ -9,7 +9,6 @@
 
 package org.elasticsearch.ingest.geoip.direct;
 
-import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.support.ActionFilters;
@@ -19,19 +18,28 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.features.FeatureService;
+import org.elasticsearch.ingest.geoip.DatabaseNodeService;
+import org.elasticsearch.ingest.geoip.GeoIpTaskState;
 import org.elasticsearch.ingest.geoip.IngestGeoIpMetadata;
 import org.elasticsearch.injection.guice.Inject;
+import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Base64;
+import java.util.Collection;
+import java.util.Comparator;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import static org.elasticsearch.ingest.IngestGeoIpFeatures.GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE;
 
@@ -43,6 +51,7 @@ public class TransportGetDatabaseConfigurationAction extends TransportNodesAction<
     List<DatabaseConfigurationMetadata>> {
 
     private final FeatureService featureService;
+    private final DatabaseNodeService databaseNodeService;
 
     @Inject
     public TransportGetDatabaseConfigurationAction(
@@ -50,7 +59,8 @@ public TransportGetDatabaseConfigurationAction(
         ClusterService clusterService,
         ThreadPool threadPool,
         ActionFilters actionFilters,
-        FeatureService featureService
+        FeatureService featureService,
+        DatabaseNodeService databaseNodeService
     ) {
         super(
             GetDatabaseConfigurationAction.NAME,
@@ -61,6 +71,7 @@ public TransportGetDatabaseConfigurationAction(
             threadPool.executor(ThreadPool.Names.MANAGEMENT)
         );
         this.featureService = featureService;
+        this.databaseNodeService = databaseNodeService;
     }
 
     @Override
@@ -74,9 +85,19 @@ protected void doExecute(
             /*
              * TransportGetDatabaseConfigurationAction used to be a TransportMasterNodeAction, and not all nodes in the cluster have been
              * updated. So we don't want to send node requests to the other nodes because they will blow up. Instead, we just return
             * the information that we used to return from the master node (it doesn't make any difference that this might not be the master
-             * node, because we're only reading the cluster state).
+             * node, because we're only reading the cluster state). Because older nodes only know about the Maxmind provider type, we filter
+             * out all others here to avoid causing problems on those nodes.
              */
-            newResponseAsync(task, request, createActionContext(task, request), List.of(), List.of(), listener);
+            newResponseAsync(
+                task,
+                request,
+                createActionContext(task, request).stream()
+                    .filter(database -> database.database().provider() instanceof DatabaseConfiguration.Maxmind)
+                    .toList(),
+                List.of(),
+                List.of(),
+                listener
+            );
         } else {
             super.doExecute(task, request, listener);
         }
@@ -97,28 +118,79 @@ protected List<DatabaseConfigurationMetadata> createActionContext(Task task, GetDatabaseConfigurationAction.Request request) {
             );
         }
 
-        final IngestGeoIpMetadata geoIpMeta = clusterService.state().metadata().custom(IngestGeoIpMetadata.TYPE, IngestGeoIpMetadata.EMPTY);
         List<DatabaseConfigurationMetadata> results = new ArrayList<>();
-
+        PersistentTasksCustomMetadata tasksMetadata = PersistentTasksCustomMetadata.getPersistentTasksCustomMetadata(
+            clusterService.state()
+        );
         for (String id : ids) {
-            if (Regex.isSimpleMatchPattern(id)) {
-                for (Map.Entry<String, DatabaseConfigurationMetadata> entry : geoIpMeta.getDatabases().entrySet()) {
-                    if (Regex.simpleMatch(id, entry.getKey())) {
-                        results.add(entry.getValue());
+            results.addAll(getWebDatabases(tasksMetadata, id));
+            results.addAll(getMaxmindDatabases(clusterService, id));
+        }
+        return results;
+    }
+
+    /*
+     * This returns read-only database information about the databases managed by the standard downloader
+     */
+    private static Collection<DatabaseConfigurationMetadata> getWebDatabases(PersistentTasksCustomMetadata tasksMetadata, String id) {
+        List<DatabaseConfigurationMetadata> webDatabases = new ArrayList<>();
+        if (tasksMetadata != null) {
+            PersistentTasksCustomMetadata.PersistentTask<?> maybeGeoIpTask = tasksMetadata.getTask("geoip-downloader");
+            if (maybeGeoIpTask != null) {
+                GeoIpTaskState geoIpTaskState = (GeoIpTaskState) maybeGeoIpTask.getState();
+                if (geoIpTaskState != null) {
+                    Map<String, GeoIpTaskState.Metadata> databases = geoIpTaskState.getDatabases();
+                    for (String databaseFileName : databases.keySet()) {
+                        String databaseName = getDatabaseNameForFileName(databaseFileName);
+                        String databaseId = getDatabaseIdForFileName(DatabaseConfiguration.Web.NAME, databaseFileName);
+                        if ((Regex.isSimpleMatchPattern(id) && Regex.simpleMatch(id, databaseId)) || id.equals(databaseId)) {
+                            webDatabases.add(
+                                new DatabaseConfigurationMetadata(
+                                    new DatabaseConfiguration(databaseId, databaseName, new DatabaseConfiguration.Web()),
+                                    -1,
+                                    databases.get(databaseFileName).lastUpdate()
+                                )
+                            );
+                        }
                     }
                 }
-            } else {
-                DatabaseConfigurationMetadata meta = geoIpMeta.getDatabases().get(id);
-                if (meta == null) {
-                    throw new ResourceNotFoundException("database configuration not found: {}", id);
-                } else {
-                    results.add(meta);
+            }
+        }
+        return webDatabases;
+    }
+
+    private static String getDatabaseIdForFileName(String providerType, String databaseFileName) {
+        return "_" + providerType + "_" + Base64.getEncoder().encodeToString(databaseFileName.getBytes(StandardCharsets.UTF_8));
+    }
+
+    private static String getDatabaseNameForFileName(String databaseFileName) {
+        return databaseFileName.endsWith(".mmdb")
+            ? databaseFileName.substring(0, databaseFileName.length() - ".mmdb".length())
+            : databaseFileName;
+    }
+
+    /*
+     * This returns information about the databases that are configured to be downloaded from Maxmind.
+     */
+    private static Collection<DatabaseConfigurationMetadata> getMaxmindDatabases(ClusterService clusterService, String id) {
+        List<DatabaseConfigurationMetadata> maxmindDatabases = new ArrayList<>();
+        final IngestGeoIpMetadata geoIpMeta = clusterService.state().metadata().custom(IngestGeoIpMetadata.TYPE, IngestGeoIpMetadata.EMPTY);
+        if (Regex.isSimpleMatchPattern(id)) {
+            for (Map.Entry<String, DatabaseConfigurationMetadata> entry : geoIpMeta.getDatabases().entrySet()) {
+                if (Regex.simpleMatch(id, entry.getKey())) {
+                    maxmindDatabases.add(entry.getValue());
                 }
             }
+        } else {
+            DatabaseConfigurationMetadata meta = geoIpMeta.getDatabases().get(id);
+            if (meta != null) {
+                maxmindDatabases.add(meta);
+            }
         }
-        return results;
+        return maxmindDatabases;
     }
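The synthetic ids for non-Maxmind databases are just the provider name sandwiched in underscores plus the Base64-encoded file name, which keeps them valid id strings no matter what characters the file name contains. A standalone sketch of the encoding and its inverse, using the exact id asserted in the YAML test at the end of this diff:

```java
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class DatabaseIdSketch {
    public static void main(String[] args) {
        // Mirrors getDatabaseIdForFileName: "_" + providerType + "_" + base64(fileName).
        String id = "_web_" + Base64.getEncoder().encodeToString("MyCustomGeoLite2-City.mmdb".getBytes(StandardCharsets.UTF_8));
        System.out.println(id); // _web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI=

        // Decoding the suffix recovers the file name, which is why the YAML test below can
        // assert the database name "MyCustomGeoLite2-City" for this id (".mmdb" stripped).
        String fileName = new String(Base64.getDecoder().decode(id.substring("_web_".length())), StandardCharsets.UTF_8);
        System.out.println(fileName); // MyCustomGeoLite2-City.mmdb
    }
}
```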
 
+    @Override
     protected void newResponseAsync(
         Task task,
         GetDatabaseConfigurationAction.Request request,
@@ -127,13 +199,47 @@ protected void newResponseAsync(
         List<FailedNodeException> failures,
         ActionListener<GetDatabaseConfigurationAction.Response> listener
     ) {
-        ActionListener.run(
-            listener,
-            l -> ActionListener.respondAndRelease(
+        ActionListener.run(listener, l -> {
+            List<DatabaseConfigurationMetadata> combinedResults = new ArrayList<>(results);
+            combinedResults.addAll(
+                deduplicateNodeResponses(responses, results.stream().map(result -> result.database().name()).collect(Collectors.toSet()))
+            );
+            ActionListener.respondAndRelease(
                 l,
-                new GetDatabaseConfigurationAction.Response(results, clusterService.getClusterName(), responses, failures)
+                new GetDatabaseConfigurationAction.Response(combinedResults, clusterService.getClusterName(), responses, failures)
+            );
+        });
+    }
+
+    /*
+     * This deduplicates the nodeResponses by name, favoring the most recent. This is because each node is reporting the local databases
+     * that it has, and we don't want to report duplicates to the user. It also filters out any that already exist in the set of
+     * preExistingNames. This is because the non-local databases take precedence, so any local database with the same name as a non-local
+     * one will not be used.
+     * Non-private for unit testing
+     */
+    static Collection<DatabaseConfigurationMetadata> deduplicateNodeResponses(
+        List<GetDatabaseConfigurationAction.NodeResponse> nodeResponses,
+        Set<String> preExistingNames
+    ) {
+        /*
+         * Each node reports the list of databases that are in its config/ingest-geoip directory. For the sake of this API we assume all
+         * local databases with the same name are the same database, and deduplicate by name and just return the newest.
+         */
+        return nodeResponses.stream()
+            .flatMap(response -> response.getDatabases().stream())
+            .collect(
+                Collectors.groupingBy(
+                    database -> database.database().name(),
+                    Collectors.maxBy(Comparator.comparing(DatabaseConfigurationMetadata::modifiedDate))
+                )
             )
-        );
+            .values()
+            .stream()
+            .filter(Optional::isPresent)
+            .map(Optional::get)
+            .filter(database -> preExistingNames.contains(database.database().name()) == false)
+            .toList();
     }
 
     @Override
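The dedup pipeline groups by database name and keeps the entry with the greatest `modifiedDate` per group. A plain-JDK illustration of the same `groupingBy`/`maxBy` shape on toy data (the record here is hypothetical, standing in for `DatabaseConfigurationMetadata`):

```java
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

public class DedupSketch {
    record Db(String name, long modifiedDate) {}

    public static void main(String[] args) {
        // Same shape as the production stream: group by name, keep the newest per group.
        List<Db> reported = List.of(new Db("city", 1), new Db("city", 1000), new Db("city", 3), new Db("asn", 5));
        Map<String, Optional<Db>> newestByName = reported.stream()
            .collect(Collectors.groupingBy(Db::name, Collectors.maxBy(Comparator.comparing(Db::modifiedDate))));
        System.out.println(newestByName.get("city").orElseThrow().modifiedDate()); // 1000, the newest report wins
    }
}
```

This is exactly the behavior the unit test added later in this diff pins down with three nodes reporting the same database at modified dates 1, 1000, and 3.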
@@ -157,7 +263,48 @@ protected GetDatabaseConfigurationAction.NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException {
 
     @Override
     protected GetDatabaseConfigurationAction.NodeResponse nodeOperation(GetDatabaseConfigurationAction.NodeRequest request, Task task) {
-        return new GetDatabaseConfigurationAction.NodeResponse(transportService.getLocalNode(), List.of());
+        final Set<String> ids;
+        if (request.getDatabaseIds().length == 0) {
+            // if we did not ask for a specific name, then return all databases
+            ids = Set.of("*");
+        } else {
+            ids = new LinkedHashSet<>(Arrays.asList(request.getDatabaseIds()));
+        }
+        if (ids.size() > 1 && ids.stream().anyMatch(Regex::isSimpleMatchPattern)) {
+            throw new IllegalArgumentException(
+                "wildcard only supports a single value, please use comma-separated values or a single wildcard value"
+            );
+        }
+
+        List<DatabaseConfigurationMetadata> results = new ArrayList<>();
+        for (String id : ids) {
+            results.addAll(getLocalDatabases(databaseNodeService, id));
+        }
+        return new GetDatabaseConfigurationAction.NodeResponse(transportService.getLocalNode(), results);
     }
 
+    /*
+     * This returns information about the databases that users have put in the config/ingest-geoip directory on the node.
+     */
+    private static List<DatabaseConfigurationMetadata> getLocalDatabases(DatabaseNodeService databaseNodeService, String id) {
+        List<DatabaseConfigurationMetadata> localDatabases = new ArrayList<>();
+        Map<String, DatabaseNodeService.ConfigDatabaseDetail> configDatabases = databaseNodeService.getConfigDatabasesDetail();
+        for (DatabaseNodeService.ConfigDatabaseDetail configDatabase : configDatabases.values()) {
+            String databaseId = getDatabaseIdForFileName(DatabaseConfiguration.Local.NAME, configDatabase.name());
+            if ((Regex.isSimpleMatchPattern(id) && Regex.simpleMatch(id, databaseId)) || id.equals(databaseId)) {
+                localDatabases.add(
+                    new DatabaseConfigurationMetadata(
+                        new DatabaseConfiguration(
+                            databaseId,
+                            getDatabaseNameForFileName(configDatabase.name()),
+                            new DatabaseConfiguration.Local(configDatabase.type())
+                        ),
+                        -1,
+                        configDatabase.buildDateInMillis() == null ? -1 : configDatabase.buildDateInMillis()
+                    )
+                );
+            }
+        }
+        return localDatabases;
+    }
 }
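Note the id handling in `nodeOperation` above: explicit ids may be comma-separated, but a wildcard pattern must stand alone; `Regex.isSimpleMatchPattern` simply detects a `*`. A standalone sketch of the guard with hypothetical ids:

```java
import org.elasticsearch.common.regex.Regex;

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class WildcardGuardSketch {
    public static void main(String[] args) {
        Set<String> ids = new LinkedHashSet<>(List.of("my_database_*", "my_database_2")); // hypothetical ids
        // A pattern mixed with any second value trips the same error as in nodeOperation.
        if (ids.size() > 1 && ids.stream().anyMatch(Regex::isSimpleMatchPattern)) {
            throw new IllegalArgumentException(
                "wildcard only supports a single value, please use comma-separated values or a single wildcard value"
            );
        }
    }
}
```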
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ConfigDatabasesTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ConfigDatabasesTests.java
index 83b3d2cfbbc27..7f38a37b43edf 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ConfigDatabasesTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ConfigDatabasesTests.java
@@ -126,7 +126,7 @@ public void testDatabasesUpdateExistingConfDatabase() throws Exception {
             DatabaseReaderLazyLoader loader = configDatabases.getDatabase("GeoLite2-City.mmdb");
             assertThat(loader.getDatabaseType(), equalTo("GeoLite2-City"));
 
-            CityResponse cityResponse = loader.getCity("89.160.20.128");
+            CityResponse cityResponse = loader.getResponse("89.160.20.128", GeoIpTestUtils::getCity);
             assertThat(cityResponse.getCity().getName(), equalTo("Tumba"));
             assertThat(cache.count(), equalTo(1));
         }
@@ -138,7 +138,7 @@ public void testDatabasesUpdateExistingConfDatabase() throws Exception {
             DatabaseReaderLazyLoader loader = configDatabases.getDatabase("GeoLite2-City.mmdb");
             assertThat(loader.getDatabaseType(), equalTo("GeoLite2-City"));
 
-            CityResponse cityResponse = loader.getCity("89.160.20.128");
+            CityResponse cityResponse = loader.getResponse("89.160.20.128", GeoIpTestUtils::getCity);
             assertThat(cityResponse.getCity().getName(), equalTo("Linköping"));
             assertThat(cache.count(), equalTo(1));
         });
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
index f5c3c08579855..793754ec316b2 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
@@ -14,7 +14,6 @@
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.RandomDocumentPicks;
-import org.elasticsearch.ingest.geoip.Database.Property;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.After;
 import org.junit.Before;
@@ -25,7 +24,6 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
@@ -40,7 +38,9 @@
 
 public class GeoIpProcessorTests extends ESTestCase {
 
-    private static final Set<Property> ALL_PROPERTIES = Set.of(Property.values());
+    private static IpDataLookup ipDataLookupAll(final Database database) {
+        return IpDataLookupFactories.getMaxmindLookup(database).apply(database.properties());
+    }
 
     // a temporary directory that mmdb files can be copied to and read from
     private Path tmpDir;
@@ -82,7 +82,7 @@ public void testCity() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -115,7 +115,7 @@ public void testNullValueWithIgnoreMissing() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             true,
             false,
             "filename"
@@ -137,7 +137,7 @@ public void testNonExistentWithIgnoreMissing() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             true,
             false,
             "filename"
@@ -156,7 +156,7 @@ public void testNullWithoutIgnoreMissing() {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -178,7 +178,7 @@ public void testNonExistentWithoutIgnoreMissing() {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -198,7 +198,7 @@ public void testCity_withIpV6() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -235,7 +235,7 @@ public void testCityWithMissingLocation() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -263,7 +263,7 @@ public void testCountry() throws Exception {
             loader("GeoLite2-Country.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.Country),
             false,
             false,
             "filename"
@@ -295,7 +295,7 @@ public void testCountryWithMissingLocation() throws Exception {
             loader("GeoLite2-Country.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.Country),
             false,
             false,
             "filename"
@@ -323,7 +323,7 @@ public void testAsn() throws Exception {
             loader("GeoLite2-ASN.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.Asn),
             false,
             false,
             "filename"
@@ -354,7 +354,7 @@ public void testAnonymmousIp() throws Exception {
             loader("GeoIP2-Anonymous-IP-Test.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.AnonymousIp),
             false,
             false,
             "filename"
@@ -388,7 +388,7 @@ public void testConnectionType() throws Exception {
             loader("GeoIP2-Connection-Type-Test.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.ConnectionType),
             false,
             false,
             "filename"
@@ -417,7 +417,7 @@ public void testDomain() throws Exception {
             loader("GeoIP2-Domain-Test.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.Domain),
             false,
             false,
             "filename"
@@ -446,7 +446,7 @@ public void testEnterprise() throws Exception {
             loader("GeoIP2-Enterprise-Test.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.Enterprise),
             false,
             false,
             "filename"
@@ -497,7 +497,7 @@ public void testIsp() throws Exception {
             loader("GeoIP2-ISP-Test.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.Isp),
             false,
             false,
             "filename"
@@ -531,7 +531,7 @@ public void testAddressIsNotInTheDatabase() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -555,7 +555,7 @@ public void testInvalid() {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -576,7 +576,7 @@ public void testListAllValid() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -603,7 +603,7 @@ public void testListPartiallyValid() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -630,7 +630,7 @@ public void testListNoMatches() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "filename"
@@ -650,7 +650,7 @@ public void testListDatabaseReferenceCounting() throws Exception {
         GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", () -> {
             loader.preLookup();
             return loader;
-        }, () -> true, "target_field", ALL_PROPERTIES, false, false, "filename");
+        }, () -> true, "target_field", ipDataLookupAll(Database.City), false, false, "filename");
 
         Map<String, Object> document = new HashMap<>();
         document.put("source_field", List.of("8.8.8.8", "82.171.64.0"));
@@ -678,7 +678,7 @@ public void testListFirstOnly() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             true,
             "filename"
@@ -703,7 +703,7 @@ public void testListFirstOnlyNoMatches() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             true,
             "filename"
@@ -725,7 +725,7 @@ public void testInvalidDatabase() throws Exception {
             loader("GeoLite2-City.mmdb"),
             () -> false,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             true,
             "filename"
@@ -748,7 +748,7 @@ public void testNoDatabase() throws Exception {
             () -> null,
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             false,
             false,
             "GeoLite2-City"
@@ -771,7 +771,7 @@ public void testNoDatabase_ignoreMissing() throws Exception {
             () -> null,
             () -> true,
             "target_field",
-            ALL_PROPERTIES,
+            ipDataLookupAll(Database.City),
             true,
             false,
             "GeoLite2-City"
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpTestUtils.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpTestUtils.java
index 461983bb24488..160671fd39001 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpTestUtils.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpTestUtils.java
@@ -9,6 +9,13 @@
 
 package org.elasticsearch.ingest.geoip;
 
+import com.maxmind.db.DatabaseRecord;
+import com.maxmind.db.Reader;
+import com.maxmind.geoip2.model.CityResponse;
+import com.maxmind.geoip2.model.CountryResponse;
+
+import org.elasticsearch.common.CheckedBiFunction;
+import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.core.SuppressForbidden;
 
 import java.io.FileNotFoundException;
@@ -17,6 +24,7 @@
 import java.io.UncheckedIOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.List;
 import java.util.Set;
 
 import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
@@ -58,4 +66,28 @@ public static void copyDefaultDatabases(final Path directory, ConfigDatabases configDatabases) {
             configDatabases.updateDatabase(directory.resolve(database), true);
         }
     }
+
+    /**
+     * A static city-specific responseProvider for use with {@link IpDatabase#getResponse(String, CheckedBiFunction)} in
+     * tests.
+     * <p>
+     * Like this: {@code CityResponse city = loader.getResponse("some.ip.address", GeoIpTestUtils::getCity);}
+     */
+    public static CityResponse getCity(Reader reader, String ip) throws IOException {
+        DatabaseRecord<CityResponse> record = reader.getRecord(InetAddresses.forString(ip), CityResponse.class);
+        CityResponse data = record.getData();
+        return data == null ? null : new CityResponse(data, ip, record.getNetwork(), List.of("en"));
+    }
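This helper replaces the old `loader.getCity(ip)` convenience with the generic `getResponse(ip, responseProvider)` hook, which is exactly how the `ConfigDatabasesTests` hunks earlier in this diff now read. Inside such a test, once the loader is opened on a City database as in those hunks, the call shape is (a fragment meant to live inside an existing test method, not a self-contained program):

```java
// "89.160.20.128" is the test address the ConfigDatabasesTests hunks above
// resolve against the bundled GeoLite2-City test database.
CityResponse cityResponse = loader.getResponse("89.160.20.128", GeoIpTestUtils::getCity);
assertThat(cityResponse.getCity().getName(), equalTo("Tumba"));
```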
+
+    /**
+     * A static country-specific responseProvider for use with {@link IpDatabase#getResponse(String, CheckedBiFunction)} in
+     * tests.
+     * <p>
+     * Like this: {@code CountryResponse country = loader.getResponse("some.ip.address", GeoIpTestUtils::getCountry);}
+     */
+    public static CountryResponse getCountry(Reader reader, String ip) throws IOException {
+        DatabaseRecord<CountryResponse> record = reader.getRecord(InetAddresses.forString(ip), CountryResponse.class);
+        CountryResponse data = record.getData();
+        return data == null ? null : new CountryResponse(data, ip, record.getNetwork(), List.of("en"));
+    }
 }
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadataTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadataTests.java
index 231a2a856815c..6a98cd532604b 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadataTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadataTests.java
@@ -9,6 +9,7 @@
 
 package org.elasticsearch.ingest.geoip;
 
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration;
 import org.elasticsearch.ingest.geoip.direct.DatabaseConfigurationMetadata;
@@ -21,6 +22,12 @@
 import java.util.Map;
 
 public class IngestGeoIpMetadataTests extends AbstractChunkedSerializingTestCase<IngestGeoIpMetadata> {
+
+    @Override
+    protected NamedWriteableRegistry getNamedWriteableRegistry() {
+        return new NamedWriteableRegistry(new IngestGeoIpPlugin().getNamedWriteables());
+    }
+
     @Override
     protected IngestGeoIpMetadata doParseInstance(XContentParser parser) throws IOException {
         return IngestGeoIpMetadata.fromXContent(parser);
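Serialization test cases that round-trip these objects have to override `getNamedWriteableRegistry()` as above, or reading the polymorphic provider back would fail; the same override recurs in the two `direct` package test classes below. Spelled out, the plugin's named writeables presumably boil down to one registry entry per provider name, something like this sketch (the plugin remains the source of truth for the actual list):

```java
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration;

import java.util.List;

class ProviderRegistrySketch {
    static NamedWriteableRegistry registry() {
        // One entry per provider name; each record's StreamInput constructor is its reader.
        List<NamedWriteableRegistry.Entry> entries = List.of(
            new NamedWriteableRegistry.Entry(
                DatabaseConfiguration.Provider.class, DatabaseConfiguration.Maxmind.NAME, DatabaseConfiguration.Maxmind::new),
            new NamedWriteableRegistry.Entry(
                DatabaseConfiguration.Provider.class, DatabaseConfiguration.Web.NAME, DatabaseConfiguration.Web::new),
            new NamedWriteableRegistry.Entry(
                DatabaseConfiguration.Provider.class, DatabaseConfiguration.Local.NAME, DatabaseConfiguration.Local::new)
        );
        return new NamedWriteableRegistry(entries);
    }
}
```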
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java
index f1c7d809b98fe..46a34c2cdad56 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java
@@ -116,6 +116,6 @@ public void testDatabaseTypeParsing() throws IOException {
     }
 
     private Database parseDatabaseFromType(String databaseFile) throws IOException {
-        return Database.getDatabase(MMDBUtil.getDatabaseType(tmpDir.resolve(databaseFile)), null);
+        return IpDataLookupFactories.getDatabase(MMDBUtil.getDatabaseType(tmpDir.resolve(databaseFile)));
     }
 }
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
index ec05054615bd8..84ea5fd584352 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
@@ -468,36 +468,6 @@ public void testUnknownMaxMindResponseClassess() {
         );
     }
 
-    /*
-     * This tests that this test has a mapping in TYPE_TO_MAX_MIND_CLASS for all MaxMind classes exposed through IpDatabase.
-     */
-    public void testUsedMaxMindResponseClassesAreAccountedFor() {
-        Set<Class<? extends AbstractResponse>> usedMaxMindResponseClasses = getUsedMaxMindResponseClasses();
-        Set<Class<? extends AbstractResponse>> supportedMaxMindClasses = new HashSet<>(TYPE_TO_MAX_MIND_CLASS.values());
-        Set<Class<? extends AbstractResponse>> usedButNotSupportedMaxMindResponseClasses = Sets.difference(
-            usedMaxMindResponseClasses,
-            supportedMaxMindClasses
-        );
-        assertThat(
-            "IpDatabase exposes MaxMind response classes that this test does not know what to do with. Add mappings to "
-                + "TYPE_TO_MAX_MIND_CLASS for the following: "
-                + usedButNotSupportedMaxMindResponseClasses,
-            usedButNotSupportedMaxMindResponseClasses,
-            empty()
-        );
-        Set<Class<? extends AbstractResponse>> supportedButNotUsedMaxMindClasses = Sets.difference(
-            supportedMaxMindClasses,
-            usedMaxMindResponseClasses
-        );
-        assertThat(
-            "This test claims to support MaxMind response classes that are not exposed in IpDatabase. Remove the following from "
-                + "TYPE_TO_MAX_MIND_CLASS: "
-                + supportedButNotUsedMaxMindClasses,
-            supportedButNotUsedMaxMindClasses,
-            empty()
-        );
-    }
-
     /*
      * This is the list of field types that causes us to stop recursing. That is, fields of these types are the lowest-level fields that
      * we care about.
@@ -616,23 +586,4 @@ private static String getFormattedList(Set<String> fields) {
         }
         return result.toString();
     }
-
-    /*
-     * This returns all AbstractResponse classes that are returned from getter methods on IpDatabase.
-     */
-    private static Set<Class<? extends AbstractResponse>> getUsedMaxMindResponseClasses() {
-        Set<Class<? extends AbstractResponse>> result = new HashSet<>();
-        Method[] methods = IpDatabase.class.getMethods();
-        for (Method method : methods) {
-            if (method.getName().startsWith("get")) {
-                Class<?> returnType = method.getReturnType();
-                try {
-                    result.add(returnType.asSubclass(AbstractResponse.class));
-                } catch (ClassCastException ignore) {
-                    // This is not what we were looking for, move on
-                }
-            }
-        }
-        return result;
-    }
 }
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadataTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadataTests.java
index 847f9c5bf7d4a..476a30d86ee05 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadataTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadataTests.java
@@ -9,7 +9,9 @@
 
 package org.elasticsearch.ingest.geoip.direct;
 
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin;
 import org.elasticsearch.test.AbstractXContentSerializingTestCase;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xcontent.XContentParser;
@@ -21,6 +23,11 @@
 
 public class DatabaseConfigurationMetadataTests extends AbstractXContentSerializingTestCase<DatabaseConfigurationMetadata> {
 
+    @Override
+    protected NamedWriteableRegistry getNamedWriteableRegistry() {
+        return new NamedWriteableRegistry(new IngestGeoIpPlugin().getNamedWriteables());
+    }
+
     private String id;
 
     @Override
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java
index bb11f71b26d03..33356ad4235dc 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java
@@ -9,8 +9,12 @@
 
 package org.elasticsearch.ingest.geoip.direct;
 
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin;
+import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Local;
 import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Maxmind;
+import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Web;
 import org.elasticsearch.test.AbstractXContentSerializingTestCase;
 import org.elasticsearch.xcontent.XContentParser;
 
@@ -21,6 +25,11 @@
 
 public class DatabaseConfigurationTests extends AbstractXContentSerializingTestCase<DatabaseConfiguration> {
 
+    @Override
+    protected NamedWriteableRegistry getNamedWriteableRegistry() {
+        return new NamedWriteableRegistry(new IngestGeoIpPlugin().getNamedWriteables());
+    }
+
     private String id;
 
     @Override
@@ -35,26 +44,39 @@ protected DatabaseConfiguration createTestInstance() {
     }
 
     public static DatabaseConfiguration randomDatabaseConfiguration(String id) {
-        return new DatabaseConfiguration(id, randomFrom(MAXMIND_NAMES), new Maxmind(randomAlphaOfLength(5)));
+        DatabaseConfiguration.Provider provider = switch (between(0, 2)) {
+            case 0 -> new Maxmind(randomAlphaOfLength(5));
+            case 1 -> new Web();
+            case 2 -> new Local(randomAlphaOfLength(10));
+            default -> throw new AssertionError("failure, got illegal switch case");
+        };
+        return new DatabaseConfiguration(id, randomFrom(MAXMIND_NAMES), provider);
     }
 
     @Override
     protected DatabaseConfiguration mutateInstance(DatabaseConfiguration instance) {
         switch (between(0, 2)) {
             case 0:
-                return new DatabaseConfiguration(instance.id() + randomAlphaOfLength(2), instance.name(), instance.maxmind());
+                return new DatabaseConfiguration(instance.id() + randomAlphaOfLength(2), instance.name(), instance.provider());
             case 1:
                 return new DatabaseConfiguration(
                     instance.id(),
                     randomValueOtherThan(instance.name(), () -> randomFrom(MAXMIND_NAMES)),
-                    instance.maxmind()
+                    instance.provider()
                 );
             case 2:
-                return new DatabaseConfiguration(
-                    instance.id(),
-                    instance.name(),
-                    new Maxmind(instance.maxmind().accountId() + randomAlphaOfLength(2))
-                );
+                DatabaseConfiguration.Provider provider = instance.provider();
+                DatabaseConfiguration.Provider modifiedProvider;
+                if (provider instanceof Maxmind maxmind) {
+                    modifiedProvider = new Maxmind(maxmind.accountId() + randomAlphaOfLength(2));
+                } else if (provider instanceof Web) {
+                    modifiedProvider = new Maxmind(randomAlphaOfLength(20)); // can't modify a Web
+                } else if (provider instanceof Local local) {
+                    modifiedProvider = new Local(local.type() + randomAlphaOfLength(2));
+                } else {
+                    throw new AssertionError("Unexpected provider type: " + provider.getClass());
+                }
+                return new DatabaseConfiguration(instance.id(), instance.name(), modifiedProvider);
             default:
                 throw new AssertionError("failure, got illegal switch case");
         }
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationActionTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationActionTests.java
new file mode 100644
index 0000000000000..988b50311186d
--- /dev/null
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationActionTests.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.ingest.geoip.direct;
+
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.ingest.geoip.direct.GetDatabaseConfigurationAction.NodeResponse;
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.Mockito.mock;
+
+public class TransportGetDatabaseConfigurationActionTests extends ESTestCase {
+    public void testDeduplicateNodeResponses() {
+        {
+            List<NodeResponse> nodeResponses = new ArrayList<>();
+            Set<String> preExistingNames = Set.of();
+            Collection<DatabaseConfigurationMetadata> deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+                nodeResponses,
+                preExistingNames
+            );
+            assertTrue(deduplicated.isEmpty());
+        }
+        {
+            List<NodeResponse> nodeResponses = List.of(
+                generateTestNodeResponse(List.of()),
+                generateTestNodeResponse(List.of()),
+                generateTestNodeResponse(List.of())
+            );
+            Set<String> preExistingNames = Set.of();
+            Collection<DatabaseConfigurationMetadata> deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+                nodeResponses,
+                preExistingNames
+            );
+            assertTrue(deduplicated.isEmpty());
+        }
+        {
+            // 3 nodes with 3 overlapping responses. We expect the deduplicated collection to include 1, 2, 3, and 4.
+            List<NodeResponse> nodeResponses = List.of(
+                generateTestNodeResponse(List.of("1", "2", "3")),
+                generateTestNodeResponse(List.of("1", "2", "3")),
+                generateTestNodeResponse(List.of("1", "4"))
+            );
+            Set<String> preExistingNames = Set.of();
+            Collection<DatabaseConfigurationMetadata> deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+                nodeResponses,
+                preExistingNames
+            );
+            assertThat(deduplicated.size(), equalTo(4));
+            assertThat(
+                deduplicated.stream().map(database -> database.database().name()).collect(Collectors.toSet()),
+                equalTo(Set.of("1", "2", "3", "4"))
+            );
+        }
+        {
+            /*
+             * 3 nodes with 3 overlapping responses, but this time we're also passing in a set of pre-existing names that overlap with
+             * two of them. So we expect the deduplicated collection to include 1 and 4.
+             */
+            List<NodeResponse> nodeResponses = List.of(
+                generateTestNodeResponse(List.of("1", "2", "3")),
+                generateTestNodeResponse(List.of("1", "2", "3")),
+                generateTestNodeResponse(List.of("1", "4"))
+            );
+            Set<String> preExistingNames = Set.of("2", "3", "5");
+            Collection<DatabaseConfigurationMetadata> deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+                nodeResponses,
+                preExistingNames
+            );
+            assertThat(deduplicated.size(), equalTo(2));
+            assertThat(
+                deduplicated.stream().map(database -> database.database().name()).collect(Collectors.toSet()),
+                equalTo(Set.of("1", "4"))
+            );
+        }
+        {
+            /*
+             * Here 3 nodes report the same database, but with different modified dates and versions. We expect the one with the highest
+             * modified date to win out.
+             */
+            List<NodeResponse> nodeResponses = List.of(
+                generateTestNodeResponseFromDatabases(List.of(generateTestDatabase("1", 1))),
+                generateTestNodeResponseFromDatabases(List.of(generateTestDatabase("1", 1000))),
+                generateTestNodeResponseFromDatabases(List.of(generateTestDatabase("1", 3)))
+            );
+            Set<String> preExistingNames = Set.of("2", "3", "5");
+            Collection<DatabaseConfigurationMetadata> deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+                nodeResponses,
+                preExistingNames
+            );
+            assertThat(deduplicated.size(), equalTo(1));
+            DatabaseConfigurationMetadata result = deduplicated.iterator().next();
+            assertThat(result, equalTo(nodeResponses.get(1).getDatabases().get(0)));
+        }
+    }
+
+    private NodeResponse generateTestNodeResponse(List<String> databaseNames) {
+        List<DatabaseConfigurationMetadata> databases = databaseNames.stream().map(this::generateTestDatabase).toList();
+        return generateTestNodeResponseFromDatabases(databases);
+    }
+
+    private NodeResponse generateTestNodeResponseFromDatabases(List<DatabaseConfigurationMetadata> databases) {
+        DiscoveryNode discoveryNode = mock(DiscoveryNode.class);
+        return new NodeResponse(discoveryNode, databases);
+    }
+
+    private DatabaseConfigurationMetadata generateTestDatabase(String databaseName) {
+        return generateTestDatabase(databaseName, randomLongBetween(0, Long.MAX_VALUE));
+    }
+
+    private DatabaseConfigurationMetadata generateTestDatabase(String databaseName, long modifiedDate) {
+        DatabaseConfiguration databaseConfiguration = new DatabaseConfiguration(
+            randomAlphaOfLength(50),
+            databaseName,
+            new DatabaseConfiguration.Local(randomAlphaOfLength(20))
+        );
+        return new DatabaseConfigurationMetadata(databaseConfiguration, randomLongBetween(0, Long.MAX_VALUE), modifiedDate);
+    }
+}
diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml
index 6809443fdfbc3..04fd2ac6a8189 100644
--- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml
+++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml
@@ -1,7 +1,7 @@
 setup:
   - requires:
-      cluster_features: ["geoip.downloader.database.configuration"]
-      reason: "geoip downloader database configuration APIs added in 8.15"
+      cluster_features: ["geoip.downloader.database.configuration", "get_database_configuration_action.multi_node"]
+      reason: "geoip downloader database configuration APIs added in 8.15, and updated in 8.16 to return more results"
 
 ---
 "Test adding, getting, and removing geoip databases":
@@ -41,6 +41,17 @@ setup:
       }
   - match: { acknowledged: true }
 
+  - do:
+      catch: /illegal_argument_exception/
+      ingest.put_geoip_database:
+        id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI="
+        body: >
+          {
+            "name": "GeoIP2-City",
+            "web": {
+            }
+          }
+
   - do:
       ingest.get_geoip_database:
         id: "my_database_1"
@@ -52,19 +63,37 @@ setup:
 
   - do:
       ingest.get_geoip_database: {}
-  - length: { databases: 2 }
+  - length: { databases: 6 }
 
   - do:
       ingest.get_geoip_database:
        id: "my_database_1,my_database_2"
   - length: { databases: 2 }
 
+  - do:
+      ingest.get_geoip_database:
+        id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI="
+  - length: { databases: 1 }
+  - match: { databases.0.id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI=" }
+  - gte: { databases.0.modified_date_millis: -1 }
+  - match: { databases.0.database.name: "MyCustomGeoLite2-City" }
+
   - do:
       ingest.delete_geoip_database:
         id: "my_database_1"
 
+  - do:
+      catch: /resource_not_found_exception/
+      ingest.delete_geoip_database:
+        id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI="
+
   - do:
       ingest.get_geoip_database: {}
+  - length: { databases: 5 }
+
+  - do:
+      ingest.get_geoip_database:
+        id: "my_database_2"
   - length: { databases: 1 }
   - match: { databases.0.id: "my_database_2" }
   - gte: { databases.0.modified_date_millis: 0 }
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
index ecc84ddca2e13..9bb80d5688b5f 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
@@ -107,7 +107,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 
     void innerToXContent(XContentBuilder builder, Params params) throws IOException {
         if (hasResponse()) {
-            ChunkedToXContent.wrapAsToXContent(p -> response.innerToXContentChunked(p)).toXContent(builder, params);
+            ChunkedToXContent.wrapAsToXContent(response::innerToXContentChunked).toXContent(builder, params);
         } else {
             // we can assume the template is always json as we convert it before compiling it
             try (InputStream stream = source.streamInput()) {
diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
index 429a81b02bd5e..6b4dd5ed86e2d 100644
--- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
+++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
@@ -235,7 +235,6 @@ public void testAbortRequestStats() throws Exception {
     }
 
     @TestIssueLogging(issueUrl = "https://github.com/elastic/elasticsearch/issues/101608", value = "com.amazonaws.request:DEBUG")
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101608")
     public void testMetrics() throws Exception {
         // Create the repository and perform some activities
         final String repository = createRepository(randomRepositoryName(), false);
diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
index f919284d8e897..af385eeac6a5b 100644
--- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
+++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
@@ -140,6 +140,11 @@ class S3Repository extends MeteredBlobStoreRepository {
         MAX_FILE_SIZE_USING_MULTIPART
     );
 
+    /**
+     * Maximum number of parts for a multipart upload (see https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html).
+     */
+    static final Setting<Integer> MAX_MULTIPART_PARTS = Setting.intSetting("max_multipart_parts", 10_000, 1, 10_000);
+
     /**
      * Sets the S3 storage class type for the backup files. Values may be standard, reduced_redundancy,
      * standard_ia, onezone_ia and intelligent_tiering. Defaults to standard.
@@ -253,7 +258,9 @@ class S3Repository extends MeteredBlobStoreRepository {
         }
 
         this.bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings());
-        this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
+        var maxChunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
+        var maxPartsNum = MAX_MULTIPART_PARTS.get(metadata.settings());
+        this.chunkSize = objectSizeLimit(maxChunkSize, bufferSize, maxPartsNum);
 
         // We make sure that chunkSize is greater than or equal to bufferSize
         if (this.chunkSize.getBytes() < bufferSize.getBytes()) {
@@ -302,6 +309,20 @@ private static Map<String, String> buildLocation(RepositoryMetadata metadata) {
         return Map.of("base_path", BASE_PATH_SETTING.get(metadata.settings()), "bucket", BUCKET_SETTING.get(metadata.settings()));
     }
 
+    /**
+     * Calculates the S3 object size limit based on two constraints: the maximum object (chunk) size
+     * and the maximum number of parts for a multipart upload
+     * (https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html).
+     *
+     * @param chunkSize   s3 object size
+     * @param bufferSize  s3 multipart upload part size
+     * @param maxPartsNum s3 multipart upload max parts number
+     */
+    private static ByteSizeValue objectSizeLimit(ByteSizeValue chunkSize, ByteSizeValue bufferSize, int maxPartsNum) {
+        var bytes = Math.min(chunkSize.getBytes(), bufferSize.getBytes() * maxPartsNum);
+        return ByteSizeValue.ofBytes(bytes);
+    }
+
     /**
      * Holds a reference to delayed repository operation {@link Scheduler.Cancellable} so it can be cancelled should the repository be
     * closed concurrently.
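The effective chunk size is therefore `min(chunk_size, buffer_size * max_multipart_parts)`. Working through the numbers used by the tests that follow: with `buffer_size: 100MB` and 10,000 parts, the multipart ceiling is 100MB x 10,000 = 1,000,000 MB (roughly 1TB), so a 5TB `chunk_size` is clamped to `ByteSizeValue.ofMb(1_000_000)` while a 1GB `chunk_size` is left alone. A standalone arithmetic sketch in plain Java, without the ES types:

```java
// Plain-Java sketch of objectSizeLimit's arithmetic, using the values from
// testChunkSizeLimit and testPartsNumLimit below.
public class S3ChunkLimitMath {
    public static void main(String[] args) {
        long mb = 1024L * 1024L;
        long bufferSize = 100 * mb;   // buffer_size: 100MB
        int maxPartsNum = 10_000;     // max_multipart_parts

        long partsCeiling = bufferSize * maxPartsNum; // 1,000,000 MB, ~1TB
        long oneGb = 1024 * mb;                       // chunk_size: 1GB
        long fiveTb = 5L * 1024 * 1024 * mb;          // chunk_size: 5TB

        System.out.println(Math.min(oneGb, partsCeiling) == oneGb);            // true: chunk_size wins
        System.out.println(Math.min(fiveTb, partsCeiling) == 1_000_000 * mb);  // true: parts ceiling wins
    }
}
```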
diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
index 1eab59ebb0eb7..3817af4def888 100644
--- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
+++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
@@ -175,4 +175,37 @@ public void testAnalysisFailureDetail() {
         }
     }
 
+    // ensures that chunkSize is limited to the chunk_size setting, when buffer_size * parts_num is bigger
+    public void testChunkSizeLimit() {
+        var meta = new RepositoryMetadata(
+            "dummy-repo",
+            "mock",
+            Settings.builder()
+                .put(S3Repository.BUCKET_SETTING.getKey(), "bucket")
+                .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), "1GB")
+                .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), "100MB")
+                .put(S3Repository.MAX_MULTIPART_PARTS.getKey(), 10_000) // ~1TB
+                .build()
+        );
+        try (var repo = createS3Repo(meta)) {
+            assertEquals(ByteSizeValue.ofGb(1), repo.chunkSize());
+        }
+    }
+
+    // ensures that chunkSize is limited to buffer_size * parts_num, when the chunk_size setting is bigger
+    public void testPartsNumLimit() {
+        var meta = new RepositoryMetadata(
+            "dummy-repo",
+            "mock",
+            Settings.builder()
+                .put(S3Repository.BUCKET_SETTING.getKey(), "bucket")
+                .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), "5TB")
+                .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), "100MB")
+                .put(S3Repository.MAX_MULTIPART_PARTS.getKey(), 10_000)
+                .build()
+        );
+        try (var repo = createS3Repo(meta)) {
+            assertEquals(ByteSizeValue.ofMb(1_000_000), repo.chunkSize());
+        }
+    }
 }
diff --git a/muted-tests.yml b/muted-tests.yml
index 4305ebe3d2e02..8b756adce5457 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -26,9 +26,6 @@ tests:
 - class: org.elasticsearch.index.store.FsDirectoryFactoryTests
   method: testPreload
   issue: https://github.com/elastic/elasticsearch/issues/110211
-- class: org.elasticsearch.upgrades.SecurityIndexRolesMetadataMigrationIT
-  method: testMetadataMigratedAfterUpgrade
-  issue: https://github.com/elastic/elasticsearch/issues/110232
 - class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT
   method: testMinVersionAsNewVersion
   issue: https://github.com/elastic/elasticsearch/issues/95384
@@ -121,9 +118,6 @@ tests:
 - class: org.elasticsearch.xpack.ml.integration.MlJobIT
   method: testMultiIndexDelete
   issue: https://github.com/elastic/elasticsearch/issues/112381
-- class: org.elasticsearch.search.retriever.RankDocRetrieverBuilderIT
-  method: testRankDocsRetrieverWithNestedQuery
-  issue: https://github.com/elastic/elasticsearch/issues/112421
 - class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests
   method: "testAggregateIntermediate {TestCase= #2}"
   issue: https://github.com/elastic/elasticsearch/issues/112461
@@ -278,9 +272,6 @@ tests:
 - class: org.elasticsearch.xpack.ml.integration.MlJobIT
   method: testCreateJobsWithIndexNameOption
   issue: https://github.com/elastic/elasticsearch/issues/113528
-- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT
-  method: test {p0=dot_prefix/10_basic/Deprecated index template with a dot prefix index pattern}
-  issue: https://github.com/elastic/elasticsearch/issues/113529
 - class: org.elasticsearch.xpack.ml.integration.MlJobIT
   method: testCantCreateJobWithSameID
   issue: https://github.com/elastic/elasticsearch/issues/113581
@@ -351,6 +342,41 @@ tests:
 - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
   method: testPutE5WithTrainedModelAndInference
   issue: https://github.com/elastic/elasticsearch/issues/114023
+- class: org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilderIT
+  method: testRRFWithCollapse
+  issue: https://github.com/elastic/elasticsearch/issues/114074
+- class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
+  method: testPutE5Small_withPlatformSpecificVariant
+  issue: https://github.com/elastic/elasticsearch/issues/113950
+- class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests
+  method: testInfer_StreamRequest_ErrorResponse
+  issue: https://github.com/elastic/elasticsearch/issues/114105
+- class: org.elasticsearch.xpack.inference.InferenceCrudIT
+  method: testGet
+  issue: https://github.com/elastic/elasticsearch/issues/114135
+- class: org.elasticsearch.action.bulk.IncrementalBulkIT
+  method: testIncrementalBulkHighWatermarkBackOff
+  issue: https://github.com/elastic/elasticsearch/issues/114073
+- class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests
+  method: "testFold {TestCase= #7}"
+  issue: https://github.com/elastic/elasticsearch/issues/114175
+- class: org.elasticsearch.action.bulk.IncrementalBulkIT
+  method: testMultipleBulkPartsWithBackoff
+  issue: https://github.com/elastic/elasticsearch/issues/114181
+- class: org.elasticsearch.action.bulk.IncrementalBulkIT
+  method: testIncrementalBulkLowWatermarkBackOff
+  issue: https://github.com/elastic/elasticsearch/issues/114182
+- class: org.elasticsearch.aggregations.AggregationsClientYamlTestSuiteIT
+  method: test {yaml=aggregations/stats_metric_fail_formatting/fail formatting}
+  issue: https://github.com/elastic/elasticsearch/issues/114187
+- class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT
+  issue: https://github.com/elastic/elasticsearch/issues/114194
+- class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT
+  method: testStepInfoPreservedOnAutoRetry
+  issue: https://github.com/elastic/elasticsearch/issues/114220
+- class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests
+  method: testInfer_StreamRequest
+  issue: https://github.com/elastic/elasticsearch/issues/114232
 
 # Examples:
 #
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java
index aa669a45bc0c7..78ea619d81f84 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java
@@ -101,6 +101,7 @@ public void testEC2DiscoveryRetriesOnRateLimiting() throws IOException {
                     exchange.getResponseHeaders().set("Content-Type", "text/xml; charset=UTF-8");
                     exchange.sendResponseHeaders(HttpStatus.SC_OK, responseBody.length);
                     exchange.getResponseBody().write(responseBody);
+                    exchange.getResponseBody().flush();
                     return;
                 }
             }
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
index 8a6f6b84fec0d..135ddcee8da44 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
@@ -59,7 +59,7 @@ public class Ec2DiscoveryTests extends AbstractEC2MockAPITestCase {
     private static final String PREFIX_PUBLIC_IP = "8.8.8.";
     private static final String PREFIX_PRIVATE_IP = "10.0.0.";
 
-    private Map<String, TransportAddress> poorMansDNS = new ConcurrentHashMap<>();
+    private final Map<String, TransportAddress> poorMansDNS = new ConcurrentHashMap<>();
 
     protected MockTransportService createTransportService() {
         final Transport transport = new Netty4Transport(
@@ -133,7 +133,7 @@ protected List<TransportAddress> buildDynamicHosts(Settings nodeSettings, int nodes) {
                                 .stream()
                                 .filter(t -> t.getKey().equals(entry.getKey()))
                                 .map(Tag::getValue)
-                                .collect(Collectors.toList())
+                                .toList()
                                 .containsAll(entry.getValue())
                         )
                 )
@@ -144,6 +144,7 @@ protected List<TransportAddress> buildDynamicHosts(Settings nodeSettings, int nodes) {
                     exchange.getResponseHeaders().set("Content-Type", "text/xml; charset=UTF-8");
                     exchange.sendResponseHeaders(HttpStatus.SC_OK, responseBody.length);
                     exchange.getResponseBody().write(responseBody);
+                    exchange.getResponseBody().flush();
                     return;
                 }
             }
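Both mock EC2 endpoints now flush the response body explicitly after writing it. With `com.sun.net.httpserver`, closing the exchange would flush too, but these handlers return without closing, so the explicit flush ensures the full body reaches the client. A minimal standalone handler in the same style (the payload and handler name are hypothetical):

```java
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Minimal handler sketch mirroring the fixed pattern: write, then flush.
class FlushingHandler implements HttpHandler {
    @Override
    public void handle(HttpExchange exchange) throws IOException {
        byte[] responseBody = "<DescribeInstancesResponse/>".getBytes(StandardCharsets.UTF_8);
        exchange.getResponseHeaders().set("Content-Type", "text/xml; charset=UTF-8");
        exchange.sendResponseHeaders(200, responseBody.length);
        exchange.getResponseBody().write(responseBody);
        exchange.getResponseBody().flush(); // the line both hunks above add
    }
}
```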
+ (i + 1); @@ -217,7 +218,7 @@ public void testPrivateDns() throws InterruptedException { } } - public void testPublicDns() throws InterruptedException { + public void testPublicDns() { int nodes = randomInt(10); for (int i = 0; i < nodes; i++) { String instanceId = "node" + (i + 1); @@ -235,14 +236,14 @@ public void testPublicDns() throws InterruptedException { } } - public void testInvalidHostType() throws InterruptedException { + public void testInvalidHostType() { Settings nodeSettings = Settings.builder().put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "does_not_exist").build(); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { buildDynamicHosts(nodeSettings, 1); }); assertThat(exception.getMessage(), containsString("does_not_exist is unknown for discovery.ec2.host_type")); } - public void testFilterByTags() throws InterruptedException { + public void testFilterByTags() { int nodes = randomIntBetween(5, 10); Settings nodeSettings = Settings.builder().put(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod").build(); @@ -265,7 +266,7 @@ public void testFilterByTags() throws InterruptedException { assertThat(dynamicHosts, hasSize(prodInstances)); } - public void testFilterByMultipleTags() throws InterruptedException { + public void testFilterByMultipleTags() { int nodes = randomIntBetween(5, 10); Settings nodeSettings = Settings.builder().putList(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod").build(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 9255281e7e5da..8570662f7b523 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -1602,10 +1602,6 @@ public void testResize() throws Exception { @SuppressWarnings("unchecked") public void testSystemIndexMetadataIsUpgraded() throws Exception { - - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // assumeTrue can be removed (condition always true) - var originalClusterTaskIndexIsSystemIndex = oldClusterHasFeature(RestTestLegacyFeatures.TASK_INDEX_SYSTEM_INDEX); - assumeTrue(".tasks became a system index in 7.10.0", originalClusterTaskIndexIsSystemIndex); final String systemIndexWarning = "this request accesses system indices: [.tasks], but in a future major version, direct " + "access to system indices will be prevented by default"; if (isRunningAgainstOldCluster()) { @@ -1665,29 +1661,6 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception { throw new AssertionError(".tasks index does not exist yet"); } }); - - // If we are on 7.x create an alias that includes both a system index and a non-system index so we can be sure it gets - // upgraded properly. If we're already on 8.x, skip this part of the test. 
- if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_ENFORCED) == false) { - // Create an alias to make sure it gets upgraded properly - Request putAliasRequest = newXContentRequest(HttpMethod.POST, "/_aliases", (builder, params) -> { - builder.startArray("actions"); - for (var index : List.of(".tasks", "test_index_reindex")) { - builder.startObject() - .startObject("add") - .field("index", index) - .field("alias", "test-system-alias") - .endObject() - .endObject(); - } - return builder.endArray(); - }); - putAliasRequest.setOptions(expectVersionSpecificWarnings(v -> { - v.current(systemIndexWarning); - v.compatible(systemIndexWarning); - })); - assertThat(client().performRequest(putAliasRequest).getStatusLine().getStatusCode(), is(200)); - } } else { assertBusy(() -> { Request clusterStateRequest = new Request("GET", "/_cluster/state/metadata"); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index f588b78c78cc8..2a3e0c16fdc2f 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -170,7 +170,9 @@ public void test012SecurityCanBeDisabled() throws Exception { public void test020PluginsListWithNoPlugins() { assumeTrue( "Only applies to non-Cloud images", - distribution.packaging != Packaging.DOCKER_CLOUD && distribution().packaging != Packaging.DOCKER_CLOUD_ESS + distribution.packaging != Packaging.DOCKER_CLOUD + && distribution().packaging != Packaging.DOCKER_CLOUD_ESS + && distribution().packaging != Packaging.DOCKER_WOLFI_ESS ); final Installation.Executables bin = installation.executables(); @@ -201,7 +203,10 @@ public void test021InstallPlugin() { * Checks that ESS images can install plugins from the local archive. */ public void test022InstallPluginsFromLocalArchive() { - assumeTrue("Only ESS images have a local archive", distribution().packaging == Packaging.DOCKER_CLOUD_ESS); + assumeTrue( + "Only ESS images have a local archive", + distribution().packaging == Packaging.DOCKER_CLOUD_ESS || distribution().packaging == Packaging.DOCKER_WOLFI_ESS + ); final String plugin = "analysis-icu"; final Installation.Executables bin = installation.executables(); @@ -254,7 +259,10 @@ public void test023InstallPluginUsingConfigFile() { * Checks that ESS images can manage plugins from the local archive by deploying a plugins config file. 
*/ public void test024InstallPluginFromArchiveUsingConfigFile() { - assumeTrue("Only ESS image has a plugin archive", distribution().packaging == Packaging.DOCKER_CLOUD_ESS); + assumeTrue( + "Only ESS image has a plugin archive", + distribution().packaging == Packaging.DOCKER_CLOUD_ESS || distribution().packaging == Packaging.DOCKER_WOLFI_ESS + ); final String filename = "elasticsearch-plugins.yml"; append(tempDir.resolve(filename), """ @@ -386,7 +394,7 @@ public void test040JavaUsesTheOsProvidedKeystore() { if (distribution.packaging == Packaging.DOCKER_UBI || distribution.packaging == Packaging.DOCKER_IRON_BANK) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/pki/ca-trust/extracted/java/cacerts")); - } else if (distribution.packaging == Packaging.DOCKER_WOLFI) { + } else if (distribution.packaging == Packaging.DOCKER_WOLFI || distribution.packaging == Packaging.DOCKER_WOLFI_ESS) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/ssl/certs/java/cacerts")); } else { @@ -1113,8 +1121,10 @@ public void test170DefaultShellIsBash() { */ public void test171AdditionalCliOptionsAreForwarded() throws Exception { assumeTrue( - "Does not apply to Cloud images, because they don't use the default entrypoint", - distribution.packaging != Packaging.DOCKER_CLOUD && distribution().packaging != Packaging.DOCKER_CLOUD_ESS + "Does not apply to Cloud and wolfi ess images, because they don't use the default entrypoint", + distribution.packaging != Packaging.DOCKER_CLOUD + && distribution().packaging != Packaging.DOCKER_CLOUD_ESS + && distribution().packaging != Packaging.DOCKER_WOLFI_ESS ); runContainer(distribution(), builder().runArgs("bin/elasticsearch", "-Ecluster.name=kimchy").envVar("ELASTIC_PASSWORD", PASSWORD)); @@ -1201,7 +1211,11 @@ public void test310IronBankImageHasNoAdditionalLabels() throws Exception { * Check that the Cloud image contains the required Beats */ public void test400CloudImageBundlesBeats() { - assumeTrue(distribution.packaging == Packaging.DOCKER_CLOUD || distribution.packaging == Packaging.DOCKER_CLOUD_ESS); + assumeTrue( + distribution.packaging == Packaging.DOCKER_CLOUD + || distribution.packaging == Packaging.DOCKER_CLOUD_ESS + || distribution.packaging == Packaging.DOCKER_WOLFI_ESS + ); final List contents = listContents("/opt"); assertThat("Expected beats in /opt", contents, hasItems("filebeat", "metricbeat")); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index a988a446f561f..2aff1f258ed65 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -436,7 +436,7 @@ private void verifyKeystorePermissions() { switch (distribution.packaging) { case TAR, ZIP -> assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); case DEB, RPM -> assertThat(keystore, file(File, "root", "elasticsearch", p660)); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat( + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> assertThat( keystore, DockerFileMatcher.file(p660) ); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java 
b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index 644990105f60f..487a00bdecac9 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -245,7 +245,7 @@ protected static void install() throws Exception { installation = Packages.installPackage(sh, distribution); Packages.verifyPackageInstallation(installation, distribution, sh); } - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> { installation = Docker.runContainer(distribution); Docker.verifyContainerInstallation(installation); } @@ -338,6 +338,7 @@ public Shell.Result runElasticsearchStartCommand(String password, boolean daemon case DOCKER_CLOUD: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: + case DOCKER_WOLFI_ESS: // nothing, "installing" docker image is running it return Shell.NO_OP; default: @@ -361,6 +362,7 @@ public void stopElasticsearch() throws Exception { case DOCKER_CLOUD: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: + case DOCKER_WOLFI_ESS: // nothing, "installing" docker image is running it break; default: @@ -373,7 +375,8 @@ public void awaitElasticsearchStartup(Shell.Result result) throws Exception { switch (distribution.packaging) { case TAR, ZIP -> Archives.assertElasticsearchStarted(installation); case DEB, RPM -> Packages.assertElasticsearchStarted(sh, installation); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> Docker + .waitForElasticsearchToStart(); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java index 05cef4a0818ba..d63d956dc5199 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java @@ -39,6 +39,8 @@ public Distribution(Path path) { this.packaging = Packaging.DOCKER_CLOUD_ESS; } else if (filename.endsWith(".wolfi.tar")) { this.packaging = Packaging.DOCKER_WOLFI; + } else if (filename.endsWith(".wolfi-ess.tar")) { + this.packaging = Packaging.DOCKER_WOLFI_ESS; } else { int lastDot = filename.lastIndexOf('.'); this.packaging = Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT)); @@ -63,7 +65,7 @@ public boolean isPackage() { */ public boolean isDocker() { return switch (packaging) { - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> true; default -> false; }; } @@ -79,7 +81,8 @@ public enum Packaging { DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker()), DOCKER_CLOUD(".cloud.tar", Platforms.isDocker()), DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()), - DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()); + DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()), + DOCKER_WOLFI_ESS(".wolfi-ess.tar", Platforms.isDocker()); /** The extension of this distribution's file */ public final String extension; 
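[Editor's note, not part of the patch] The Distribution.java hunk above is the pivot of this change: every other file keys off the new DOCKER_WOLFI_ESS constant it introduces. A minimal sketch of the intended filename resolution, assuming the constructor logic shown above (the artifact file name here is hypothetical):

    // Hypothetical usage sketch, not part of the patch; assumes the packaging-test
    // classpath (org.elasticsearch.packaging.util.Distribution) and java.nio.file.Path.
    static void wolfiEssResolutionSketch() {
        Distribution distribution = new Distribution(Path.of("elasticsearch-8.16.0.wolfi-ess.tar"));
        // The explicit endsWith(".wolfi-ess.tar") branch must match first: the generic
        // fallback would otherwise uppercase the ".tar" extension and pick Packaging.TAR.
        assert distribution.packaging == Distribution.Packaging.DOCKER_WOLFI_ESS;
        // DOCKER_WOLFI_ESS is included in the isDocker() switch, so the Docker-only test
        // plumbing (runContainer, container verification) covers the new image as well.
        assert distribution.isDocker();
    }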
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java index c38eaa58f0552..6f7827663d46c 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java @@ -532,7 +532,9 @@ public static void verifyContainerInstallation(Installation es) { ) ); - if (es.distribution.packaging == Packaging.DOCKER_CLOUD || es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { + if (es.distribution.packaging == Packaging.DOCKER_CLOUD + || es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS + || es.distribution.packaging == Packaging.DOCKER_WOLFI_ESS) { verifyCloudContainerInstallation(es); } } @@ -541,7 +543,7 @@ private static void verifyCloudContainerInstallation(Installation es) { final String pluginArchive = "/opt/plugins/archive"; final List<String> plugins = listContents(pluginArchive); - if (es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { + if (es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS || es.distribution.packaging == Packaging.DOCKER_WOLFI_ESS) { assertThat("ESS image should come with plugins in " + pluginArchive, plugins, not(empty())); final List<String> repositoryPlugins = plugins.stream() diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index e562e7591564e..a1529de825804 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -168,6 +168,7 @@ public static String getImageName(Distribution distribution) { case DOCKER_CLOUD -> "-cloud"; case DOCKER_CLOUD_ESS -> "-cloud-ess"; case DOCKER_WOLFI -> "-wolfi"; + case DOCKER_WOLFI_ESS -> "-wolfi-ess"; default -> throw new IllegalStateException("Unexpected distribution packaging type: " + distribution.packaging); }; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java index dd8ecdd82ca7b..6a526a6dbfded 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.XContentTestUtils.JsonMapView; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.util.Map; @@ -87,25 +86,6 @@ public void testSystemIndicesUpgrades() throws Exception { throw new AssertionError(".tasks index does not exist yet"); } }); - - // If we are on 7.x create an alias that includes both a system index and a non-system index so we can be sure it gets - // upgraded properly. If we're already on 8.x, skip this part of the test.
- if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_ENFORCED) == false) { - // Create an alias to make sure it gets upgraded properly - Request putAliasRequest = new Request("POST", "/_aliases"); - putAliasRequest.setJsonEntity(""" - { - "actions": [ - {"add": {"index": ".tasks", "alias": "test-system-alias"}}, - {"add": {"index": "test_index_reindex", "alias": "test-system-alias"}} - ] - }"""); - putAliasRequest.setOptions(expectVersionSpecificWarnings(v -> { - v.current(systemIndexWarning); - v.compatible(systemIndexWarning); - })); - assertThat(client().performRequest(putAliasRequest).getStatusLine().getStatusCode(), is(200)); - } } else if (isUpgradedCluster()) { assertBusy(() -> { Request clusterStateRequest = new Request("GET", "/_cluster/state/metadata"); diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index 9c6a1ca2e96d2..b4672b1d8924d 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -606,3 +606,204 @@ setup: - match: { docs.0.doc._source.foo: "FOO" } - match: { docs.0.doc.executed_pipelines: [] } - not_exists: docs.0.doc.error + +--- +"Test ingest simulate with component template substitutions for data streams": + # In this test, we make sure that when the index template is a data stream template, simulate ingest works the same whether the data stream + # has been created or not -- either way, we expect it to use the template rather than the data stream / index mappings and settings.
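+ # (Editor's sketch, not part of the original test: concretely, the two simulate.ingest calls below are expected + # to return identical results before and after indices.create_data_stream, because simulate resolves mappings + # and settings from the matching index template plus the component_template_substitutions in the request body, + # not from any concrete backing index.)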
+ + - skip: + features: + - headers + - allowed_warnings + + - requires: + cluster_features: ["simulate.component.template.substitutions"] + reason: "ingest simulate component template substitutions added in 8.16" + + - do: + headers: + Content-Type: application/json + ingest.put_pipeline: + id: "foo-pipeline" + body: > + { + "processors": [ + { + "set": { + "field": "foo", + "value": true + } + } + ] + } + - match: { acknowledged: true } + + - do: + cluster.put_component_template: + name: mappings_template + body: + template: + mappings: + dynamic: strict + properties: + foo: + type: keyword + + - do: + cluster.put_component_template: + name: settings_template + body: + template: + settings: + index: + default_pipeline: "foo-pipeline" + + - do: + allowed_warnings: + - "index template [test-composable-1] has index patterns [foo*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation" + indices.put_index_template: + name: test-composable-1 + body: + index_patterns: + - foo* + composed_of: + - mappings_template + - settings_template + + - do: + allowed_warnings: + - "index template [my-template1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template1] will take precedence during new index creation" + indices.put_index_template: + name: my-template1 + body: + index_patterns: [simple-data-stream1] + composed_of: + - mappings_template + - settings_template + data_stream: {} + + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: simple-data-stream1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "@timestamp": 1234, + "foo": false + } + } + ], + "pipeline_substitutions": { + "foo-pipeline-2": { + "processors": [ + { + "set": { + "field": "foo", + "value": "FOO" + } + } + ] + } + }, + "component_template_substitutions": { + "settings_template": { + "template": { + "settings": { + "index": { + "default_pipeline": "foo-pipeline-2" + } + } + } + }, + "mappings_template": { + "template": { + "mappings": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "keyword" + } + } + } + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "simple-data-stream1" } + - match: { docs.0.doc._source.foo: "FOO" } + - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] } + - not_exists: docs.0.doc.error + + - do: + indices.create_data_stream: + name: simple-data-stream1 + - is_true: acknowledged + + - do: + cluster.health: + wait_for_status: yellow + + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: simple-data-stream1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "@timestamp": 1234, + "foo": false + } + } + ], + "pipeline_substitutions": { + "foo-pipeline-2": { + "processors": [ + { + "set": { + "field": "foo", + "value": "FOO" + } + } + ] + } + }, + "component_template_substitutions": { + "settings_template": { + "template": { + "settings": { + "index": { + "default_pipeline": "foo-pipeline-2" + } + } + } + }, + "mappings_template": { + "template": { + "mappings": { + "dynamic": "strict", + "properties": { + "foo": { + "type": "keyword" + } + } + } + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "simple-data-stream1" } + - match: { docs.0.doc._source.foo: "FOO" } + - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] } + - not_exists: 
docs.0.doc.error diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index ed1cf905f7e9d..a742e83255bbb 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -57,5 +57,4 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure({task -> task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") - task.skipTestsByFilePattern("indices.create/synthetic_source*.yml", "@UpdateForV9 -> tests do not pass after bumping API version to 9 [ES-9597]") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml index a999bb7816065..a871d2ac0ae15 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml @@ -197,7 +197,7 @@ empty object with unmapped fields: --- disabled root object: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.ignored_source.dont_expand_dots"] reason: requires tracking ignored source - do: @@ -222,17 +222,19 @@ disabled root object: index: test - match: { hits.total.value: 1 } - - match: { hits.hits.0._source.name: aaaa } - - match: { hits.hits.0._source.some_string: AaAa } - - match: { hits.hits.0._source.some_int: 1000 } - - match: { hits.hits.0._source.some_double: 123.456789 } - - match: { hits.hits.0._source.a.very.deeply.nested.field: AAAA } - + - match: + hits.hits.0._source: + name: aaaa + some_string: AaAa + some_int: 1000 + some_double: 123.456789 + some_bool: true + a.very.deeply.nested.field: AAAA --- disabled object: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.ignored_source.dont_expand_dots"] reason: requires tracking ignored source - do: @@ -261,14 +263,15 @@ disabled object: - match: { hits.total.value: 1 } - match: { hits.hits.0._source.name: aaaa } - - match: { hits.hits.0._source.path.some_int: 1000 } - - match: { hits.hits.0._source.path.to.a.very.deeply.nested.field: AAAA } - + - match: + hits.hits.0._source.path: + some_int: 1000 + to.a.very.deeply.nested.field: AAAA --- disabled object contains array: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.ignored_source.dont_expand_dots"] reason: requires tracking ignored source - do: @@ -297,10 +300,12 @@ disabled object contains array: - match: { hits.total.value: 1 } - match: { hits.hits.0._source.name: aaaa } - - match: { hits.hits.0._source.path.0.some_int: 1000 } - - match: { hits.hits.0._source.path.0.to.a.very.deeply.nested.field: AAAA } - - match: { hits.hits.0._source.path.1.some_double: 10.0 } - - match: { hits.hits.0._source.path.1.some_bool: true } + - match: + hits.hits.0._source.path: + - some_int: 1000 + to.a.very.deeply.nested.field: AAAA + - some_double: 10.0 + some_bool: true --- @@ -429,7 +434,7 @@ mixed disabled and enabled objects: --- object with dynamic override: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.ignored_source.dont_expand_dots"] reason: requires tracking ignored source - do: @@ -467,7 +472,7 @@ object with dynamic override: - match: { hits.hits.0._source.name: a } - match: { 
hits.hits.0._source.path_no.name: foo } - match: { hits.hits.0._source.path_no.some_int: 10 } - - match: { hits.hits.0._source.path_no.to.a.very.deeply.nested.field: A } + - match: { hits.hits.0._source.path_no.to: { a.very.deeply.nested.field: A } } - match: { hits.hits.0._source.path_runtime.name: bar } - match: { hits.hits.0._source.path_runtime.some_int: 20 } - match: { hits.hits.0._source.path_runtime.to.a.very.deeply.nested.field: B } @@ -524,7 +529,7 @@ subobject with dynamic override: --- object array in object with dynamic override: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.synthetic_source_keep"] reason: requires tracking ignored source - do: @@ -895,7 +900,7 @@ doubly nested object: --- subobjects auto: - requires: - cluster_features: ["mapper.subobjects_auto"] + cluster_features: ["mapper.subobjects_auto", "mapper.bwc_workaround_9_0"] reason: requires tracking ignored source and supporting subobjects auto setting - do: @@ -920,7 +925,7 @@ subobjects auto: id: type: keyword stored: - store_array_source: true + synthetic_source_keep: arrays properties: span: properties: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml index 7d7be765631e5..dfe6c9820a16a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml @@ -1,7 +1,71 @@ +--- +object param - store complex object: + - requires: + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] + reason: requires tracking ignored source + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + id: + type: integer + stored: + synthetic_source_keep: all + properties: + object_array: + properties: + trace: + type: keyword + nested: + type: nested + kw: + type: keyword + + - do: + bulk: + index: test + refresh: true + body: + - '{ "create": { } }' + - '{ "id": 1, "stored": { "object_array": [ {"trace": "B"}, {"trace": "A"} ], "nested": [ {"foo": 20}, {"foo": 10} ], "kw": 100 } }' + - '{ "create": { } }' + - '{ "id": 2, "stored": { "object_array": { "trace": ["D", "C"] }, "nested": { "bar": [ 40, 30] }, "kw": 200, "baz": "2000" } }' + - '{ "create": { } }' + - '{ "id": 3, "stored": [ { "object_array": { "trace": "E" } }, { "nested": { "bar": [ 60, 50] } }, { "kw": 300 } ] }' + + - do: + search: + index: test + sort: id + + - match: { hits.hits.0._source.id: 1 } + - match: { hits.hits.0._source.stored.object_array.0.trace: B } + - match: { hits.hits.0._source.stored.object_array.1.trace: A } + - match: { hits.hits.0._source.stored.nested.0.foo: 20 } + - match: { hits.hits.0._source.stored.nested.1.foo: 10 } + - match: { hits.hits.0._source.stored.kw: 100 } + + - match: { hits.hits.1._source.id: 2 } + - match: { hits.hits.1._source.stored.object_array.trace: [D, C] } + - match: { hits.hits.1._source.stored.nested.bar: [40, 30] } + - match: { hits.hits.1._source.stored.kw: 200 } + - match: { hits.hits.1._source.stored.baz: "2000" } + + - match: { hits.hits.2._source.id: 3 } + - match: { hits.hits.2._source.stored.0.object_array.trace: E } + - match: { hits.hits.2._source.stored.1.nested.bar: [ 60, 50 ] } + - match: { hits.hits.2._source.stored.2.kw: 300 } + + --- object param - 
object array: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] reason: requires tracking ignored source - do: @@ -25,7 +89,7 @@ object param - object array: id: type: keyword stored: - store_array_source: true + synthetic_source_keep: arrays properties: span: properties: @@ -65,7 +129,7 @@ object param - object array: --- object param - object array within array: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] reason: requires tracking ignored source - do: @@ -77,10 +141,10 @@ object param - object array within array: mode: synthetic properties: stored: - store_array_source: true + synthetic_source_keep: arrays properties: path: - store_array_source: true + synthetic_source_keep: arrays properties: to: properties: @@ -108,7 +172,7 @@ object param - object array within array: --- object param - no object array: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] reason: requires tracking ignored source - do: @@ -120,7 +184,7 @@ object param - no object array: mode: synthetic properties: stored: - store_array_source: true + synthetic_source_keep: arrays properties: span: properties: @@ -150,7 +214,7 @@ object param - no object array: --- object param - field ordering in object array: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] reason: requires tracking ignored source - do: @@ -164,7 +228,7 @@ object param - field ordering in object array: a: type: keyword b: - store_array_source: true + synthetic_source_keep: arrays properties: aa: type: keyword @@ -173,7 +237,7 @@ object param - field ordering in object array: c: type: keyword d: - store_array_source: true + synthetic_source_keep: arrays properties: aa: type: keyword @@ -199,7 +263,7 @@ object param - field ordering in object array: --- object param - nested object array next to other fields: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] reason: requires tracking ignored source - do: @@ -215,7 +279,7 @@ object param - nested object array next to other fields: b: properties: c: - store_array_source: true + synthetic_source_keep: arrays properties: aa: type: keyword @@ -255,7 +319,7 @@ object param - nested object array next to other fields: --- object param - nested object with stored array: - requires: - cluster_features: ["mapper.track_ignored_source"] + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] reason: requires tracking ignored source - do: @@ -272,7 +336,7 @@ object param - nested object with stored array: type: nested nested_array_stored: type: nested - store_array_source: true + synthetic_source_keep: all - do: bulk: @@ -304,7 +368,7 @@ object param - nested object with stored array: --- index param - nested array within array: - requires: - cluster_features: ["mapper.synthetic_source_keep"] + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] reason: requires tracking ignored source - do: @@ -322,7 +386,7 @@ index param - nested array within array: to: properties: some: - store_array_source: true + synthetic_source_keep: arrays properties: id: type: integer @@ -351,7 +415,7 @@ index 
param - nested array within array: # 112156 stored field under object with store_array_source: - requires: - cluster_features: ["mapper.source.synthetic_source_stored_fields_advance_fix"] + cluster_features: ["mapper.source.synthetic_source_stored_fields_advance_fix", "mapper.bwc_workaround_9_0"] reason: requires bug fix to be implemented - do: @@ -369,7 +433,7 @@ stored field under object with store_array_source: name: type: keyword obj: - store_array_source: true + synthetic_source_keep: arrays properties: foo: type: keyword @@ -740,7 +804,7 @@ field param - nested array within array: --- index param - root arrays: - requires: - cluster_features: ["mapper.synthetic_source_keep"] + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] reason: requires keeping array source - do: @@ -772,6 +836,9 @@ index param - root arrays: properties: id: type: keyword + obj_default: + type: object + synthetic_source_keep: none - do: bulk: @@ -782,6 +849,8 @@ index param - root arrays: - '{ "id": 1, "leaf": [30, 20, 10], "leaf_default": [30, 20, 10], "obj": [ { "trace": { "id": "a" }, "span": { "id": "1" } }, { "trace": { "id": "b" }, "span": { "id": "1" } } ] }' - '{ "create": { } }' - '{ "id": 2, "leaf": [130, 120, 110], "leaf_default": [130, 120, 110], "obj": [ { "trace": { "id": "aa" }, "span": { "id": "2" } }, { "trace": { "id": "bb" }, "span": { "id": "2" } } ] }' + - '{ "create": { } }' + - '{ "id": 3, "obj_default": [ { "trace": { "id": "bb" }, "span": { "id": "2" } }, { "trace": { "id": "aa" }, "span": { "id": "2" } } ] }' - do: search: @@ -799,13 +868,17 @@ index param - root arrays: - match: { hits.hits.1._source.id: 2 } - match: { hits.hits.1._source.leaf: [ 130, 120, 110 ] } - - match: { hits.hits.0._source.leaf_default: [10, 20, 30] } + - match: { hits.hits.1._source.leaf_default: [110, 120, 130] } - length: { hits.hits.1._source.obj: 2 } - match: { hits.hits.1._source.obj.0.trace.id: aa } - match: { hits.hits.1._source.obj.0.span.id: "2" } - match: { hits.hits.1._source.obj.1.trace.id: bb } - match: { hits.hits.1._source.obj.1.span.id: "2" } + - match: { hits.hits.2._source.id: 3 } + - match: { hits.hits.2._source.obj_default.trace.id: [aa, bb] } + - match: { hits.hits.2._source.obj_default.span.id: "2" } + --- index param - dynamic root arrays: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/600_flattened_ignore_above.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/600_flattened_ignore_above.yml index e2c3006232c53..a4a9b1aaecb22 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/600_flattened_ignore_above.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/600_flattened_ignore_above.yml @@ -9,6 +9,8 @@ flattened ignore_above single-value field: body: mappings: properties: + name: + type: keyword keyword: type: keyword ignore_above: 5 @@ -22,6 +24,7 @@ flattened ignore_above single-value field: id: "1" refresh: true body: + name: "A" keyword: "foo" flat: { "value": "foo", "key": "foo key" } @@ -31,12 +34,14 @@ flattened ignore_above single-value field: id: "2" refresh: true body: + name: "B" keyword: "foo bar" flat: { "value": "foo bar", "key": "foo bar key"} - do: search: index: test + sort: name body: fields: - keyword @@ -69,6 +74,8 @@ flattened ignore_above multi-value field: body: mappings: properties: + name: + type: keyword keyword: type: keyword ignore_above: 5 @@ -82,6 +89,7 @@ flattened ignore_above multi-value field: id: "1" refresh: true 
body: + name: "A" keyword: ["foo","bar"] flat: { "value": ["foo", "bar"], "key": "foo bar array key" } @@ -91,12 +99,14 @@ flattened ignore_above multi-value field: id: "2" refresh: true body: + name: "B" keyword: ["foobar", "foo", "bar"] flat: { "value": ["foobar", "foo"], "key": ["foo key", "bar key"]} - do: search: index: test + sort: name body: fields: - keyword diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index 60ea4138e923d..cde8d41b292b7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -233,6 +233,9 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { handlers.add(handlerThrottled); + // Wait until we are ready for the next page + assertBusy(() -> assertTrue(nextPage.get())); + for (IncrementalBulkService.Handler h : handlers) { refCounted.incRef(); PlainActionFuture<BulkResponse> future = new PlainActionFuture<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java index b72257b884f08..4a060eadc735b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java @@ -9,23 +9,38 @@ package org.elasticsearch.monitor.metrics; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.Matcher; +import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) @@ -42,7 +57,7 @@ public List<Setting<?>> getSettings() { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return List.of(TestTelemetryPlugin.class, TestAPMInternalSettings.class); + return List.of(TestTelemetryPlugin.class, TestAPMInternalSettings.class, FailingFieldPlugin.class); } @Override @@ -54,27 +69,57 @@ protected Settings nodeSettings(int
nodeOrdinal, Settings otherSettings) { } static final String STANDARD_INDEX_COUNT = "es.indices.standard.total"; + static final String STANDARD_BYTES_SIZE = "es.indices.standard.size"; static final String STANDARD_DOCS_COUNT = "es.indices.standard.docs.total"; - static final String STANDARD_BYTES_SIZE = "es.indices.standard.bytes.total"; + static final String STANDARD_QUERY_COUNT = "es.indices.standard.query.total"; + static final String STANDARD_QUERY_TIME = "es.indices.standard.query.time"; + static final String STANDARD_QUERY_FAILURE = "es.indices.standard.query.failure.total"; + static final String STANDARD_FETCH_COUNT = "es.indices.standard.fetch.total"; + static final String STANDARD_FETCH_TIME = "es.indices.standard.fetch.time"; + static final String STANDARD_FETCH_FAILURE = "es.indices.standard.fetch.failure.total"; + static final String STANDARD_INDEXING_COUNT = "es.indices.standard.indexing.total"; + static final String STANDARD_INDEXING_TIME = "es.indices.standard.indexing.time"; + static final String STANDARD_INDEXING_FAILURE = "es.indices.standard.indexing.failure.total"; static final String TIME_SERIES_INDEX_COUNT = "es.indices.time_series.total"; + static final String TIME_SERIES_BYTES_SIZE = "es.indices.time_series.size"; static final String TIME_SERIES_DOCS_COUNT = "es.indices.time_series.docs.total"; - static final String TIME_SERIES_BYTES_SIZE = "es.indices.time_series.bytes.total"; + static final String TIME_SERIES_QUERY_COUNT = "es.indices.time_series.query.total"; + static final String TIME_SERIES_QUERY_TIME = "es.indices.time_series.query.time"; + static final String TIME_SERIES_QUERY_FAILURE = "es.indices.time_series.query.failure.total"; + static final String TIME_SERIES_FETCH_COUNT = "es.indices.time_series.fetch.total"; + static final String TIME_SERIES_FETCH_TIME = "es.indices.time_series.fetch.time"; + static final String TIME_SERIES_FETCH_FAILURE = "es.indices.time_series.fetch.failure.total"; + static final String TIME_SERIES_INDEXING_COUNT = "es.indices.time_series.indexing.total"; + static final String TIME_SERIES_INDEXING_TIME = "es.indices.time_series.indexing.time"; + static final String TIME_SERIES_INDEXING_FAILURE = "es.indices.time_series.indexing.failure.total"; static final String LOGSDB_INDEX_COUNT = "es.indices.logsdb.total"; + static final String LOGSDB_BYTES_SIZE = "es.indices.logsdb.size"; static final String LOGSDB_DOCS_COUNT = "es.indices.logsdb.docs.total"; - static final String LOGSDB_BYTES_SIZE = "es.indices.logsdb.bytes.total"; + static final String LOGSDB_QUERY_COUNT = "es.indices.logsdb.query.total"; + static final String LOGSDB_QUERY_TIME = "es.indices.logsdb.query.time"; + static final String LOGSDB_QUERY_FAILURE = "es.indices.logsdb.query.failure.total"; + static final String LOGSDB_FETCH_COUNT = "es.indices.logsdb.fetch.total"; + static final String LOGSDB_FETCH_TIME = "es.indices.logsdb.fetch.time"; + static final String LOGSDB_FETCH_FAILURE = "es.indices.logsdb.fetch.failure.total"; + static final String LOGSDB_INDEXING_COUNT = "es.indices.logsdb.indexing.total"; + static final String LOGSDB_INDEXING_TIME = "es.indices.logsdb.indexing.time"; + static final String LOGSDB_INDEXING_FAILURE = "es.indices.logsdb.indexing.failure.total"; - public void testIndicesMetrics() { + public void testIndicesMetrics() throws Exception { String node = internalCluster().startNode(); ensureStableCluster(1); final TestTelemetryPlugin telemetry = internalCluster().getInstance(PluginsService.class, node) .filterPlugins(TestTelemetryPlugin.class) 
.findFirst() .orElseThrow(); + final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); + var indexing0 = indicesService.stats(CommonStatsFlags.ALL, false).getIndexing().getTotal(); telemetry.resetMeter(); long numStandardIndices = randomIntBetween(1, 5); long numStandardDocs = populateStandardIndices(numStandardIndices); + var indexing1 = indicesService.stats(CommonStatsFlags.ALL, false).getIndexing().getTotal(); collectThenAssertMetrics( telemetry, 1, @@ -104,6 +149,7 @@ public void testIndicesMetrics() { long numTimeSeriesIndices = randomIntBetween(1, 5); long numTimeSeriesDocs = populateTimeSeriesIndices(numTimeSeriesIndices); + var indexing2 = indicesService.stats(CommonStatsFlags.ALL, false).getIndexing().getTotal(); collectThenAssertMetrics( telemetry, 2, @@ -133,6 +179,7 @@ public void testIndicesMetrics() { long numLogsdbIndices = randomIntBetween(1, 5); long numLogsdbDocs = populateLogsdbIndices(numLogsdbIndices); + var indexing3 = indicesService.stats(CommonStatsFlags.ALL, false).getIndexing().getTotal(); collectThenAssertMetrics( telemetry, 3, @@ -159,6 +206,142 @@ public void testIndicesMetrics() { greaterThan(0L) ) ); + // indexing stats + collectThenAssertMetrics( + telemetry, + 4, + Map.of( + STANDARD_INDEXING_COUNT, + equalTo(numStandardDocs), + STANDARD_INDEXING_TIME, + greaterThanOrEqualTo(0L), + STANDARD_INDEXING_FAILURE, + equalTo(indexing1.getIndexFailedCount() - indexing0.getIndexFailedCount()), + + TIME_SERIES_INDEXING_COUNT, + equalTo(numTimeSeriesDocs), + TIME_SERIES_INDEXING_TIME, + greaterThanOrEqualTo(0L), + TIME_SERIES_INDEXING_FAILURE, + equalTo(indexing2.getIndexFailedCount() - indexing1.getIndexFailedCount()), + + LOGSDB_INDEXING_COUNT, + equalTo(numLogsdbDocs), + LOGSDB_INDEXING_TIME, + greaterThanOrEqualTo(0L), + LOGSDB_INDEXING_FAILURE, + equalTo(indexing3.getIndexFailedCount() - indexing2.getIndexFailedCount()) + ) + ); + telemetry.resetMeter(); + + // search and fetch + client().prepareSearch("standard*").setSize(100).get().decRef(); + var nodeStats1 = indicesService.stats(CommonStatsFlags.ALL, false).getSearch().getTotal(); + collectThenAssertMetrics( + telemetry, + 1, + Map.of( + STANDARD_QUERY_COUNT, + equalTo(numStandardIndices), + STANDARD_QUERY_TIME, + equalTo(nodeStats1.getQueryTimeInMillis()), + STANDARD_FETCH_COUNT, + equalTo(nodeStats1.getFetchCount()), + STANDARD_FETCH_TIME, + equalTo(nodeStats1.getFetchTimeInMillis()), + + TIME_SERIES_QUERY_COUNT, + equalTo(0L), + TIME_SERIES_QUERY_TIME, + equalTo(0L), + + LOGSDB_QUERY_COUNT, + equalTo(0L), + LOGSDB_QUERY_TIME, + equalTo(0L) + ) + ); + + client().prepareSearch("time*").setSize(100).get().decRef(); + var nodeStats2 = indicesService.stats(CommonStatsFlags.ALL, false).getSearch().getTotal(); + collectThenAssertMetrics( + telemetry, + 2, + Map.of( + STANDARD_QUERY_COUNT, + equalTo(numStandardIndices), + STANDARD_QUERY_TIME, + equalTo(nodeStats1.getQueryTimeInMillis()), + + TIME_SERIES_QUERY_COUNT, + equalTo(numTimeSeriesIndices), + TIME_SERIES_QUERY_TIME, + equalTo(nodeStats2.getQueryTimeInMillis() - nodeStats1.getQueryTimeInMillis()), + TIME_SERIES_FETCH_COUNT, + equalTo(nodeStats2.getFetchCount() - nodeStats1.getFetchCount()), + TIME_SERIES_FETCH_TIME, + equalTo(nodeStats2.getFetchTimeInMillis() - nodeStats1.getFetchTimeInMillis()), + + LOGSDB_QUERY_COUNT, + equalTo(0L), + LOGSDB_QUERY_TIME, + equalTo(0L) + ) + ); + client().prepareSearch("logs*").setSize(100).get().decRef(); + var nodeStats3 = indicesService.stats(CommonStatsFlags.ALL,
false).getSearch().getTotal(); + collectThenAssertMetrics( + telemetry, + 3, + Map.of( + STANDARD_QUERY_COUNT, + equalTo(numStandardIndices), + STANDARD_QUERY_TIME, + equalTo(nodeStats1.getQueryTimeInMillis()), + + TIME_SERIES_QUERY_COUNT, + equalTo(numTimeSeriesIndices), + TIME_SERIES_QUERY_TIME, + equalTo(nodeStats2.getQueryTimeInMillis() - nodeStats1.getQueryTimeInMillis()), + + LOGSDB_QUERY_COUNT, + equalTo(numLogsdbIndices), + LOGSDB_QUERY_TIME, + equalTo(nodeStats3.getQueryTimeInMillis() - nodeStats2.getQueryTimeInMillis()), + LOGSDB_FETCH_COUNT, + equalTo(nodeStats3.getFetchCount() - nodeStats2.getFetchCount()), + LOGSDB_FETCH_TIME, + equalTo(nodeStats3.getFetchTimeInMillis() - nodeStats2.getFetchTimeInMillis()) + ) + ); + // search failures + expectThrows(Exception.class, () -> { client().prepareSearch("logs*").setRuntimeMappings(parseMapping(""" + { + "fail_me": { + "type": "long", + "script": {"source": "<>", "lang": "failing_field"} + } + } + """)).setQuery(new RangeQueryBuilder("fail_me").gte(0)).setAllowPartialSearchResults(true).get(); }); + collectThenAssertMetrics( + telemetry, + 4, + Map.of( + STANDARD_QUERY_FAILURE, + equalTo(0L), + STANDARD_FETCH_FAILURE, + equalTo(0L), + TIME_SERIES_QUERY_FAILURE, + equalTo(0L), + TIME_SERIES_FETCH_FAILURE, + equalTo(0L), + LOGSDB_QUERY_FAILURE, + equalTo(numLogsdbIndices), + LOGSDB_FETCH_FAILURE, + equalTo(0L) + ) + ); } void collectThenAssertMetrics(TestTelemetryPlugin telemetry, int times, Map> matchers) { @@ -175,7 +358,7 @@ int populateStandardIndices(long numIndices) { int totalDocs = 0; for (int i = 0; i < numIndices; i++) { String indexName = "standard-" + i; - createIndex(indexName); + createIndex(indexName, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build()); int numDocs = between(1, 5); for (int d = 0; d < numDocs; d++) { indexDoc(indexName, Integer.toString(d), "f", Integer.toString(d)); @@ -190,7 +373,11 @@ int populateTimeSeriesIndices(long numIndices) { int totalDocs = 0; for (int i = 0; i < numIndices; i++) { String indexName = "time_series-" + i; - Settings settings = Settings.builder().put("mode", "time_series").putList("routing_path", List.of("host")).build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put("mode", "time_series") + .putList("routing_path", List.of("host")) + .build(); client().admin() .indices() .prepareCreate(indexName) @@ -214,6 +401,7 @@ int populateTimeSeriesIndices(long numIndices) { } totalDocs += numDocs; flush(indexName); + refresh(indexName); } return totalDocs; } @@ -222,7 +410,7 @@ int populateLogsdbIndices(long numIndices) { int totalDocs = 0; for (int i = 0; i < numIndices; i++) { String indexName = "logsdb-" + i; - Settings settings = Settings.builder().put("mode", "logsdb").build(); + Settings settings = Settings.builder().put("mode", "logsdb").put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build(); client().admin() .indices() .prepareCreate(indexName) @@ -237,9 +425,75 @@ int populateLogsdbIndices(long numIndices) { .setSource("@timestamp", timestamp, "host.name", randomFrom("prod", "qa"), "cpu", randomIntBetween(1, 100)) .get(); } + int numFailures = between(0, 2); + for (int d = 0; d < numFailures; d++) { + expectThrows(Exception.class, () -> { + client().prepareIndex(indexName) + .setSource( + "@timestamp", + "malformed-timestamp", + "host.name", + randomFrom("prod", "qa"), + "cpu", + randomIntBetween(1, 100) + ) + .get(); + }); + } totalDocs += numDocs; flush(indexName); + refresh(indexName); } return 
totalDocs; } + + private Map<String, Object> parseMapping(String mapping) throws IOException { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, mapping)) { + return parser.map(); + } + } + + public static class FailingFieldPlugin extends Plugin implements ScriptPlugin { + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "failing_field"; + } + + @Override + @SuppressWarnings("unchecked") + public <FactoryType> FactoryType compile( + String name, + String code, + ScriptContext<FactoryType> context, + Map<String, String> params + ) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map<String, Object> params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + throw new IllegalStateException("Accessing failing field"); + } + }; + } + }; + } + + @Override + public Set<ScriptContext<?>> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java index 891096dfa67a9..b78448bfd873f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java @@ -53,6 +53,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.hamcrest.Matchers.equalTo; @@ -112,7 +113,7 @@ public void setup() throws Exception { } } """; - createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).build()); + createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); indexDoc( INDEX, @@ -417,8 +418,7 @@ public void testRankDocsRetrieverWithNestedQuery() { SearchSourceBuilder source = new SearchSourceBuilder(); StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.nestedQuery("views", QueryBuilders.rangeQuery(LAST_30D_FIELD).gt(10L), ScoreMode.Avg) - .innerHit(new InnerHitBuilder("a").addSort(new FieldSortBuilder(DOC_FIELD).order(SortOrder.DESC)).setSize(10)); + standard0.queryBuilder = QueryBuilders.nestedQuery("views", QueryBuilders.rangeQuery(LAST_30D_FIELD).gt(10L), ScoreMode.Avg); StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); // this one retrieves docs 2 and 6 due to prefilter standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); @@ -455,9 +455,9 @@ public void testRankDocsRetrieverWithNestedQuery() { assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(3).getId(),
equalTo("doc_3")); assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_4")); - assertThat(resp.getHits().getAt(5).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(5).getId(), equalTo("doc_7")); }); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index b82afe2a22fa6..f6e4649aa4807 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -233,6 +233,8 @@ static TransportVersion def(int id) { public static final TransportVersion REGEX_AND_RANGE_INTERVAL_QUERIES = def(8_757_00_0); public static final TransportVersion RRF_QUERY_REWRITE = def(8_758_00_0); public static final TransportVersion SEARCH_FAILURE_STATS = def(8_759_00_0); + public static final TransportVersion INGEST_GEO_DATABASE_PROVIDERS = def(8_760_00_0); + public static final TransportVersion DATE_TIME_DOC_VALUES_LOCALES = def(8_761_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java index 9581279201be2..4aa6ed60afe43 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java @@ -21,6 +21,8 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.xcontent.ToXContent; @@ -43,6 +45,8 @@ public class ClusterRerouteResponse extends ActionResponse implements IsAcknowle /** * To be removed when REST compatibility with {@link org.elasticsearch.Version#V_8_6_0} / {@link RestApiVersion#V_8} no longer needed */ + @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // to remove from the v9 API only + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // to remove entirely private final ClusterState state; private final RoutingExplanations explanations; private final boolean acknowledged; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 3561a4d0e2cb4..fdced5fc18ac9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.AliasMetadata; -import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; @@ -157,8 +156,7 @@ protected void masterOperation( xContentRegistry, indicesService, systemIndices, - indexSettingProviders, - Map.of() 
+ indexSettingProviders ); final Map> overlapping = new HashMap<>(); @@ -235,8 +233,7 @@ public static Template resolveTemplate( final NamedXContentRegistry xContentRegistry, final IndicesService indicesService, final SystemIndices systemIndices, - Set indexSettingProviders, - Map componentTemplateSubstitutions + Set indexSettingProviders ) throws Exception { var metadata = simulatedState.getMetadata(); Settings templateSettings = resolveSettings(simulatedState.metadata(), matchingTemplate); @@ -266,7 +263,6 @@ public static Template resolveTemplate( null, // empty request mapping as the user can't specify any explicit mappings via the simulate api simulatedState, matchingTemplate, - componentTemplateSubstitutions, xContentRegistry, simulatedIndexName ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java index af7a253b5a042..30bbad0b57df0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java @@ -170,8 +170,7 @@ protected void masterOperation( xContentRegistry, indicesService, systemIndices, - indexSettingProviders, - Map.of() + indexSettingProviders ); if (request.includeDefaults()) { listener.onResponse( diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java index b02d7acf66d14..88a9fb56b8edb 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; @@ -157,14 +158,13 @@ public void writeTo(StreamOutput out) throws IOException { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat(Iterators.single((builder, p) -> { - builder.startObject(); - builder.field(ERRORS, hasFailures()); - builder.field(TOOK, tookInMillis); + return ChunkedToXContent.builder(params).object(ob -> { + ob.field(ERRORS, hasFailures()); + ob.field(TOOK, tookInMillis); if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) { - builder.field(INGEST_TOOK, ingestTookInMillis); + ob.field(INGEST_TOOK, ingestTookInMillis); } - return builder.startArray(ITEMS); - }), Iterators.forArray(responses), Iterators.single((builder, p) -> builder.endArray().endObject())); + ob.array(ITEMS, Iterators.forArray(responses)); + }); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java index d8c2389dd7d69..d5ad3aa2d29a1 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java @@ -105,6 +105,7 @@ public static class Handler implements Releasable { private boolean closed = false; private boolean globalFailure = 
false; private boolean incrementalRequestSubmitted = false; + private boolean bulkInProgress = false; private ThreadContext.StoredContext requestContext; private Exception bulkActionLevelFailure = null; private long currentBulkSize = 0L; @@ -130,6 +131,7 @@ protected Handler( public void addItems(List<DocWriteRequest<?>> items, Releasable releasable, Runnable nextItems) { assert closed == false; + assert bulkInProgress == false; if (bulkActionLevelFailure != null) { shortCircuitDueToTopLevelFailure(items, releasable); nextItems.run(); @@ -143,6 +145,7 @@ public void addItems(List<DocWriteRequest<?>> items, Releasable releasable, Runn requestContext.restore(); final ArrayList<Releasable> toRelease = new ArrayList<>(releasables); releasables.clear(); + bulkInProgress = true; client.bulk(bulkRequest, ActionListener.runAfter(new ActionListener<>() { @Override @@ -158,6 +161,7 @@ public void onFailure(Exception e) { handleBulkFailure(isFirstRequest, e); } }, () -> { + bulkInProgress = false; requestContext = threadContext.newStoredContext(); toRelease.forEach(Releasable::close); nextItems.run(); @@ -177,6 +181,7 @@ private boolean shouldBackOff() { } public void lastItems(List<DocWriteRequest<?>> items, Releasable releasable, ActionListener<BulkResponse> listener) { + assert bulkInProgress == false; if (bulkActionLevelFailure != null) { shortCircuitDueToTopLevelFailure(items, releasable); errorResponse(listener); @@ -187,7 +192,9 @@ public void lastItems(List<DocWriteRequest<?>> items, Releasable releasable, Act requestContext.restore(); final ArrayList<Releasable> toRelease = new ArrayList<>(releasables); releasables.clear(); - client.bulk(bulkRequest, ActionListener.runBefore(new ActionListener<>() { + // We do not need to set this back to false as this will be the last request. + bulkInProgress = true; + client.bulk(bulkRequest, ActionListener.runAfter(new ActionListener<>() { private final boolean isFirstRequest = incrementalRequestSubmitted == false; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java index 5eae1c660d7d0..8c6565e52daa7 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.concurrent.Executor; @@ -180,12 +181,48 @@ protected void doRun() throws IOException { private boolean applyPipelines(Task task, BulkRequest bulkRequest, Executor executor, ActionListener<BulkResponse> listener) throws IOException { boolean hasIndexRequestsWithPipelines = false; - final Metadata metadata = clusterService.state().getMetadata(); - Map<String, ComponentTemplate> templateSubstitutions = bulkRequest.getComponentTemplateSubstitutions(); + final Metadata metadata; + Map<String, ComponentTemplate> componentTemplateSubstitutions = bulkRequest.getComponentTemplateSubstitutions(); + if (bulkRequest.isSimulated() && componentTemplateSubstitutions.isEmpty() == false) { + /* + * If this is a simulated request, and there are template substitutions, then we want to create and use a new metadata that has + * those templates. That is, we want to add the new templates (which will replace any that already existed with the same name), + * and remove the indices and data streams that are referred to from the bulkRequest so that we get settings from the templates + * rather than from the indices/data streams.
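+ * (Editor's illustration, not part of the original comment: for example, if a simulated bulk request targets index + * "foo-1" and substitutes a component template named "settings_template", the metadata built below contains the + * substituted "settings_template" in place of the stored one, while "foo-1" and any data stream of that name are + * dropped from the metadata copy, so pipeline resolution falls back to the matching templates. The names "foo-1" + * and "settings_template" are illustrative only, the latter borrowed from the YAML test earlier in this patch.)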
+ */ + Metadata.Builder simulatedMetadataBuilder = Metadata.builder(clusterService.state().getMetadata()); + if (componentTemplateSubstitutions.isEmpty() == false) { + Map updatedComponentTemplates = new HashMap<>(); + updatedComponentTemplates.putAll(clusterService.state().metadata().componentTemplates()); + updatedComponentTemplates.putAll(componentTemplateSubstitutions); + simulatedMetadataBuilder.componentTemplates(updatedComponentTemplates); + } + /* + * We now remove the index from the simulated metadata to force the templates to be used. Note that simulated requests are + * always index requests -- no other type of request is supported. + */ + for (DocWriteRequest actionRequest : bulkRequest.requests) { + assert actionRequest != null : "Requests cannot be null in simulate mode"; + assert actionRequest instanceof IndexRequest + : "Only IndexRequests are supported in simulate mode, but got " + actionRequest.getClass(); + if (actionRequest != null) { + IndexRequest indexRequest = (IndexRequest) actionRequest; + String indexName = indexRequest.index(); + if (indexName != null) { + simulatedMetadataBuilder.remove(indexName); + simulatedMetadataBuilder.removeDataStream(indexName); + } + } + } + metadata = simulatedMetadataBuilder.build(); + } else { + metadata = clusterService.state().getMetadata(); + } + for (DocWriteRequest actionRequest : bulkRequest.requests) { IndexRequest indexRequest = getIndexWriteRequest(actionRequest); if (indexRequest != null) { - IngestService.resolvePipelinesAndUpdateIndexRequest(actionRequest, indexRequest, metadata, templateSubstitutions); + IngestService.resolvePipelinesAndUpdateIndexRequest(actionRequest, indexRequest, metadata); hasIndexRequestsWithPipelines |= IngestService.hasPipeline(indexRequest); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index c860c49809cb5..713116c4cf98e 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -51,6 +51,7 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.IOException; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -197,12 +198,33 @@ private Exception validateMappings(Map componentTempl * path for when the index does not exist). And it does not deal with system indices since we do not intend for users to * simulate writing to system indices. */ - // First, we remove the index from the cluster state if necessary (since we're going to use the templates) - ClusterState simulatedState = indexAbstraction == null - ? state - : new ClusterState.Builder(state).metadata(Metadata.builder(state.metadata()).remove(request.index()).build()).build(); + ClusterState.Builder simulatedClusterStateBuilder = new ClusterState.Builder(state); + Metadata.Builder simulatedMetadata = Metadata.builder(state.metadata()); + if (indexAbstraction != null) { + /* + * We remove the index or data stream from the cluster state so that we are forced to fall back to the templates to get + * mappings. 
+ */ + String indexRequest = request.index(); + assert indexRequest != null : "Index requests cannot be null in a simulate bulk call"; + if (indexRequest != null) { + simulatedMetadata.remove(indexRequest); + simulatedMetadata.removeDataStream(indexRequest); + } + } + if (componentTemplateSubstitutions.isEmpty() == false) { + /* + * We put the template substitutions into the cluster state. If they have the same name as an existing one, the + * existing one is replaced. + */ + Map updatedComponentTemplates = new HashMap<>(); + updatedComponentTemplates.putAll(state.metadata().componentTemplates()); + updatedComponentTemplates.putAll(componentTemplateSubstitutions); + simulatedMetadata.componentTemplates(updatedComponentTemplates); + } + ClusterState simulatedState = simulatedClusterStateBuilder.metadata(simulatedMetadata).build(); - String matchingTemplate = findV2Template(state.metadata(), request.index(), false); + String matchingTemplate = findV2Template(simulatedState.metadata(), request.index(), false); if (matchingTemplate != null) { final Template template = TransportSimulateIndexTemplateAction.resolveTemplate( matchingTemplate, @@ -212,8 +234,7 @@ private Exception validateMappings(Map componentTempl xContentRegistry, indicesService, systemIndices, - indexSettingProviders, - componentTemplateSubstitutions + indexSettingProviders ); CompressedXContent mappings = template.mappings(); if (mappings != null) { @@ -247,7 +268,7 @@ private Exception validateMappings(Map componentTempl }); } } else { - List matchingTemplates = findV1Templates(state.metadata(), request.index(), false); + List matchingTemplates = findV1Templates(simulatedState.metadata(), request.index(), false); final Map mappingsMap = MetadataCreateIndexService.parseV1Mappings( "{}", matchingTemplates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 7ee9be25b3d59..5457ca60d0da4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -10,6 +10,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; @@ -82,8 +83,15 @@ private void doRun() { CollapseBuilder innerCollapseBuilder = innerHitBuilder.getInnerCollapseBuilder(); SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(innerHitBuilder, innerCollapseBuilder).query(groupQuery) .postFilter(searchRequest.source().postFilter()) - .runtimeMappings(searchRequest.source().runtimeMappings()); + .runtimeMappings(searchRequest.source().runtimeMappings()) + .pointInTimeBuilder(searchRequest.source().pointInTimeBuilder()); SearchRequest groupRequest = new SearchRequest(searchRequest); + if (searchRequest.pointInTimeBuilder() != null) { + // if the original request has a point in time, we propagate it to the inner search request + // and clear the indices and preference from the inner search request + groupRequest.indices(Strings.EMPTY_ARRAY); + groupRequest.preference(null); + } groupRequest.source(sourceBuilder); multiRequest.add(groupRequest); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java 
b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index eb272a9302e85..83ee6c216ad49 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -14,13 +14,12 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; @@ -383,39 +382,17 @@ public Clusters getClusters() { @Override public Iterator toXContentChunked(ToXContent.Params params) { assert hasReferences(); - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - this.innerToXContentChunked(params), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContent.builder(params).xContentObject(innerToXContentChunked(params)); } public Iterator innerToXContentChunked(ToXContent.Params params) { - return Iterators.concat( - ChunkedToXContentHelper.singleChunk(SearchResponse.this::headerToXContent), - Iterators.single(clusters), - Iterators.concat( - Iterators.flatMap(Iterators.single(hits), r -> r.toXContentChunked(params)), - Iterators.single((ToXContent) (b, p) -> { - if (aggregations != null) { - aggregations.toXContent(b, p); - } - return b; - }), - Iterators.single((b, p) -> { - if (suggest != null) { - suggest.toXContent(b, p); - } - return b; - }), - Iterators.single((b, p) -> { - if (profileResults != null) { - profileResults.toXContent(b, p); - } - return b; - }) - ) - ); + return ChunkedToXContent.builder(params) + .append(SearchResponse.this::headerToXContent) + .append(clusters) + .append(hits) + .appendIfPresent(aggregations) + .appendIfPresent(suggest) + .appendIfPresent(profileResults); } public XContentBuilder headerToXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/ChunkedBroadcastResponse.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/ChunkedBroadcastResponse.java index e07ad7bac4037..4b65af23c622a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/ChunkedBroadcastResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/ChunkedBroadcastResponse.java @@ -9,8 +9,8 @@ package org.elasticsearch.action.support.broadcast; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.xcontent.ToXContent; @@ -35,11 +35,8 @@ public ChunkedBroadcastResponse( @Override public final Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat(Iterators.single((b, p) -> { - 
b.startObject(); - RestActions.buildBroadcastShardsHeader(b, p, this); - return b; - }), customXContentChunks(params), Iterators.single((builder, p) -> builder.endObject())); + return ChunkedToXContent.builder(params) + .object(ob -> ob.append((b, p) -> RestActions.buildBroadcastShardsHeader(b, p, this)).append(this::customXContentChunks)); } protected abstract Iterator customXContentChunks(ToXContent.Params params); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java b/server/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java index e07e0608c1383..845303abe6baf 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java @@ -16,6 +16,7 @@ import java.io.IOException; import java.net.SocketPermission; import java.net.URL; +import java.security.AllPermission; import java.security.CodeSource; import java.security.Permission; import java.security.PermissionCollection; @@ -39,6 +40,7 @@ final class ESPolicy extends Policy { static final String UNTRUSTED_RESOURCE = "untrusted.policy"; private static final String ALL_FILE_MASK = "read,readlink,write,delete,execute"; + private static final AllPermission ALL_PERMISSION = new AllPermission(); final Policy template; final Policy untrusted; @@ -124,7 +126,7 @@ public boolean implies(ProtectionDomain domain, Permission permission) { * It's helpful to use the infrastructure around FilePermission here to do the directory structure check with implies * so we use ALL_FILE_MASK mask to check if we can do something with this file, whatever the actual operation we're requesting */ - return canAccessSecuredFile(location, new FilePermission(permission.getName(), ALL_FILE_MASK)); + return canAccessSecuredFile(domain, new FilePermission(permission.getName(), ALL_FILE_MASK)); } if (location != null) { @@ -157,15 +159,24 @@ public boolean implies(ProtectionDomain domain, Permission permission) { } @SuppressForbidden(reason = "We get given an URL by the security infrastructure") - private boolean canAccessSecuredFile(URL location, FilePermission permission) { - if (location == null) { + private boolean canAccessSecuredFile(ProtectionDomain domain, FilePermission permission) { + if (domain == null || domain.getCodeSource() == null || domain.getCodeSource().getLocation() == null) { return false; } + // If the domain in question has AllPermission - only true of sources built into the JDK, as we prevent AllPermission from being + // configured in Elasticsearch - then it has access to this file. 
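The comment above describes the early exits that the reworked canAccessSecuredFile performs before consulting the secured-files map. A sketch of just that guard using the java.security types from the hunk; SecuredFileGuard and systemPolicy are illustrative stand-ins for ESPolicy and its system field:

import java.security.AllPermission;
import java.security.Permission;
import java.security.Policy;
import java.security.ProtectionDomain;

final class SecuredFileGuard {
    private static final Permission ALL = new AllPermission();
    private final Policy systemPolicy; // stand-in for ESPolicy's `system` policy

    SecuredFileGuard(Policy systemPolicy) {
        this.systemPolicy = systemPolicy;
    }

    boolean canAccess(ProtectionDomain domain) {
        // No identifiable code source: deny, matching the null checks in the hunk above.
        if (domain == null || domain.getCodeSource() == null || domain.getCodeSource().getLocation() == null) {
            return false;
        }
        // JDK-internal domains hold AllPermission, so they bypass the secured-file restriction.
        if (systemPolicy.implies(domain, ALL)) {
            return true;
        }
        return false; // the real code now looks up the secured-files map by code-source location
    }
}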
+ + if (system.implies(domain, ALL_PERMISSION)) { + return true; + } + URL location = domain.getCodeSource().getLocation(); + // check the source Set accessibleSources = securedFiles.get(permission); if (accessibleSources != null) { // simple case - single-file referenced directly + return accessibleSources.contains(location); } else { // there's a directory reference in there somewhere diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Security.java b/server/src/main/java/org/elasticsearch/bootstrap/Security.java index f22413f9abd12..dc6de9a6b2c91 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -47,6 +47,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Consumer; @@ -236,17 +237,25 @@ private static Map> readSecuredConfigFiles( for (Map.Entry> ps : settingPatterns) { if (ps.getKey().matcher(setting).matches()) { // add the setting value to the secured files for these codebase URLs - Path file = environment.configFile().resolve(environment.settings().get(setting)); - if (file.startsWith(environment.configFile()) == false) { - throw new IllegalStateException(ps.getValue() + " tried to grant access to file outside config directory " + file); - } - if (logger.isDebugEnabled()) { - ps.getValue() - .forEach( - url -> logger.debug("Jar {} securing access to config file {} through setting {}", url, file, setting) + String settingValue = environment.settings().get(setting); + // Some settings can also be an HTTPS URL in addition to a file path; if that's the case just skip this one. + // If the setting shouldn't be an HTTPS URL, that'll be caught by that setting's validation later in the process. + // HTTP (no S) URLs are not supported. 
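The settings loop that follows this comment skips HTTPS-valued settings and insists that anything else resolves to a path inside the config directory. A self-contained sketch of that validation; configDir and resolveSecuredFile are illustrative names, and normalize() is an addition here so that ".." segments cannot escape the lexical startsWith check:

import java.nio.file.Path;
import java.util.Locale;

final class SecuredSettingCheck {
    // Returns null when the value is an HTTPS URL (skipped); throws if the path escapes configDir.
    static Path resolveSecuredFile(Path configDir, String settingValue) {
        if (settingValue.toLowerCase(Locale.ROOT).startsWith("https://")) {
            return null; // URL-valued setting: validated elsewhere, nothing to secure on disk
        }
        Path file = configDir.resolve(settingValue).normalize();
        if (file.startsWith(configDir) == false) {
            throw new IllegalStateException("tried to grant access to file outside config directory " + file);
        }
        return file;
    }

    public static void main(String[] args) {
        Path config = Path.of("/etc/elasticsearch");
        System.out.println(resolveSecuredFile(config, "certs/http.p12"));       // /etc/elasticsearch/certs/http.p12
        System.out.println(resolveSecuredFile(config, "https://example.org/")); // null: skipped
    }
}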
+ if (settingValue.toLowerCase(Locale.ROOT).startsWith("https://") == false) { + Path file = environment.configFile().resolve(settingValue); + if (file.startsWith(environment.configFile()) == false) { + throw new IllegalStateException( + ps.getValue() + " tried to grant access to file outside config directory " + file ); + } + if (logger.isDebugEnabled()) { + ps.getValue() + .forEach( + url -> logger.debug("Jar {} securing access to config file {} through setting {}", url, file, setting) + ); + } + securedConfigFiles.computeIfAbsent(file.toString(), k -> new HashSet<>()).addAll(ps.getValue()); } - securedConfigFiles.computeIfAbsent(file.toString(), k -> new HashSet<>()).addAll(ps.getValue()); } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java index fda66e230c52a..1151a99a24403 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -102,8 +102,8 @@ public static ComponentTemplateMetadata fromXContent(XContentParser parser) thro } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.xContentValuesMap(COMPONENT_TEMPLATE.getPreferredName(), componentTemplates); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).xContentObjectFields(COMPONENT_TEMPLATE.getPreferredName(), componentTemplates); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java index c786d9c6ea71c..e798b0f6add4f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -103,8 +103,8 @@ public void writeTo(StreamOutput out) throws IOException { } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.xContentValuesMap(INDEX_TEMPLATE.getPreferredName(), indexTemplates); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).xContentObjectFields(INDEX_TEMPLATE.getPreferredName(), indexTemplates); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java index 44f3101395b88..3f5e7a2e0c4aa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java @@ -233,7 +233,7 @@ public static DataStreamMetadata fromXContent(XContentParser parser) throws IOEx @Override public Iterator toXContentChunked(ToXContent.Params params) { return ChunkedToXContent.builder(params) - .object(DATA_STREAM.getPreferredName(), b -> b.appendXContentFields(dataStreams)) + .xContentObjectFields(DATA_STREAM.getPreferredName(), dataStreams) .xContentObject(DATA_STREAM_ALIASES.getPreferredName(), dataStreamAliases.values().iterator()); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 4cdf1508a7987..f43f1c6b05a15 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -665,7 +665,6 @@ private ClusterState applyCreateIndexRequestWithV2Template( request.mappings(), currentState, templateName, - Map.of(), xContentRegistry, request.index() ); @@ -824,7 +823,6 @@ private static List collectSystemV2Mappings( List templateMappings = MetadataIndexTemplateService.collectMappings( composableIndexTemplate, componentTemplates, - Map.of(), indexName ); return collectV2Mappings(null, templateMappings, xContentRegistry); @@ -834,16 +832,10 @@ public static List collectV2Mappings( @Nullable final String requestMappings, final ClusterState currentState, final String templateName, - Map componentTemplateSubstitutions, final NamedXContentRegistry xContentRegistry, final String indexName ) throws Exception { - List templateMappings = MetadataIndexTemplateService.collectMappings( - currentState, - templateName, - componentTemplateSubstitutions, - indexName - ); + List templateMappings = MetadataIndexTemplateService.collectMappings(currentState, templateName, indexName); return collectV2Mappings(requestMappings, templateMappings, xContentRegistry); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 1f9f6f636c1cf..abeb3279b7b50 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -698,7 +698,7 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT final var now = Instant.now(); final var metadata = currentState.getMetadata(); - final var combinedMappings = collectMappings(indexTemplate, metadata.componentTemplates(), Map.of(), "tmp_idx"); + final var combinedMappings = collectMappings(indexTemplate, metadata.componentTemplates(), "tmp_idx"); final var combinedSettings = resolveSettings(indexTemplate, metadata.componentTemplates()); // First apply settings sourced from index setting providers: for (var provider : indexSettingProviders) { @@ -1341,12 +1341,7 @@ private static boolean isGlobalAndHasIndexHiddenSetting(Metadata metadata, Compo /** * Collect the given v2 template into an ordered list of mappings. 
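After this change, collectMappings (shown in full in the next hunk) is a straight lookup of each composedOf name against the cluster's component templates, preserving declaration order. A schematic version with plain strings standing in for CompressedXContent and ComponentTemplate:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

final class MappingCollectorSketch {
    // Returns mappings in merge order: component templates first (in composedOf order),
    // then the index template's own mappings, which therefore win on conflict.
    static List<String> collectMappings(List<String> composedOf, Map<String, String> componentTemplates, String templateMappings) {
        List<String> mappings = new ArrayList<>();
        composedOf.stream().map(componentTemplates::get).filter(Objects::nonNull).forEach(mappings::add);
        if (templateMappings != null) {
            mappings.add(templateMappings);
        }
        return mappings;
    }
}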
*/ - public static List collectMappings( - final ClusterState state, - final String templateName, - Map componentTemplateSubstitutions, - final String indexName - ) { + public static List collectMappings(final ClusterState state, final String templateName, final String indexName) { final ComposableIndexTemplate template = state.metadata().templatesV2().get(templateName); assert template != null : "attempted to resolve mappings for a template [" + templateName + "] that did not exist in the cluster state"; @@ -1355,7 +1350,7 @@ public static List collectMappings( } final Map componentTemplates = state.metadata().componentTemplates(); - return collectMappings(template, componentTemplates, componentTemplateSubstitutions, indexName); + return collectMappings(template, componentTemplates, indexName); } /** @@ -1364,7 +1359,6 @@ public static List collectMappings( public static List collectMappings( final ComposableIndexTemplate template, final Map componentTemplates, - final Map componentTemplateSubstitutions, final String indexName ) { Objects.requireNonNull(template, "Composable index template must be provided"); @@ -1375,12 +1369,9 @@ public static List collectMappings( ComposableIndexTemplate.DataStreamTemplate.DATA_STREAM_MAPPING_SNIPPET ); } - final Map combinedComponentTemplates = new HashMap<>(); - combinedComponentTemplates.putAll(componentTemplates); - combinedComponentTemplates.putAll(componentTemplateSubstitutions); List mappings = template.composedOf() .stream() - .map(combinedComponentTemplates::get) + .map(componentTemplates::get) .filter(Objects::nonNull) .map(ComponentTemplate::template) .map(Template::mappings) @@ -1716,7 +1707,7 @@ private static void validateCompositeTemplate( String indexName = DataStream.BACKING_INDEX_PREFIX + temporaryIndexName; // Parse mappings to ensure they are valid after being composed - List mappings = collectMappings(stateWithIndex, templateName, Map.of(), indexName); + List mappings = collectMappings(stateWithIndex, templateName, indexName); try { MapperService mapperService = tempIndexService.mapperService(); mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mappings, MapperService.MergeReason.INDEX_TEMPLATE); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java index eb7e36a8e9c7d..272f321e386fc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/NodesShutdownMetadata.java @@ -17,7 +17,7 @@ import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -190,8 +190,8 @@ public int hashCode() { } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.xContentValuesMap(NODES_FIELD.getPreferredName(), nodes); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).xContentObjectFields(NODES_FIELD.getPreferredName(), nodes); } /** diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java 
b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java index 4b0a0c5e77ebb..b059113b4098c 100644 --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java @@ -80,6 +80,7 @@ public enum ReferenceDocs { FLOOD_STAGE_WATERMARK, X_OPAQUE_ID, FORMING_SINGLE_NODE_CLUSTERS, + CIRCUIT_BREAKER_ERRORS, // this comment keeps the ';' on the next line so every entry above has a trailing ',' which makes the diff for adding new links cleaner ; diff --git a/server/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java b/server/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java index 6d8510d27f27a..9669e78a119b9 100644 --- a/server/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java +++ b/server/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java @@ -10,6 +10,7 @@ package org.elasticsearch.common.breaker; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -87,7 +88,8 @@ public void circuitBreak(String fieldName, long bytesNeeded) { + memoryBytesLimit + "/" + ByteSizeValue.ofBytes(memoryBytesLimit) - + "]"; + + "]; for more information, see " + + ReferenceDocs.CIRCUIT_BREAKER_ERRORS; logger.debug(() -> format("%s", message)); throw new CircuitBreakingException(message, bytesNeeded, memoryBytesLimit, durability); } diff --git a/server/src/main/java/org/elasticsearch/common/util/AbstractHash.java b/server/src/main/java/org/elasticsearch/common/util/AbstractHash.java index e975f138e8b7d..687e9cb3fd9dc 100644 --- a/server/src/main/java/org/elasticsearch/common/util/AbstractHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/AbstractHash.java @@ -32,7 +32,17 @@ public long id(long index) { return ids.get(index) - 1; } - protected final long id(long index, long id) { + /** + * Set the id for the key at slot {@code index}, where {@code 0 <= index < capacity()}. + */ + protected final void setId(long index, long id) { + ids.set(index, id + 1); + } + + /** + * Set the id for the key at slot {@code index} ({@code 0 <= index < capacity()}) and return the previous id, or -1 if the slot was unused. + */ + protected final long getAndSetId(long index, long id) { return ids.getAndSet(index, id + 1) - 1; } diff --git a/server/src/main/java/org/elasticsearch/common/util/ArrayUtils.java b/server/src/main/java/org/elasticsearch/common/util/ArrayUtils.java index 96b694e04bd5e..be40bf16e20e4 100644 --- a/server/src/main/java/org/elasticsearch/common/util/ArrayUtils.java +++ b/server/src/main/java/org/elasticsearch/common/util/ArrayUtils.java @@ -126,4 +126,20 @@ public static void reverseSubArray(long[] array, int offset, int length) { end--; } } + + /** + * Reverse the {@code length} values on the array starting from {@code offset}.
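The old dual-purpose id(index, id) accessor is now split so that write-only call sites, like the probe loops in BytesRefHash and LongHash below, use setId and skip the read-back that getAndSet implies. A minimal open-addressing sketch of that insert loop over a plain long[] (ids stored plus one so zero marks an empty slot, as in AbstractHash; class and method names are illustrative):

final class OpenAddressingSketch {
    private final long[] ids; // stores id + 1, so 0 marks an empty slot
    private final long mask;

    OpenAddressingSketch(int powerOfTwoCapacity) {
        this.ids = new long[powerOfTwoCapacity];
        this.mask = powerOfTwoCapacity - 1;
    }

    private long id(long index) {
        return ids[(int) index] - 1;
    }

    private void setId(long index, long id) {
        ids[(int) index] = id + 1; // write-only: no read of the previous value
    }

    // Linear probing, as in nextSlot(index, mask): callers keep the load factor
    // below 1 so an empty slot (-1) is always found.
    void insert(long hash, long id) {
        for (long index = hash & mask;; index = (index + 1) & mask) {
            if (id(index) == -1) {
                setId(index, id);
                return;
            }
        }
    }
}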
+ */ + public static void reverseArray(byte[] array, int offset, int length) { + int start = offset; + int end = offset + length; + while (start < end) { + final byte temp = array[start]; + array[start] = array[end - 1]; + array[end - 1] = temp; + start++; + end--; + } + } + } diff --git a/server/src/main/java/org/elasticsearch/common/util/BigArrays.java b/server/src/main/java/org/elasticsearch/common/util/BigArrays.java index a33ee4c2edeac..4f0aae9380a01 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigArrays.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigArrays.java @@ -452,7 +452,13 @@ public T get(long index) { } @Override - public T set(long index, T value) { + public void set(long index, T value) { + assert index >= 0 && index < size(); + array[(int) index] = value; + } + + @Override + public T getAndSet(long index, T value) { assert index >= 0 && index < size(); @SuppressWarnings("unchecked") T ret = (T) array[(int) index]; diff --git a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java index 019ef341de8dc..95707b64b9a1e 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java @@ -46,7 +46,15 @@ public T get(long index) { } @Override - public T set(long index, T value) { + public void set(long index, T value) { + final int pageIndex = pageIndex(index); + final int indexInPage = indexInPage(index); + final Object[] page = pages[pageIndex]; + page[indexInPage] = value; + } + + @Override + public T getAndSet(long index, T value) { final int pageIndex = pageIndex(index); final int indexInPage = indexInPage(index); final Object[] page = pages[pageIndex]; diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java b/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java index 48a810789308f..208d29edad71d 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java @@ -169,7 +169,7 @@ private long set(BytesRef key, int code, long id) { for (long index = slot;; index = nextSlot(index, mask)) { final long curId = id(index); if (curId == -1) { // means unset - id(index, id); + setId(index, id); append(id, key, code); ++size; return id; @@ -197,7 +197,7 @@ private void reset(int code, long id) { for (long index = slot;; index = nextSlot(index, mask)) { final long curId = id(index); if (curId == -1) { // means unset - id(index, id); + setId(index, id); break; } } @@ -223,7 +223,7 @@ public long add(BytesRef key) { @Override protected void removeAndAdd(long index) { - final long id = id(index, -1); + final long id = getAndSetId(index, -1); assert id >= 0; final int code = hashes.get(id); reset(code, id); diff --git a/server/src/main/java/org/elasticsearch/common/util/Int3Hash.java b/server/src/main/java/org/elasticsearch/common/util/Int3Hash.java index f2a4288bf7c9b..dc49b39a031a1 100644 --- a/server/src/main/java/org/elasticsearch/common/util/Int3Hash.java +++ b/server/src/main/java/org/elasticsearch/common/util/Int3Hash.java @@ -79,7 +79,7 @@ private long set(int key1, int key2, int key3, long id) { while (true) { final long curId = id(index); if (curId == -1) { // means unset - id(index, id); + setId(index, id); append(id, key1, key2, key3); ++size; return id; @@ -106,7 +106,7 @@ private void reset(int key1, int key2, int key3, long 
id) { while (true) { final long curId = id(index); if (curId == -1) { // means unset - id(index, id); + setId(index, id); append(id, key1, key2, key3); break; } @@ -130,7 +130,7 @@ public long add(int key1, int key2, int key3) { @Override protected void removeAndAdd(long index) { - final long id = id(index, -1); + final long id = getAndSetId(index, -1); assert id >= 0; long keyOffset = id * 3; final int key1 = keys.getAndSet(keyOffset, 0); diff --git a/server/src/main/java/org/elasticsearch/common/util/LocaleUtils.java b/server/src/main/java/org/elasticsearch/common/util/LocaleUtils.java index 86e82886ed263..bdbf843ad2b53 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LocaleUtils.java +++ b/server/src/main/java/org/elasticsearch/common/util/LocaleUtils.java @@ -68,16 +68,16 @@ private static Locale parseParts(String[] parts) { switch (parts.length) { case 3: // lang, country, variant - return new Locale(parts[0], parts[1], parts[2]); + return Locale.of(parts[0], parts[1], parts[2]); case 2: // lang, country - return new Locale(parts[0], parts[1]); + return Locale.of(parts[0], parts[1]); case 1: if ("ROOT".equalsIgnoreCase(parts[0])) { return Locale.ROOT; } // lang - return new Locale(parts[0]); + return Locale.of(parts[0]); default: throw new IllegalArgumentException( "Locales can have at most 3 parts but got " + parts.length + ": " + Arrays.asList(parts) diff --git a/server/src/main/java/org/elasticsearch/common/util/LongHash.java b/server/src/main/java/org/elasticsearch/common/util/LongHash.java index 4de6772d22447..0c681063c50b0 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongHash.java @@ -67,7 +67,7 @@ private long set(long key, long id) { for (long index = slot;; index = nextSlot(index, mask)) { final long curId = id(index); if (curId == -1) { // means unset - id(index, id); + setId(index, id); append(id, key); ++size; return id; @@ -82,13 +82,13 @@ private void append(long id, long key) { keys.set(id, key); } - private void reset(long key, long id) { + private void reset(long id) { + final long key = keys.get(id); final long slot = slot(hash(key), mask); for (long index = slot;; index = nextSlot(index, mask)) { final long curId = id(index); if (curId == -1) { // means unset - id(index, id); - append(id, key); + setId(index, id); break; } } @@ -109,10 +109,9 @@ public long add(long key) { @Override protected void removeAndAdd(long index) { - final long id = id(index, -1); + final long id = getAndSetId(index, -1); assert id >= 0; - final long key = keys.getAndSet(id, 0); - reset(key, id); + reset(id); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java b/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java index f160ecdaa7079..f7708af59dde2 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java @@ -84,7 +84,7 @@ private long set(long key1, long key2, long id) { for (long index = slot;; index = nextSlot(index, mask)) { final long curId = id(index); if (curId == -1) { // means unset - id(index, id); + setId(index, id); append(id, key1, key2); ++size; return id; @@ -104,13 +104,16 @@ private void append(long id, long key1, long key2) { keys.set(keyOffset + 1, key2); } - private void reset(long key1, long key2, long id) { + private void reset(long id) { + final LongArray keys = this.keys; + final long keyOffset = id * 2; + 
final long key1 = keys.get(keyOffset); + final long key2 = keys.get(keyOffset + 1); final long slot = slot(hash(key1, key2), mask); for (long index = slot;; index = nextSlot(index, mask)) { final long curId = id(index); if (curId == -1) { // means unset - id(index, id); - append(id, key1, key2); + setId(index, id); break; } } @@ -132,12 +135,9 @@ public long add(long key1, long key2) { @Override protected void removeAndAdd(long index) { - final long id = id(index, -1); + final long id = getAndSetId(index, -1); assert id >= 0; - long keyOffset = id * 2; - final long key1 = keys.getAndSet(keyOffset, 0); - final long key2 = keys.getAndSet(keyOffset + 1, 0); - reset(key1, key2, id); + reset(id); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java b/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java index 8ef3b568b0396..d955863caa091 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java @@ -77,7 +77,7 @@ public T put(long key, T value) { */ public T remove(long key) { for (long i = slot(hash(key), mask);; i = nextSlot(i, mask)) { - final T previous = values.set(i, null); + final T previous = values.getAndSet(i, null); if (previous == null) { return null; } else if (keys.get(i) == key) { @@ -98,7 +98,7 @@ private T set(long key, T value) { throw new IllegalArgumentException("Null values are not supported"); } for (long i = slot(hash(key), mask);; i = nextSlot(i, mask)) { - final T previous = values.set(i, value); + final T previous = values.getAndSet(i, value); if (previous == null) { // slot was free keys.set(i, key); @@ -180,7 +180,7 @@ protected boolean used(long bucket) { @Override protected void removeAndAdd(long index) { final long key = keys.get(index); - final T value = values.set(index, null); + final T value = values.getAndSet(index, null); --size; final T removed = set(key, value); assert removed == null; diff --git a/server/src/main/java/org/elasticsearch/common/util/ObjectArray.java b/server/src/main/java/org/elasticsearch/common/util/ObjectArray.java index 24b010eb62aad..034b7b3c85692 100644 --- a/server/src/main/java/org/elasticsearch/common/util/ObjectArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/ObjectArray.java @@ -19,9 +19,14 @@ public interface ObjectArray extends BigArray { */ T get(long index); + /** + * Set a value at the given index. + */ + void set(long index, T value); + /** * Set a value at the given index and return the previous value. 
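The same motivation drives the ObjectArray change being closed out here: set becomes a void, write-only operation, and getAndSet survives only for callers that consume the previous value, such as the paged hash maps below that probe with values.getAndSet(index, null). A paged-array sketch of the two variants; the class and field names are illustrative, not BigObjectArray:

final class PagedObjectArraySketch {
    private final Object[][] pages;
    private final int pageSize; // a power of two in the real implementation

    PagedObjectArraySketch(int pageCount, int pageSize) {
        this.pages = new Object[pageCount][pageSize];
        this.pageSize = pageSize;
    }

    // Write-only variant: no read of the previous slot value is needed.
    void set(long index, Object value) {
        pages[(int) (index / pageSize)][(int) (index % pageSize)] = value;
    }

    // Read-modify-write variant, kept only for callers that use the old value.
    Object getAndSet(long index, Object value) {
        Object[] page = pages[(int) (index / pageSize)];
        int slot = (int) (index % pageSize);
        Object previous = page[slot];
        page[slot] = value;
        return previous;
    }
}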
*/ - T set(long index, T value); + T getAndSet(long index, T value); } diff --git a/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java b/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java index 58722c18b2434..298f910d65a9f 100644 --- a/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java +++ b/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java @@ -82,7 +82,7 @@ public V put(K key, V value) { public V remove(K key) { final long slot = slot(key.hashCode(), mask); for (long index = slot;; index = nextSlot(index, mask)) { - final V previous = values.set(index, null); + final V previous = values.getAndSet(index, null); if (previous == null) { return null; } else if (keys.get(index).equals(key)) { @@ -104,7 +104,7 @@ private V set(K key, int code, V value) { assert size < maxSize; final long slot = slot(code, mask); for (long index = slot;; index = nextSlot(index, mask)) { - final V previous = values.set(index, value); + final V previous = values.getAndSet(index, value); if (previous == null) { // slot was free keys.set(index, key); @@ -186,7 +186,7 @@ protected boolean used(long bucket) { @Override protected void removeAndAdd(long index) { final K key = keys.get(index); - final V value = values.set(index, null); + final V value = values.getAndSet(index, null); --size; final V removed = set(key, key.hashCode(), value); assert removed == null; diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java index ce0c2bd30f160..0868a7fa303ae 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java @@ -110,6 +110,46 @@ public ChunkedToXContentBuilder xContentObject(String name, Iterator map) { + /** + * Creates an object, with the contents of each field created from each entry in {@code map} + */ + public ChunkedToXContentBuilder xContentObjectFields(Map map) { + startObject(); + map.forEach(this::field); + endObject(); + return this; + } + + /** + * Creates an object named {@code name}, with the contents of each field created from each entry in {@code map} + */ + public ChunkedToXContentBuilder xContentObjectFields(String name, Map map) { + startObject(name); + map.forEach(this::field); + endObject(); + return this; + } + + /** + * Creates an object, with each entry in {@code map} rendered as a field whose value is itself an object + */ + public ChunkedToXContentBuilder xContentObjectFieldObjects(Map map) { + startObject(); + map.forEach(this::xContentObject); + endObject(); + return this; + } + + /** + * Creates an object named {@code name}, with each entry in {@code map} rendered as a field whose value is itself an object + */ + public ChunkedToXContentBuilder xContentObjectFieldObjects(String name, Map map) { + startObject(name); + map.forEach(this::xContentObject); + endObject(); + return this; + } + /** * Creates an object, with the contents set by {@code contents} */ @@ -172,6 +212,26 @@ public ChunkedToXContentBuilder object(String name, Iterator items, Funct return this; } + /** + * Creates an object with the contents of each field set by {@code map} + */ + public ChunkedToXContentBuilder object(Map map) { + startObject(); + map.forEach(this::field); + endObject(); + return this; + } + + /** + * Creates an object named {@code name}, with the contents of each field set by {@code map} + */ + public ChunkedToXContentBuilder object(String name, Map map) { + startObject(name); + 
map.forEach(this::field); + endObject(); + return this; + } + private void startArray() { addChunk((b, p) -> b.startArray()); } @@ -223,7 +283,7 @@ public ChunkedToXContentBuilder array(String name, Iterator items, BiCons /** * Creates an array named {@code name}, with the contents set by appending together the contents of {@code items} */ - public ChunkedToXContentBuilder array(String name, Iterator items) { + public ChunkedToXContentBuilder array(String name, Iterator items) { startArray(name); items.forEachRemaining(this::append); endArray(); @@ -246,16 +306,51 @@ public ChunkedToXContentBuilder field(String name) { return this; } + public ChunkedToXContentBuilder field(String name, boolean value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, Boolean value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, int value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, Integer value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + public ChunkedToXContentBuilder field(String name, long value) { addChunk((b, p) -> b.field(name, value)); return this; } + public ChunkedToXContentBuilder field(String name, Long value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + public ChunkedToXContentBuilder field(String name, String value) { addChunk((b, p) -> b.field(name, value)); return this; } + public ChunkedToXContentBuilder field(String name, Enum value) { + addChunk((b, p) -> b.field(name, value)); + return this; + } + + public ChunkedToXContentBuilder field(String name, ToXContent value) { + addChunk((b, p) -> b.field(name, value, p)); + return this; + } + public ChunkedToXContentBuilder field(String name, Object value) { addChunk((b, p) -> b.field(name, value)); return this; @@ -289,30 +384,6 @@ public ChunkedToXContentBuilder forEach(Iterator items, Function - * Note that any {@link ToXContent} objects in {@code map} will be passed an empty {@link ToXContent.Params}, - * rather than the {@code params} given to this builder in the constructor. - */ - public ChunkedToXContentBuilder appendEntries(Map map) { - return forEach(map.entrySet().iterator(), (b, e) -> b.field(e.getKey(), e.getValue())); - } - - /** - * Each value in {@code map} is added to the builder as a separate object, named by its key. - */ - public ChunkedToXContentBuilder appendXContentObjects(Map map) { - return forEach(map.entrySet().iterator(), (b, e) -> b.xContentObject(e.getKey(), e.getValue())); - } - - /** - * Each value in {@code map} is added to the builder as a separate field, named by its key. 
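The new field overloads above pair each primitive with its boxed counterpart: the primitive form avoids autoboxing on hot serialization paths, while the boxed form tolerates null values. A small demonstration of how Java overload resolution picks between such a pair; FieldWriterDemo is a hypothetical stand-in:

final class FieldWriterDemo {
    static void field(String name, int value) {
        System.out.println(name + " -> primitive int " + value);
    }

    static void field(String name, Integer value) {
        // Boxed overload: may legitimately receive null and emit a null field.
        System.out.println(name + " -> boxed Integer " + value);
    }

    public static void main(String[] args) {
        field("count", 3);              // exact match: the int overload, no boxing
        field("count", (Integer) null); // only the Integer overload applies
        Integer boxed = 7;
        field("count", boxed);          // already boxed: Integer overload, no unboxing
    }
}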
- */ - public ChunkedToXContentBuilder appendXContentFields(Map map) { - return forEach(map.entrySet().iterator(), (b, e) -> b.field(e.getKey()).append(e.getValue())); - } - public ChunkedToXContentBuilder append(ToXContent xContent) { if (xContent != ToXContent.EMPTY) { addChunk(xContent); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java index 8755139ad84b7..940d4495ae909 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java @@ -13,8 +13,6 @@ import org.elasticsearch.xcontent.ToXContent; import java.util.Iterator; -import java.util.Map; -import java.util.function.Function; public enum ChunkedToXContentHelper { ; @@ -43,36 +41,6 @@ public static Iterator endArray() { return Iterators.single(((builder, params) -> builder.endArray())); } - public static Iterator map(String name, Map map) { - return map(name, map, entry -> (ToXContent) (builder, params) -> builder.field(entry.getKey(), entry.getValue())); - } - - public static Iterator xContentFragmentValuesMap(String name, Map map) { - return map( - name, - map, - entry -> (ToXContent) (builder, params) -> entry.getValue().toXContent(builder.startObject(entry.getKey()), params).endObject() - ); - } - - public static Iterator xContentValuesMap(String name, Map map) { - return map( - name, - map, - entry -> (ToXContent) (builder, params) -> entry.getValue().toXContent(builder.field(entry.getKey()), params) - ); - } - - /** - * Like xContentFragmentValuesMap, but allows the underlying XContent object to define its own "name" with startObject(string) - * and endObject, rather than assuming that the key in the map should be the name in the XContent output. - * @param name name to use in the XContent for the outer object wrapping the map being rendered to XContent - * @param map map being rendered to XContent - */ - public static Iterator xContentFragmentValuesMapCreateOwnName(String name, Map map) { - return map(name, map, entry -> (ToXContent) (builder, params) -> entry.getValue().toXContent(builder, params)); - } - public static Iterator field(String name, boolean value) { return Iterators.single(((builder, params) -> builder.field(name, value))); } @@ -101,30 +69,10 @@ public static Iterator array(String name, Iterator array(String name, Iterator contents, ToXContent.Params params) { - return Iterators.concat( - ChunkedToXContentHelper.startArray(name), - Iterators.flatMap(contents, c -> c.toXContentChunked(params)), - ChunkedToXContentHelper.endArray() - ); - } - public static Iterator wrapWithObject(String name, Iterator iterator) { return Iterators.concat(startObject(name), iterator, endObject()); } - public static Iterator map(String name, Map map, Function, ToXContent> toXContent) { - return wrapWithObject(name, Iterators.map(map.entrySet().iterator(), toXContent)); - } - /** * Creates an Iterator of a single ToXContent object that serializes the given object as a single chunk. Just wraps {@link * Iterators#single}, but still useful because it avoids any type ambiguity. 
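Several hunks above replace hand-assembled Iterators.concat chains and the now-deleted ChunkedToXContentHelper map utilities with the fluent ChunkedToXContent.builder(params) API. The underlying idea is a builder that records deferred chunk producers and exposes them as one iterator; here is a self-contained miniature with illustrative types, not the ES classes:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

final class ChunkBuilderSketch {
    interface Chunk {
        String render();
    }

    private final List<Chunk> chunks = new ArrayList<>();

    ChunkBuilderSketch append(Chunk chunk) {
        chunks.add(chunk);
        return this;
    }

    // appendIfPresent-style convenience: a null section simply emits nothing.
    ChunkBuilderSketch appendIfPresent(Chunk chunk) {
        return chunk == null ? this : append(chunk);
    }

    Iterator<Chunk> build() {
        return chunks.iterator();
    }

    public static void main(String[] args) {
        Iterator<Chunk> chunks = new ChunkBuilderSketch()
            .append(() -> "{")
            .appendIfPresent(null) // skipped, like a response without aggregations
            .append(() -> "}")
            .build();
        chunks.forEachRemaining(c -> System.out.print(c.render()));
    }
}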
diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java index f3d758f4fc8b7..ae372ea8194bc 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java @@ -67,7 +67,7 @@ public Elasticsearch814Codec() { */ public Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode mode) { super("Elasticsearch814", lucene99Codec); - this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.storedFieldsFormat = mode.getFormat(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 00711c7ecc306..27ff19a9d8e40 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -28,9 +28,13 @@ */ public class Elasticsearch816Codec extends CodecService.DeduplicateFieldInfosCodec { + private static final Lucene912Codec LUCENE_912_CODEC = new Lucene912Codec(); + private static final PostingsFormat defaultPostingsFormat = new Lucene912PostingsFormat(); + private static final DocValuesFormat defaultDVFormat = new Lucene90DocValuesFormat(); + private static final KnnVectorsFormat defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + private final StoredFieldsFormat storedFieldsFormat; - private final PostingsFormat defaultPostingsFormat; private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { @Override public PostingsFormat getPostingsFormatForField(String field) { @@ -38,7 +42,6 @@ public PostingsFormat getPostingsFormatForField(String field) { } }; - private final DocValuesFormat defaultDVFormat; private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { @Override public DocValuesFormat getDocValuesFormatForField(String field) { @@ -46,7 +49,6 @@ public DocValuesFormat getDocValuesFormatForField(String field) { } }; - private final KnnVectorsFormat defaultKnnVectorsFormat; private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { @@ -64,11 +66,8 @@ public Elasticsearch816Codec() { * worse space-efficiency or vice-versa. 
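The codec hunks here and below hoist stateless format objects into static finals, so codec instances share them instead of each allocating its own; the Zstd814StoredFieldsFormat change later in this diff applies the same idea by caching one format per enum constant. A generic sketch of both shapes, with hypothetical FooFormat and CompressionMode names:

final class FooFormat {
    // Stateless and immutable, so one instance can safely be shared by every codec.
    static final FooFormat INSTANCE = new FooFormat();

    private FooFormat() {}
}

class FooCodec {
    // Before: private final FooFormat format = new FooFormat(); (one allocation per codec)
    // After: every FooCodec shares the single static instance.
    private static final FooFormat FORMAT = FooFormat.INSTANCE;
}

enum CompressionMode {
    BEST_SPEED(1),
    BEST_COMPRESSION(3);

    final int level;
    final String format; // stand-in for the per-mode stored-fields format object

    CompressionMode(int level) {
        this.level = level;
        this.format = "zstd(level=" + level + ")"; // built once per constant, then reused
    }

    String getFormat() {
        return format;
    }
}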
*/ public Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode mode) { - super("Elasticsearch816", new Lucene912Codec()); - this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); - this.defaultPostingsFormat = new Lucene912PostingsFormat(); - this.defaultDVFormat = new Lucene90DocValuesFormat(); - this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + super("Elasticsearch816", LUCENE_912_CODEC); + this.storedFieldsFormat = mode.getFormat(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java index 671931ac7154a..648913098ff0d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java @@ -21,17 +21,10 @@ public class DocValuesForUtil { private static final int BITS_IN_FIVE_BYTES = 5 * Byte.SIZE; private static final int BITS_IN_SIX_BYTES = 6 * Byte.SIZE; private static final int BITS_IN_SEVEN_BYTES = 7 * Byte.SIZE; - private final int blockSize; - private final byte[] encoded; + private static final int blockSize = ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; + private final byte[] encoded = new byte[1024]; - public DocValuesForUtil() { - this(ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE); - } - - private DocValuesForUtil(int blockSize) { - this.blockSize = blockSize; - this.encoded = new byte[1024]; - } + public DocValuesForUtil() {} public static int roundBits(int bitsPerValue) { if (bitsPerValue > 24 && bitsPerValue <= 32) { @@ -74,7 +67,7 @@ private void encodeFiveSixOrSevenBytesPerValue(long[] in, int bitsPerValue, fina out.writeBytes(this.encoded, bytesPerValue * in.length); } - public void decode(int bitsPerValue, final DataInput in, long[] out) throws IOException { + public static void decode(int bitsPerValue, final DataInput in, long[] out) throws IOException { if (bitsPerValue <= 24) { ForUtil.decode(bitsPerValue, in, out); } else if (bitsPerValue <= 32) { @@ -88,7 +81,7 @@ public void decode(int bitsPerValue, final DataInput in, long[] out) throws IOEx } } - private void decodeFiveSixOrSevenBytesPerValue(int bitsPerValue, final DataInput in, long[] out) throws IOException { + private static void decodeFiveSixOrSevenBytesPerValue(int bitsPerValue, final DataInput in, long[] out) throws IOException { // NOTE: we expect multibyte values to be written "least significant byte" first int bytesPerValue = bitsPerValue / Byte.SIZE; long mask = (1L << bitsPerValue) - 1; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java index f152a0b0601a2..4e95ce34dc410 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java @@ -275,7 +275,7 @@ void decodeOrdinals(DataInput in, long[] out, int bitsPerOrd) throws IOException Arrays.fill(out, runLen, out.length, v2); } else if (encoding == 2) { // bit-packed - forUtil.decode(bitsPerOrd, in, out); + DocValuesForUtil.decode(bitsPerOrd, in, out); } else if (encoding == 3) { // cycle encoding int cycleLength = (int) v1; @@ -299,7 +299,7 @@ void decode(DataInput in, long[] out) throws IOException { final int bitsPerValue = token >>> 3; if (bitsPerValue != 0) { - forUtil.decode(bitsPerValue, in, out); + 
DocValuesForUtil.decode(bitsPerValue, in, out); } else { Arrays.fill(out, 0L); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index a887516e5e7cc..e3c2daddba80e 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -355,7 +355,7 @@ public TermsEnum termsEnum() throws IOException { } } - private abstract class BaseSortedSetDocValues extends SortedSetDocValues { + private abstract static class BaseSortedSetDocValues extends SortedSetDocValues { final SortedSetEntry entry; final IndexInput data; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java index 7a8d09c02ba3b..b1e91ad75e9a2 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -36,7 +36,7 @@ public class ES813FlatVectorFormat extends KnnVectorsFormat { static final String NAME = "ES813FlatVectorFormat"; - private final FlatVectorsFormat format = new Lucene99FlatVectorsFormat(DefaultFlatVectorScorer.INSTANCE); + private static final FlatVectorsFormat format = new Lucene99FlatVectorsFormat(DefaultFlatVectorScorer.INSTANCE); /** * Sole constructor diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 4313aa40cf13e..4bf396e8d5ad1 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -49,6 +49,10 @@ public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { private static final FlatVectorsFormat rawVectorFormat = new Lucene99FlatVectorsFormat(DefaultFlatVectorScorer.INSTANCE); + static final FlatVectorsScorer flatVectorScorer = new ESFlatVectorsScorer( + new ScalarQuantizedVectorScorer(DefaultFlatVectorScorer.INSTANCE) + ); + /** The minimum confidence interval */ private static final float MINIMUM_CONFIDENCE_INTERVAL = 0.9f; @@ -60,7 +64,6 @@ public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { * calculated as `1-1/(vector_dimensions + 1)` */ public final Float confidenceInterval; - final FlatVectorsScorer flatVectorScorer; private final byte bits; private final boolean compress; @@ -83,7 +86,6 @@ public ES814ScalarQuantizedVectorsFormat(Float confidenceInterval, int bits, boo throw new IllegalArgumentException("bits must be one of: 4, 7, 8; bits=" + bits); } this.confidenceInterval = confidenceInterval; - this.flatVectorScorer = new ESFlatVectorsScorer(new ScalarQuantizedVectorScorer(DefaultFlatVectorScorer.INSTANCE)); this.bits = (byte) bits; this.compress = compress; } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java index 2df0757a8b8ee..af771b6a27f19 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java @@ -22,7 +22,7 @@ public class ES815BitFlatVectorFormat extends KnnVectorsFormat { static final String NAME = "ES815BitFlatVectorFormat"; - private final FlatVectorsFormat format = new ES815BitFlatVectorsFormat(); + private static final FlatVectorsFormat format = new ES815BitFlatVectorsFormat(); /** * Sole constructor diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java index f1ae4e3fdeded..5969c9d5db6d7 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java @@ -27,7 +27,7 @@ class ES815BitFlatVectorsFormat extends FlatVectorsFormat { - private final FlatVectorsFormat delegate = new Lucene99FlatVectorsFormat(FlatBitVectorScorer.INSTANCE); + private static final FlatVectorsFormat delegate = new Lucene99FlatVectorsFormat(FlatBitVectorScorer.INSTANCE); protected ES815BitFlatVectorsFormat() { super("ES815BitFlatVectorsFormat"); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java index 55271719a4574..5e4656ea94c5b 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java @@ -30,7 +30,7 @@ public class ES815HnswBitVectorsFormat extends KnnVectorsFormat { private final int maxConn; private final int beamWidth; - private final FlatVectorsFormat flatVectorsFormat = new ES815BitFlatVectorsFormat(); + private static final FlatVectorsFormat flatVectorsFormat = new ES815BitFlatVectorsFormat(); public ES815HnswBitVectorsFormat() { this(16, 100); diff --git a/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java index 84871b5c811dd..6aa77b7222696 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java @@ -52,17 +52,23 @@ public enum Mode { BEST_COMPRESSION(3, BEST_COMPRESSION_BLOCK_SIZE, 2048); final int level, blockSizeInBytes, blockDocCount; + final Zstd814StoredFieldsFormat format; Mode(int level, int blockSizeInBytes, int blockDocCount) { this.level = level; this.blockSizeInBytes = blockSizeInBytes; this.blockDocCount = blockDocCount; + this.format = new Zstd814StoredFieldsFormat(this); + } + + public Zstd814StoredFieldsFormat getFormat() { + return format; } } private final Mode mode; - public Zstd814StoredFieldsFormat(Mode mode) { + private Zstd814StoredFieldsFormat(Mode mode) { super("ZstdStoredFields814", new ZstdCompressionMode(mode.level), mode.blockSizeInBytes, mode.blockDocCount, 10); this.mode = mode; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DataStreamTimestampFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DataStreamTimestampFieldMapper.java index 651d9e76e84a2..481901f7c03ce 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DataStreamTimestampFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/DataStreamTimestampFieldMapper.java @@ -165,10 +165,11 @@ public void doValidate(MappingLookup lookup) { Map configuredSettings = XContentHelper.convertToMap(BytesReference.bytes(builder), false, XContentType.JSON).v2(); configuredSettings = (Map) configuredSettings.values().iterator().next(); - // Only type, meta and format attributes are allowed: + // Only type, meta, format, and locale attributes are allowed: configuredSettings.remove("type"); configuredSettings.remove("meta"); configuredSettings.remove("format"); + configuredSettings.remove("locale"); // ignoring malformed values is disallowed (see previous check), // however if `index.mapping.ignore_malformed` has been set to true then diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 2e602033442c7..7be5ee2200b5c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -79,11 +79,16 @@ public final class DateFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "date"; public static final String DATE_NANOS_CONTENT_TYPE = "date_nanos"; - public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis"); + public static final Locale DEFAULT_LOCALE = Locale.ENGLISH; + // although the locale doesn't affect the results, tests still check formatter equality, which does include locale + public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis") + .withLocale(DEFAULT_LOCALE); public static final DateFormatter DEFAULT_DATE_TIME_NANOS_FORMATTER = DateFormatter.forPattern( "strict_date_optional_time_nanos||epoch_millis" - ); - private static final DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser(); + ).withLocale(DEFAULT_LOCALE); + private static final DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis") + .withLocale(DEFAULT_LOCALE) + .toDateMathParser(); public enum Resolution { MILLISECONDS(CONTENT_TYPE, NumericType.DATE, DateMillisDocValuesField::new) { @@ -232,7 +237,7 @@ public static final class Builder extends FieldMapper.Builder { private final Parameter locale = new Parameter<>( "locale", false, - () -> Locale.ROOT, + () -> DEFAULT_LOCALE, (n, c, o) -> LocaleUtils.parse(o.toString()), m -> toType(m).locale, (xContentBuilder, n, v) -> xContentBuilder.field(n, v.toString()), diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java index e519fec09ce78..341944c3d687a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java @@ -69,7 +69,7 @@ private static class Builder extends AbstractScriptFieldType.Builder o == null ? null : LocaleUtils.parse(o.toString()), RuntimeField.initializerNotSupported(), (b, n, v) -> { - if (v != null && false == v.equals(Locale.ROOT)) { + if (v != null && false == v.equals(DateFieldMapper.DEFAULT_LOCALE)) { b.field(n, v.toString()); } }, @@ -97,7 +97,7 @@ protected AbstractScriptFieldType createFieldType( OnScriptError onScriptError ) { String pattern = format.getValue() == null ? 
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern() : format.getValue(); - Locale locale = this.locale.getValue() == null ? Locale.ROOT : this.locale.getValue(); + Locale locale = this.locale.getValue() == null ? DateFieldMapper.DEFAULT_LOCALE : this.locale.getValue(); DateFormatter dateTimeFormatter = DateFormatter.forPattern(pattern, supportedVersion).withLocale(locale); return new DateScriptFieldType(name, factory, dateTimeFormatter, script, meta, onScriptError); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index c82621baa717a..19bd4f9980baf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -135,7 +135,7 @@ private void internalParseDocument(MetadataFieldMapper[] metadataFieldsMappers, new IgnoredSourceFieldMapper.NameValue( MapperService.SINGLE_MAPPING_NAME, 0, - XContentDataHelper.encodeToken(context.parser()), + context.encodeFlattenedToken(), context.doc() ) ); @@ -236,7 +236,7 @@ private static List parseDocForMissingValues var leaf = fields.get(fullName); // There may be multiple matches for array elements, don't use #remove. if (leaf != null) { parser.nextToken(); // Advance the parser to the value to be read. - result.add(leaf.cloneWithValue(XContentDataHelper.encodeToken(parser))); + result.add(leaf.cloneWithValue(context.encodeFlattenedToken())); parser.nextToken(); // Skip the token ending the value. fieldName = null; } @@ -402,7 +402,7 @@ static void parseObjectOrNested(DocumentParserContext context) throws IOExceptio new IgnoredSourceFieldMapper.NameValue( context.parent().fullPath(), context.parent().fullPath().lastIndexOf(context.parent().leafName()), - XContentDataHelper.encodeToken(parser), + context.encodeFlattenedToken(), context.doc() ) ); @@ -421,20 +421,21 @@ static void parseObjectOrNested(DocumentParserContext context) throws IOExceptio throwOnConcreteValue(context.parent(), currentFieldName, context); } + if (context.canAddIgnoredField() && getSourceKeepMode(context, context.parent().sourceKeepMode()) == Mapper.SourceKeepMode.ALL) { + context = context.addIgnoredFieldFromContext( + new IgnoredSourceFieldMapper.NameValue( + context.parent().fullPath(), + context.parent().fullPath().lastIndexOf(context.parent().leafName()), + null, + context.doc() + ) + ); + token = context.parser().currentToken(); + parser = context.parser(); + } + if (context.parent().isNested()) { // Handle a nested object that doesn't contain an array. Arrays are handled in #parseNonDynamicArray. 
- if (context.parent().storeArraySource() && context.canAddIgnoredField()) { - context = context.addIgnoredFieldFromContext( - new IgnoredSourceFieldMapper.NameValue( - context.parent().fullPath(), - context.parent().fullPath().lastIndexOf(context.parent().leafName()), - null, - context.doc() - ) - ); - token = context.parser().currentToken(); - parser = context.parser(); - } context = context.createNestedContext((NestedObjectMapper) context.parent()); } @@ -651,12 +652,11 @@ private static void parseObjectDynamic(DocumentParserContext context, String cur if (context.dynamic() == ObjectMapper.Dynamic.FALSE) { failIfMatchesRoutingPath(context, currentFieldName); if (context.canAddIgnoredField()) { - // read everything up to end object and store it context.addIgnoredField( IgnoredSourceFieldMapper.NameValue.fromContext( context, context.path().pathAsText(currentFieldName), - XContentDataHelper.encodeToken(context.parser()) + context.encodeFlattenedToken() ) ); } else { @@ -742,7 +742,7 @@ private static void parseArrayDynamic(DocumentParserContext context, String curr IgnoredSourceFieldMapper.NameValue.fromContext( context, context.path().pathAsText(currentFieldName), - XContentDataHelper.encodeToken(context.parser()) + context.encodeFlattenedToken() ) ); } else { @@ -760,7 +760,7 @@ private static void parseArrayDynamic(DocumentParserContext context, String curr IgnoredSourceFieldMapper.NameValue.fromContext( context, context.path().pathAsText(currentFieldName), - XContentDataHelper.encodeToken(context.parser()) + context.encodeFlattenedToken() ) ); } catch (IOException e) { @@ -801,8 +801,8 @@ private static void parseNonDynamicArray( // Check if we need to record the array source. This only applies to synthetic source. if (context.canAddIgnoredField()) { boolean objectRequiresStoringSource = mapper instanceof ObjectMapper objectMapper - && (objectMapper.storeArraySource() - || (context.sourceKeepModeFromIndexSettings() == Mapper.SourceKeepMode.ARRAYS + && (getSourceKeepMode(context, objectMapper.sourceKeepMode()) == Mapper.SourceKeepMode.ALL + || (getSourceKeepMode(context, objectMapper.sourceKeepMode()) == Mapper.SourceKeepMode.ARRAYS && objectMapper instanceof NestedObjectMapper == false)); boolean fieldWithFallbackSyntheticSource = mapper instanceof FieldMapper fieldMapper && fieldMapper.syntheticSourceMode() == FieldMapper.SyntheticSourceMode.FALLBACK; @@ -817,17 +815,15 @@ private static void parseNonDynamicArray( } else if (mapper instanceof ObjectMapper objectMapper && (objectMapper.isEnabled() == false)) { // No need to call #addIgnoredFieldFromContext as both singleton and array instances of this object // get tracked through ignored source. - context.addIgnoredField( - IgnoredSourceFieldMapper.NameValue.fromContext(context, fullPath, XContentDataHelper.encodeToken(context.parser())) - ); + context.addIgnoredField(IgnoredSourceFieldMapper.NameValue.fromContext(context, fullPath, context.encodeFlattenedToken())); return; } } // In synthetic source, if any array element requires storing its source as-is, it takes precedence over // elements from regular source loading that are then skipped from the synthesized array source. - // To prevent this, we track each array name, to check if it contains any sub-arrays in its elements. - context = context.cloneForArray(fullPath); + // To prevent this, we track whether the parsing sub-context is within array scope.
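// Editorial note, not part of the change: the branch condition inside
// maybeCloneForArray (see the DocumentParserContext hunk below),
//
//     (inArrayScope == false && isNested == false) || (inArrayScope && isNested)
//
// is simply (inArrayScope == isNested): the cloned sub-context flips the flag when
// entering an array of plain objects and flips it back when descending into a
// nested object, i.e. equivalently
//
//     if (mapper instanceof ObjectMapper
//             && inArrayScope == (mapper instanceof NestedObjectMapper)) {
//         subcontext.inArrayScope = inArrayScope == false;
//     }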
+ context = context.maybeCloneForArray(mapper); XContentParser parser = context.parser(); XContentParser.Token token; @@ -933,7 +931,7 @@ private static void parseDynamicValue(DocumentParserContext context, String curr IgnoredSourceFieldMapper.NameValue.fromContext( context, context.path().pathAsText(currentFieldName), - XContentDataHelper.encodeToken(context.parser()) + context.encodeFlattenedToken() ) ); } @@ -944,7 +942,7 @@ private static void parseDynamicValue(DocumentParserContext context, String curr IgnoredSourceFieldMapper.NameValue.fromContext( context, context.path().pathAsText(currentFieldName), - XContentDataHelper.encodeToken(context.parser()) + context.encodeFlattenedToken() ) ); } @@ -1043,7 +1041,7 @@ protected void parseCreateField(DocumentParserContext context) { if (context.dynamic() == ObjectMapper.Dynamic.RUNTIME && context.canAddIgnoredField()) { try { context.addIgnoredField( - IgnoredSourceFieldMapper.NameValue.fromContext(context, path, XContentDataHelper.encodeToken(context.parser())) + IgnoredSourceFieldMapper.NameValue.fromContext(context, path, context.encodeFlattenedToken()) ); } catch (IOException e) { throw new IllegalArgumentException( @@ -1115,15 +1113,7 @@ protected SyntheticSourceSupport syntheticSourceSupport() { private static class NoOpObjectMapper extends ObjectMapper { NoOpObjectMapper(String name, String fullPath) { - super( - name, - fullPath, - Explicit.IMPLICIT_TRUE, - Optional.empty(), - Explicit.IMPLICIT_FALSE, - Dynamic.RUNTIME, - Collections.emptyMap() - ); + super(name, fullPath, Explicit.IMPLICIT_TRUE, Optional.empty(), Optional.empty(), Dynamic.RUNTIME, Collections.emptyMap()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index c2970d8716147..eebe95e260dcf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -12,6 +12,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; @@ -110,7 +111,7 @@ public int get() { private final Set ignoredFields; private final List ignoredFieldValues; private final List ignoredFieldsMissingValues; - private String parentArrayField; + private boolean inArrayScope; private final Map> dynamicMappers; private final DynamicMapperSize dynamicMappersSize; @@ -142,7 +143,7 @@ private DocumentParserContext( Set ignoreFields, List ignoredFieldValues, List ignoredFieldsWithNoSource, - String parentArrayField, + boolean inArrayScope, Map> dynamicMappers, Map dynamicObjectMappers, Map> dynamicRuntimeFields, @@ -163,7 +164,7 @@ private DocumentParserContext( this.ignoredFields = ignoreFields; this.ignoredFieldValues = ignoredFieldValues; this.ignoredFieldsMissingValues = ignoredFieldsWithNoSource; - this.parentArrayField = parentArrayField; + this.inArrayScope = inArrayScope; this.dynamicMappers = dynamicMappers; this.dynamicObjectMappers = dynamicObjectMappers; this.dynamicRuntimeFields = dynamicRuntimeFields; @@ -187,7 +188,7 @@ private DocumentParserContext(ObjectMapper parent, ObjectMapper.Dynamic dynamic, in.ignoredFields, in.ignoredFieldValues, in.ignoredFieldsMissingValues, - in.parentArrayField, + 
in.inArrayScope, in.dynamicMappers, in.dynamicObjectMappers, in.dynamicRuntimeFields, @@ -218,7 +219,7 @@ protected DocumentParserContext( new HashSet<>(), new ArrayList<>(), new ArrayList<>(), - null, + false, new HashMap<>(), new HashMap<>(), new HashMap<>(), @@ -323,10 +324,7 @@ public final void deduplicateIgnoredFieldValues(final Set fullNames) { public final DocumentParserContext addIgnoredFieldFromContext(IgnoredSourceFieldMapper.NameValue ignoredFieldWithNoSource) throws IOException { if (canAddIgnoredField()) { - if (parentArrayField != null - && parent != null - && parentArrayField.equals(parent.fullPath()) - && parent instanceof NestedObjectMapper == false) { + if (inArrayScope) { // The field is an array within an array, store all sub-array elements. ignoredFieldsMissingValues.add(ignoredFieldWithNoSource); return cloneWithRecordedSource(); @@ -341,6 +339,20 @@ public final DocumentParserContext addIgnoredFieldFromContext(IgnoredSourceField return this; } + /** + * Wraps {@link XContentDataHelper#encodeToken}, disabling dot expansion from {@link DotExpandingXContentParser}. + * This helps avoid producing duplicate names in the same scope, due to expanding dots to objects. + * For instance: { "a.b": "b", "a.c": "c" } => { "a": { "b": "b" }, "a": { "c": "c" } } + * This can happen when storing parts of document source that are not indexed (e.g. disabled objects). + */ + BytesRef encodeFlattenedToken() throws IOException { + boolean old = path().isWithinLeafObject(); + path().setWithinLeafObject(true); + BytesRef encoded = XContentDataHelper.encodeToken(parser()); + path().setWithinLeafObject(old); + return encoded; + } + /** * Return the collection of fields that are missing their source values. */ @@ -349,14 +361,17 @@ public final Collection getIgnoredFieldsMiss } /** - * Clones the current context to mark it as an array. Records the full name of the array field, to check for sub-arrays. + * Clones the current context to mark it as an array, if it's not already marked, or restore it if it's within a nested object. * Applies to synthetic source only. 
*/ - public final DocumentParserContext cloneForArray(String fullName) throws IOException { - if (canAddIgnoredField()) { - DocumentParserContext subcontext = switchParser(parser()); - subcontext.parentArrayField = fullName; - return subcontext; + public final DocumentParserContext maybeCloneForArray(Mapper mapper) throws IOException { + if (canAddIgnoredField() && mapper instanceof ObjectMapper) { + boolean isNested = mapper instanceof NestedObjectMapper; + if ((inArrayScope == false && isNested == false) || (inArrayScope && isNested)) { + DocumentParserContext subcontext = switchParser(parser()); + subcontext.inArrayScope = inArrayScope == false; + return subcontext; + } } return this; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java index d57edb757ba10..296c2c5311d9a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java @@ -57,6 +57,7 @@ public class IgnoredSourceFieldMapper extends MetadataFieldMapper { public static final TypeParser PARSER = new FixedTypeParser(context -> new IgnoredSourceFieldMapper(context.getIndexSettings())); static final NodeFeature TRACK_IGNORED_SOURCE = new NodeFeature("mapper.track_ignored_source"); + static final NodeFeature DONT_EXPAND_DOTS_IN_IGNORED_SOURCE = new NodeFeature("mapper.ignored_source.dont_expand_dots"); /* Setting to disable encoding and writing values for this field. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index cf8f391813c09..4f90bd6e6f2c9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -21,9 +21,15 @@ * Spec for mapper-related features. */ public class MapperFeatures implements FeatureSpecification { + + // Used to avoid noise in mixed cluster and rest compatibility tests. Must not be backported to 8.x branch. + // This label gets added to tests with such failures before merging with main, then removed when backported to 8.x. 
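// Illustrative sketch, assumed test-framework usage rather than part of the change:
// node features like the BWC workaround declared just below, or the
// dont_expand_dots test feature above, are typically consumed as gates in
// mixed-cluster REST tests, along the lines of:
//
//     assumeTrue("ignored source must not expand dotted field names",
//         clusterHasFeature("mapper.ignored_source.dont_expand_dots"));
//
// where clusterHasFeature is the assumed capability check exposed by the test
// infrastructure, so the test only runs once every node advertises the feature.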
+ public static final NodeFeature BWC_WORKAROUND_9_0 = new NodeFeature("mapper.bwc_workaround_9_0"); + @Override public Set getFeatures() { return Set.of( + BWC_WORKAROUND_9_0, IgnoredSourceFieldMapper.TRACK_IGNORED_SOURCE, PassThroughObjectMapper.PASS_THROUGH_PRIORITY, RangeFieldMapper.NULL_VALUES_OFF_BY_ONE_FIX, @@ -52,6 +58,6 @@ public Set getFeatures() { @Override public Set getTestFeatures() { - return Set.of(RangeFieldMapper.DATE_RANGE_INDEXING_FIX); + return Set.of(RangeFieldMapper.DATE_RANGE_INDEXING_FIX, IgnoredSourceFieldMapper.DONT_EXPAND_DOTS_IN_IGNORED_SOURCE); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index fc5f28dd51c9d..d0e0dcb6b97ba 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -98,6 +98,17 @@ public NestedObjectMapper build(MapperBuilderContext context) { } else { nestedTypePath = fullPath; } + if (sourceKeepMode.orElse(SourceKeepMode.NONE) == SourceKeepMode.ARRAYS) { + throw new MapperException( + "parameter [ " + + Mapper.SYNTHETIC_SOURCE_KEEP_PARAM + + " ] can't be set to [" + + SourceKeepMode.ARRAYS + + "] for nested object [" + + fullPath + + "]" + ); + } final Query nestedTypeFilter = NestedPathFieldMapper.filter(indexCreatedVersion, nestedTypePath); NestedMapperBuilderContext nestedContext = new NestedMapperBuilderContext( context.buildFullName(leafName()), @@ -115,7 +126,7 @@ public NestedObjectMapper build(MapperBuilderContext context) { buildMappers(nestedContext), enabled, dynamic, - storeArraySource, + sourceKeepMode, includeInParent, includeInRoot, parentTypeFilter, @@ -213,7 +224,7 @@ public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { Map mappers, Explicit enabled, ObjectMapper.Dynamic dynamic, - Explicit storeArraySource, + Optional sourceKeepMode, Explicit includeInParent, Explicit includeInRoot, Query parentTypeFilter, @@ -222,7 +233,7 @@ public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { Function bitsetProducer, IndexSettings indexSettings ) { - super(name, fullPath, enabled, Optional.empty(), storeArraySource, dynamic, mappers); + super(name, fullPath, enabled, Optional.empty(), sourceKeepMode, dynamic, mappers); this.parentTypeFilter = parentTypeFilter; this.nestedTypePath = nestedTypePath; this.nestedTypeFilter = nestedTypeFilter; @@ -283,7 +294,7 @@ NestedObjectMapper withoutMappers() { Map.of(), enabled, dynamic, - storeArraySource, + sourceKeepMode, includeInParent, includeInRoot, parentTypeFilter, @@ -310,8 +321,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (isEnabled() != Defaults.ENABLED) { builder.field("enabled", enabled.value()); } - if (storeArraySource != Defaults.STORE_ARRAY_SOURCE) { - builder.field(STORE_ARRAY_SOURCE_PARAM, storeArraySource.value()); + if (sourceKeepMode.isPresent()) { + builder.field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, sourceKeepMode.get()); } serializeMappers(builder, params); return builder.endObject(); @@ -359,7 +370,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex mergeResult.mappers(), mergeResult.enabled(), mergeResult.dynamic(), - mergeResult.trackArraySource(), + mergeResult.sourceKeepMode(), incInParent, incInRoot, parentTypeFilter, @@ -393,8 +404,8 @@ protected MapperMergeContext createChildContext(MapperMergeContext 
mapperMergeCo @Override public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - if (storeArraySource()) { - // IgnoredSourceFieldMapper integration takes care of writing the source for nested objects that enabled store_array_source. + if (sourceKeepMode.orElse(SourceKeepMode.NONE) == SourceKeepMode.ALL) { + // IgnoredSourceFieldMapper integration takes care of writing the source for the nested object. return SourceLoader.SyntheticFieldLoader.NOTHING; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 40019566adaa8..5e63fee8c5adc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -45,6 +45,7 @@ public class ObjectMapper extends Mapper { public static final String CONTENT_TYPE = "object"; static final String STORE_ARRAY_SOURCE_PARAM = "store_array_source"; static final NodeFeature SUBOBJECTS_AUTO = new NodeFeature("mapper.subobjects_auto"); + // No-op. All uses of this feature were reverted but node features can't be removed. static final NodeFeature SUBOBJECTS_AUTO_FIXES = new NodeFeature("mapper.subobjects_auto_fixes"); /** @@ -62,7 +63,7 @@ public enum Subobjects { this.printedValue = printedValue; } - static Subobjects from(Object node) { + public static Subobjects from(Object node) { if (node instanceof Boolean value) { return value ? Subobjects.ENABLED : Subobjects.DISABLED; } @@ -127,7 +128,7 @@ static Dynamic getRootDynamic(MappingLookup mappingLookup) { public static class Builder extends Mapper.Builder { protected Optional subobjects; protected Explicit enabled = Explicit.IMPLICIT_TRUE; - protected Explicit storeArraySource = Defaults.STORE_ARRAY_SOURCE; + protected Optional sourceKeepMode = Optional.empty(); protected Dynamic dynamic; protected final List mappersBuilders = new ArrayList<>(); @@ -141,8 +142,8 @@ public Builder enabled(boolean enabled) { return this; } - public Builder storeArraySource(boolean value) { - this.storeArraySource = Explicit.explicitBoolean(value); + public Builder sourceKeepMode(SourceKeepMode sourceKeepMode) { + this.sourceKeepMode = Optional.of(sourceKeepMode); return this; } @@ -245,7 +246,7 @@ public ObjectMapper build(MapperBuilderContext context) { context.buildFullName(leafName()), enabled, subobjects, - storeArraySource, + sourceKeepMode, dynamic, buildMappers(context.createChildContext(leafName(), dynamic)) ); @@ -307,7 +308,10 @@ protected static boolean parseObjectOrDocumentTypeProperties( builder.enabled(XContentMapValues.nodeBooleanValue(fieldNode, fieldName + ".enabled")); return true; } else if (fieldName.equals(STORE_ARRAY_SOURCE_PARAM)) { - builder.storeArraySource(XContentMapValues.nodeBooleanValue(fieldNode, fieldName + ".store_array_source")); + builder.sourceKeepMode(SourceKeepMode.ARRAYS); + return true; + } else if (fieldName.equals(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM)) { + builder.sourceKeepMode(SourceKeepMode.from(fieldNode.toString())); return true; } else if (fieldName.equals("properties")) { if (fieldNode instanceof Collection && ((Collection) fieldNode).isEmpty()) { @@ -434,7 +438,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate protected final Explicit enabled; protected final Optional subobjects; - protected final Explicit storeArraySource; + protected final Optional sourceKeepMode; protected final Dynamic dynamic; protected final Map mappers; @@ -444,7 
+448,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate String fullPath, Explicit enabled, Optional subobjects, - Explicit storeArraySource, + Optional sourceKeepMode, Dynamic dynamic, Map mappers ) { @@ -454,7 +458,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate this.fullPath = internFieldName(fullPath); this.enabled = enabled; this.subobjects = subobjects; - this.storeArraySource = storeArraySource; + this.sourceKeepMode = sourceKeepMode; this.dynamic = dynamic; if (mappers == null) { this.mappers = Map.of(); @@ -482,7 +486,7 @@ public Builder newBuilder(IndexVersion indexVersionCreated) { * This is typically used in the context of a mapper merge when there's not enough budget to add the entire object. */ ObjectMapper withoutMappers() { - return new ObjectMapper(leafName(), fullPath, enabled, subobjects, storeArraySource, dynamic, Map.of()); + return new ObjectMapper(leafName(), fullPath, enabled, subobjects, sourceKeepMode, dynamic, Map.of()); } @Override @@ -520,8 +524,8 @@ public final Subobjects subobjects() { return subobjects.orElse(Subobjects.ENABLED); } - public final boolean storeArraySource() { - return storeArraySource.value(); + public final Optional sourceKeepMode() { + return sourceKeepMode; } @Override @@ -550,7 +554,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex fullPath, mergeResult.enabled, mergeResult.subObjects, - mergeResult.trackArraySource, + mergeResult.sourceKeepMode, mergeResult.dynamic, mergeResult.mappers ); @@ -559,7 +563,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex protected record MergeResult( Explicit enabled, Optional subObjects, - Explicit trackArraySource, + Optional sourceKeepMode, Dynamic dynamic, Map mappers ) { @@ -593,26 +597,31 @@ static MergeResult build(ObjectMapper existing, ObjectMapper mergeWithObject, Ma } else { subObjects = existing.subobjects; } - final Explicit trackArraySource; - if (mergeWithObject.storeArraySource.explicit()) { + final Optional sourceKeepMode; + if (mergeWithObject.sourceKeepMode.isPresent()) { if (reason == MergeReason.INDEX_TEMPLATE) { - trackArraySource = mergeWithObject.storeArraySource; - } else if (existing.storeArraySource != mergeWithObject.storeArraySource) { + sourceKeepMode = mergeWithObject.sourceKeepMode; + } else if (existing.sourceKeepMode.isEmpty() || existing.sourceKeepMode.get() != mergeWithObject.sourceKeepMode.get()) { throw new MapperException( - "the [store_array_source] parameter can't be updated for the object mapping [" + existing.fullPath() + "]" + "the [ " + + Mapper.SYNTHETIC_SOURCE_KEEP_PARAM + + " ] parameter can't be updated for the object mapping [" + + existing.fullPath() + + "]" ); } else { - trackArraySource = existing.storeArraySource; + sourceKeepMode = existing.sourceKeepMode; } } else { - trackArraySource = existing.storeArraySource; + sourceKeepMode = existing.sourceKeepMode; } + MapperMergeContext objectMergeContext = existing.createChildContext(parentMergeContext, existing.leafName()); Map mergedMappers = buildMergedMappers(existing, mergeWithObject, objectMergeContext, subObjects); return new MergeResult( enabled, subObjects, - trackArraySource, + sourceKeepMode, mergeWithObject.dynamic != null ? 
mergeWithObject.dynamic : existing.dynamic, mergedMappers ); @@ -733,6 +742,12 @@ private void ensureFlattenable(MapperBuilderContext context, ContentPath path) { + ")" ); } + if (sourceKeepMode.isPresent()) { + throwAutoFlatteningException( + path, + "the value of [" + Mapper.SYNTHETIC_SOURCE_KEEP_PARAM + "] is [ " + sourceKeepMode.get() + " ]" + ); + } if (isEnabled() == false) { throwAutoFlatteningException(path, "the value of [enabled] is [false]"); } @@ -774,8 +789,8 @@ void toXContent(XContentBuilder builder, Params params, ToXContent custom) throw if (subobjects.isPresent()) { builder.field("subobjects", subobjects.get().printedValue); } - if (storeArraySource != Defaults.STORE_ARRAY_SOURCE) { - builder.field(STORE_ARRAY_SOURCE_PARAM, storeArraySource.value()); + if (sourceKeepMode.isPresent()) { + builder.field("synthetic_source_keep", sourceKeepMode.get()); } if (custom != null) { custom.toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index 9ef36b99a57c5..80f845d626a2f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -82,6 +82,7 @@ public PassThroughObjectMapper build(MapperBuilderContext context) { leafName(), context.buildFullName(leafName()), enabled, + sourceKeepMode, dynamic, buildMappers(context.createChildContext(leafName(), timeSeriesDimensionSubFields.value(), dynamic)), timeSeriesDimensionSubFields, @@ -99,13 +100,14 @@ public PassThroughObjectMapper build(MapperBuilderContext context) { String name, String fullPath, Explicit enabled, + Optional sourceKeepMode, Dynamic dynamic, Map mappers, Explicit timeSeriesDimensionSubFields, int priority ) { // Subobjects are not currently supported. 
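// Illustrative sketch, not part of the change: the sourceKeepMode handling in
// ObjectMapper.MergeResult above reduces to "an explicit incoming value wins for
// template merges and must match any existing explicit value otherwise":
//
//     static Optional<Mapper.SourceKeepMode> mergeKeepMode(
//         Optional<Mapper.SourceKeepMode> existing,
//         Optional<Mapper.SourceKeepMode> incoming,
//         boolean templateMerge
//     ) {
//         if (incoming.isEmpty()) return existing;        // nothing new, keep what's there
//         if (templateMerge) return incoming;             // templates may override freely
//         if (existing.isEmpty() || existing.get() != incoming.get()) {
//             throw new MapperException("synthetic_source_keep can't be updated");
//         }
//         return existing;                                // same explicit value on both sides
//     }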
- super(name, fullPath, enabled, Optional.of(Subobjects.DISABLED), Explicit.IMPLICIT_FALSE, dynamic, mappers); + super(name, fullPath, enabled, Optional.of(Subobjects.DISABLED), sourceKeepMode, dynamic, mappers); this.timeSeriesDimensionSubFields = timeSeriesDimensionSubFields; this.priority = priority; if (priority < 0) { @@ -115,7 +117,16 @@ public PassThroughObjectMapper build(MapperBuilderContext context) { @Override PassThroughObjectMapper withoutMappers() { - return new PassThroughObjectMapper(leafName(), fullPath(), enabled, dynamic, Map.of(), timeSeriesDimensionSubFields, priority); + return new PassThroughObjectMapper( + leafName(), + fullPath(), + enabled, + sourceKeepMode, + dynamic, + Map.of(), + timeSeriesDimensionSubFields, + priority + ); } @Override @@ -158,6 +169,7 @@ public PassThroughObjectMapper merge(Mapper mergeWith, MapperMergeContext parent leafName(), fullPath(), mergeResult.enabled(), + mergeResult.sourceKeepMode(), mergeResult.dynamic(), mergeResult.mappers(), containsDimensions, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index af0dc0c0ad7fe..6ca30304201b2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -59,6 +59,7 @@ public class RangeFieldMapper extends FieldMapper { public static class Defaults { public static final DateFormatter DATE_FORMATTER = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; + public static final Locale LOCALE = DateFieldMapper.DEFAULT_LOCALE; } // this is private since it has a different default @@ -83,7 +84,7 @@ public static class Builder extends FieldMapper.Builder { private final Parameter locale = new Parameter<>( "locale", false, - () -> Locale.ROOT, + () -> Defaults.LOCALE, (n, c, o) -> LocaleUtils.parse(o.toString()), m -> toType(m).locale, (xContentBuilder, n, v) -> xContentBuilder.field(n, v.toString()), diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 878f9c92fa552..ce983e8a327c9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -113,7 +113,7 @@ public RootObjectMapper build(MapperBuilderContext context) { leafName(), enabled, subobjects, - storeArraySource, + sourceKeepMode, dynamic, buildMappers(context.createChildContext(null, dynamic)), new HashMap<>(runtimeFields), @@ -135,7 +135,7 @@ public RootObjectMapper build(MapperBuilderContext context) { String name, Explicit enabled, Optional subobjects, - Explicit trackArraySource, + Optional sourceKeepMode, Dynamic dynamic, Map mappers, Map runtimeFields, @@ -144,12 +144,17 @@ public RootObjectMapper build(MapperBuilderContext context) { Explicit dateDetection, Explicit numericDetection ) { - super(name, name, enabled, subobjects, trackArraySource, dynamic, mappers); + super(name, name, enabled, subobjects, sourceKeepMode, dynamic, mappers); this.runtimeFields = runtimeFields; this.dynamicTemplates = dynamicTemplates; this.dynamicDateTimeFormatters = dynamicDateTimeFormatters; this.dateDetection = dateDetection; this.numericDetection = numericDetection; + if (sourceKeepMode.orElse(SourceKeepMode.NONE) == SourceKeepMode.ALL) { + throw new MapperParsingException( + "root object can't be configured with [" + 
Mapper.SYNTHETIC_SOURCE_KEEP_PARAM + ":" + SourceKeepMode.ALL + "]" + ); + } } @Override @@ -166,7 +171,7 @@ RootObjectMapper withoutMappers() { leafName(), enabled, subobjects, - storeArraySource, + sourceKeepMode, dynamic, Map.of(), Map.of(), @@ -282,7 +287,7 @@ public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeCo leafName(), mergeResult.enabled(), mergeResult.subObjects(), - mergeResult.trackArraySource(), + mergeResult.sourceKeepMode(), mergeResult.dynamic(), mergeResult.mappers(), Map.copyOf(runtimeFields), diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index f086526eec78e..b60feb4d5746e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -179,7 +179,7 @@ public static boolean parseMultiField( public static DateFormatter parseDateTimeFormatter(Object node) { if (node instanceof String) { - return DateFormatter.forPattern((String) node); + return DateFormatter.forPattern((String) node).withLocale(DateFieldMapper.DEFAULT_LOCALE); } throw new IllegalArgumentException("Invalid format: [" + node.toString() + "]: expected string value"); } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java index aeffff28269dd..20874a736b1ec 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java @@ -10,7 +10,6 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; -import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.Query; @@ -19,8 +18,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.lucene.queries.SpanMatchNoDocsQuery; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -76,40 +75,37 @@ public SpanTermQueryBuilder(StreamInput in) throws IOException { } @Override - protected SpanQuery doToQuery(SearchExecutionContext context) throws IOException { + protected Query doToQuery(SearchExecutionContext context) throws IOException { MappedFieldType mapper = context.getFieldType(fieldName); - Term term; if (mapper == null) { - term = new Term(fieldName, BytesRefs.toBytesRef(value)); - } else { - if (mapper.getTextSearchInfo().hasPositions() == false) { - throw new IllegalArgumentException( - "Span term query requires position data, but field " + fieldName + " was indexed without position data" - ); - } - Query termQuery = mapper.termQuery(value, context); - List termsList = new ArrayList<>(); - termQuery.visit(new QueryVisitor() { - @Override - public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) { - if (occur == BooleanClause.Occur.MUST || occur == BooleanClause.Occur.FILTER) { - return this; - } - return EMPTY_VISITOR; + return new SpanMatchNoDocsQuery(fieldName, "unmapped field: " + fieldName); + } + if (mapper.getTextSearchInfo().hasPositions() == false) { + throw new 
IllegalArgumentException( + "Span term query requires position data, but field " + fieldName + " was indexed without position data" + ); + } + Query termQuery = mapper.termQuery(value, context); + List termsList = new ArrayList<>(); + termQuery.visit(new QueryVisitor() { + @Override + public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) { + if (occur == BooleanClause.Occur.MUST || occur == BooleanClause.Occur.FILTER) { + return this; } + return EMPTY_VISITOR; + } - @Override - public void consumeTerms(Query query, Term... terms) { - termsList.addAll(Arrays.asList(terms)); - } - }); - if (termsList.size() != 1) { - // This is for safety, but we have called mapper.termQuery above: we really should get one and only one term from the query? - throw new IllegalArgumentException("Cannot extract a term from a query of type " + termQuery.getClass() + ": " + termQuery); + @Override + public void consumeTerms(Query query, Term... terms) { + termsList.addAll(Arrays.asList(terms)); } - term = termsList.get(0); + }); + if (termsList.size() != 1) { + // This is for safety, but we have called mapper.termQuery above: we really should get one and only one term from the query? + throw new IllegalArgumentException("Cannot extract a term from a query of type " + termQuery.getClass() + ": " + termQuery); } - return new SpanTermQuery(term); + return new SpanTermQuery(termsList.get(0)); } public static SpanTermQueryBuilder fromXContent(XContentParser parser) throws IOException, ParsingException { diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index d72909806240c..b1b0f0201ebbe 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.breaker.ChildMemoryCircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; @@ -474,7 +475,8 @@ public void accept(String key, CircuitBreaker breaker) { appendBytesSafe(message, (long) (breaker.getUsed() * breaker.getOverhead())); } }); - message.append("]"); + message.append("]; for more information, see "); + message.append(ReferenceDocs.CIRCUIT_BREAKER_ERRORS); return message.toString(); } diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index aba644b392cec..cbbfef2cc65fa 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -191,4 +191,13 @@ default Set supportedStreamingTasks() { default boolean canStream(TaskType taskType) { return supportedStreamingTasks().contains(taskType); } + + /** + * A service can define default configurations that can be + * used out of the box without creating an endpoint first. 
+ * @return Default configurations provided by this service + */ + default List defaultConfigs() { + return List.of(); + } } diff --git a/server/src/main/java/org/elasticsearch/inference/UnparsedModel.java b/server/src/main/java/org/elasticsearch/inference/UnparsedModel.java new file mode 100644 index 0000000000000..30a7c6aa2bf9c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/UnparsedModel.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.inference; + +import java.util.Map; + +/** + * Semi parsed model where inference entity id, task type and service + * are known but the settings are not parsed. + */ +public record UnparsedModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map settings, + Map secrets +) {} diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 0275e988ce39d..0f63d2a8dcc1b 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -33,7 +33,6 @@ import org.elasticsearch.cluster.ClusterStateApplier; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; -import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -271,30 +270,14 @@ public static void resolvePipelinesAndUpdateIndexRequest( final IndexRequest indexRequest, final Metadata metadata ) { - resolvePipelinesAndUpdateIndexRequest(originalRequest, indexRequest, metadata, Map.of()); - } - - public static void resolvePipelinesAndUpdateIndexRequest( - final DocWriteRequest originalRequest, - final IndexRequest indexRequest, - final Metadata metadata, - Map componentTemplateSubstitutions - ) { - resolvePipelinesAndUpdateIndexRequest( - originalRequest, - indexRequest, - metadata, - System.currentTimeMillis(), - componentTemplateSubstitutions - ); + resolvePipelinesAndUpdateIndexRequest(originalRequest, indexRequest, metadata, System.currentTimeMillis()); } static void resolvePipelinesAndUpdateIndexRequest( final DocWriteRequest originalRequest, final IndexRequest indexRequest, final Metadata metadata, - final long epochMillis, - final Map componentTemplateSubstitutions + final long epochMillis ) { if (indexRequest.isPipelineResolved()) { return; @@ -302,21 +285,11 @@ static void resolvePipelinesAndUpdateIndexRequest( /* * Here we look for the pipelines associated with the index if the index exists. If the index does not exist we fall back to using - * templates to find the pipelines. But if a user has passed in component template substitutions, they want the settings from those - * used in place of the settings used to create any previous indices. 
So in that case we use the templates to find the pipelines -- - * we don't fall back to the existing index if we don't find any because it is possible the user has intentionally removed the - * pipeline. + * templates to find the pipelines. */ - final Pipelines pipelines; - if (componentTemplateSubstitutions.isEmpty()) { - pipelines = resolvePipelinesFromMetadata(originalRequest, indexRequest, metadata, epochMillis) // - .or(() -> resolvePipelinesFromIndexTemplates(indexRequest, metadata, Map.of())) - .orElse(Pipelines.NO_PIPELINES_DEFINED); - } else { - pipelines = resolvePipelinesFromIndexTemplates(indexRequest, metadata, componentTemplateSubstitutions).orElse( - Pipelines.NO_PIPELINES_DEFINED - ); - } + final Pipelines pipelines = resolvePipelinesFromMetadata(originalRequest, indexRequest, metadata, epochMillis).or( + () -> resolvePipelinesFromIndexTemplates(indexRequest, metadata) + ).orElse(Pipelines.NO_PIPELINES_DEFINED); // The pipeline coming as part of the request always has priority over the resolved one from metadata or templates String requestPipeline = indexRequest.getPipeline(); @@ -1466,11 +1439,7 @@ private static Optional resolvePipelinesFromMetadata( return Optional.of(new Pipelines(IndexSettings.DEFAULT_PIPELINE.get(settings), IndexSettings.FINAL_PIPELINE.get(settings))); } - private static Optional resolvePipelinesFromIndexTemplates( - IndexRequest indexRequest, - Metadata metadata, - Map componentTemplateSubstitutions - ) { + private static Optional resolvePipelinesFromIndexTemplates(IndexRequest indexRequest, Metadata metadata) { if (indexRequest.index() == null) { return Optional.empty(); } @@ -1480,7 +1449,7 @@ private static Optional resolvePipelinesFromIndexTemplates( // precedence), or if a V2 template does not match, any V1 templates String v2Template = MetadataIndexTemplateService.findV2Template(metadata, indexRequest.index(), false); if (v2Template != null) { - final Settings settings = MetadataIndexTemplateService.resolveSettings(metadata, v2Template, componentTemplateSubstitutions); + final Settings settings = MetadataIndexTemplateService.resolveSettings(metadata, v2Template); return Optional.of(new Pipelines(IndexSettings.DEFAULT_PIPELINE.get(settings), IndexSettings.FINAL_PIPELINE.get(settings))); } diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java index 17e290283d5e0..e07f6908330df 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java @@ -18,8 +18,10 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -50,8 +52,8 @@ public IndicesMetrics(MeterRegistry meterRegistry, IndicesService indicesService } private static List registerAsyncMetrics(MeterRegistry registry, IndicesStatsCache cache) { - List metrics = new ArrayList<>(IndexMode.values().length * 3); - assert IndexMode.values().length == 3 : "index modes have changed"; + final int TOTAL_METRICS = 36; + List 
metrics = new ArrayList<>(TOTAL_METRICS); for (IndexMode indexMode : IndexMode.values()) { String name = indexMode.getName(); metrics.add( @@ -72,13 +74,89 @@ private static List registerAsyncMetrics(MeterRegistry registry, ); metrics.add( registry.registerLongGauge( - "es.indices." + name + ".bytes.total", + "es.indices." + name + ".size", "total size in bytes of " + name + " indices", - "unit", + "bytes", () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).numBytes) ) ); + // query (count, took, failures) - use gauges as shards can be removed + metrics.add( + registry.registerLongGauge( + "es.indices." + name + ".query.total", + "total queries of " + name + " indices", + "unit", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getQueryCount()) + ) + ); + metrics.add( + registry.registerLongGauge( + "es.indices." + name + ".query.time", + "total query time of " + name + " indices", + "ms", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getQueryTimeInMillis()) + ) + ); + metrics.add( + registry.registerLongGauge( + "es.indices." + name + ".query.failure.total", + "total query failures of " + name + " indices", + "unit", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getQueryFailure()) + ) + ); + // fetch (count, took, failures) - use gauges as shards can be removed + metrics.add( + registry.registerLongGauge( + "es.indices." + name + ".fetch.total", + "total fetches of " + name + " indices", + "unit", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getFetchCount()) + ) + ); + metrics.add( + registry.registerLongGauge( + "es.indices." + name + ".fetch.time", + "total fetch time of " + name + " indices", + "ms", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getFetchTimeInMillis()) + ) + ); + metrics.add( + registry.registerLongGauge( + "es.indices." + name + ".fetch.failure.total", + "total fetch failures of " + name + " indices", + "unit", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getFetchFailure()) + ) + ); + // indexing + metrics.add( + registry.registerLongGauge( + "es.indices." + name + ".indexing.total", + "total indexing operations of " + name + " indices", + "unit", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).indexing.getIndexCount()) + ) + ); + metrics.add( + registry.registerLongGauge( + "es.indices." + name + ".indexing.time", + "total indexing time of " + name + " indices", + "ms", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).indexing.getIndexTime().millis()) + ) + ); + metrics.add( + registry.registerLongGauge( + "es.indices." 
+ name + ".indexing.failure.total", + "total indexing failures of " + name + " indices", + "unit", + () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).indexing.getIndexFailedCount()) + ) + ); } + assert metrics.size() == TOTAL_METRICS : "total number of metrics has changed"; return metrics; } @@ -107,6 +185,8 @@ static class IndexStats { int numIndices = 0; long numDocs = 0; long numBytes = 0; + SearchStats.Stats search = new SearchStats().getTotal(); + IndexingStats.Stats indexing = new IndexingStats().getTotal(); } private static class IndicesStatsCache extends SingleObjectCache> { @@ -152,6 +232,8 @@ private Map internalGetIndicesStats() { try { indexStats.numDocs += indexShard.commitStats().getNumDocs(); indexStats.numBytes += indexShard.storeStats().sizeInBytes(); + indexStats.search.add(indexShard.searchStats().getTotal()); + indexStats.indexing.add(indexShard.indexingStats().getTotal()); } catch (IllegalIndexShardStateException | AlreadyClosedException ignored) { // ignored } diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 99fa3e0166963..2f7bb80a8d46a 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.RestApiVersion; @@ -104,6 +105,8 @@ public final void handleRequest(RestRequest request, RestChannel channel, NodeCl // prepare the request for execution; has the side effect of touching the request parameters try (var action = prepareRequest(request, client)) { + assert assertConsumesSupportedParams(supported, request); + // validate unconsumed params, but we must exclude params used to format the response // use a sorted set so the unconsumed parameters appear in a reliable sorted order final SortedSet unconsumedParams = request.unconsumedParams() @@ -148,6 +151,20 @@ public void close() { } } + private boolean assertConsumesSupportedParams(@Nullable Set supported, RestRequest request) { + if (supported != null) { + final var supportedAndCommon = new TreeSet<>(supported); + supportedAndCommon.add("error_trace"); + supportedAndCommon.addAll(ALWAYS_SUPPORTED); + supportedAndCommon.removeAll(RestRequest.INTERNAL_MARKER_REQUEST_PARAMETERS); + final var consumed = new TreeSet<>(request.consumedParams()); + consumed.removeAll(RestRequest.INTERNAL_MARKER_REQUEST_PARAMETERS); + assert supportedAndCommon.equals(consumed) + : getName() + ": consumed params " + consumed + " while supporting " + supportedAndCommon; + } + return true; + } + protected static String unrecognized(RestRequest request, Set invalids, Set candidates, String detail) { StringBuilder message = new StringBuilder().append("request [") .append(request.path()) diff --git a/server/src/main/java/org/elasticsearch/rest/DeprecationRestHandler.java b/server/src/main/java/org/elasticsearch/rest/DeprecationRestHandler.java index 98ab7d53ffbe6..8d363f6e63511 100644 --- a/server/src/main/java/org/elasticsearch/rest/DeprecationRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/DeprecationRestHandler.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.Nullable; import java.util.Objects; @@ -29,7 +28,6 @@ public class DeprecationRestHandler extends FilterRestHandler implements RestHan private final DeprecationLogger deprecationLogger; private final boolean compatibleVersionWarning; private final String deprecationKey; - @Nullable private final Level deprecationLevel; /** @@ -39,6 +37,8 @@ public class DeprecationRestHandler extends FilterRestHandler implements RestHan * @param handler The rest handler to deprecate (it's possible that the handler is reused with a different name!) * @param method a method of a deprecated endpoint * @param path a path of a deprecated endpoint + * @param deprecationLevel The level of the deprecation warning, must be non-null + * and either {@link Level#WARN} or {@link DeprecationLogger#CRITICAL} * @param deprecationMessage The message to warn users with when they use the {@code handler} * @param deprecationLogger The deprecation logger * @param compatibleVersionWarning set to false so that a deprecation warning will be issued for the handled request, @@ -51,7 +51,7 @@ public DeprecationRestHandler( RestHandler handler, RestRequest.Method method, String path, - @Nullable Level deprecationLevel, + Level deprecationLevel, String deprecationMessage, DeprecationLogger deprecationLogger, boolean compatibleVersionWarning @@ -61,7 +61,7 @@ public DeprecationRestHandler( this.deprecationLogger = Objects.requireNonNull(deprecationLogger); this.compatibleVersionWarning = compatibleVersionWarning; this.deprecationKey = DEPRECATED_ROUTE_KEY + "_" + method + "_" + path; - if (deprecationLevel != null && (deprecationLevel != Level.WARN && deprecationLevel != DeprecationLogger.CRITICAL)) { + if (deprecationLevel != Level.WARN && deprecationLevel != DeprecationLogger.CRITICAL) { throw new IllegalArgumentException( "unexpected deprecation logger level: " + deprecationLevel + ", expected either 'CRITICAL' or 'WARN'" ); @@ -77,19 +77,18 @@ public DeprecationRestHandler( @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { if (compatibleVersionWarning == false) { - // The default value for deprecated requests without a version warning is WARN - if (deprecationLevel == null || deprecationLevel == Level.WARN) { + // emit a standard deprecation warning + if (Level.WARN == deprecationLevel) { deprecationLogger.warn(DeprecationCategory.API, deprecationKey, deprecationMessage); - } else { + } else if (DeprecationLogger.CRITICAL == deprecationLevel) { deprecationLogger.critical(DeprecationCategory.API, deprecationKey, deprecationMessage); } } else { - // The default value for deprecated requests with a version warning is CRITICAL, - // because they have a specific version where the endpoint is removed - if (deprecationLevel == null || deprecationLevel == DeprecationLogger.CRITICAL) { - deprecationLogger.compatibleCritical(deprecationKey, deprecationMessage); - } else { + // emit a compatibility warning + if (Level.WARN == deprecationLevel) { deprecationLogger.compatible(Level.WARN, deprecationKey, deprecationMessage); + } else if (DeprecationLogger.CRITICAL == deprecationLevel) { + deprecationLogger.compatibleCritical(deprecationKey, deprecationMessage); } } @@ -139,4 +138,9 @@ public static String requireValidHeader(String value) { return value; } + + // test only + Level getDeprecationLevel() { + return deprecationLevel; + } } 
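// Illustrative usage sketch; the handler, logger and route below are assumed: with
// @Nullable removed above, every DeprecationRestHandler now receives an explicit
// level, and anything other than Level.WARN or DeprecationLogger.CRITICAL fails
// fast in the constructor instead of silently falling back to a default:
//
//     RestHandler deprecated = new DeprecationRestHandler(
//         delegate,
//         RestRequest.Method.GET,
//         "/_old_endpoint",
//         Level.WARN,                     // or DeprecationLogger.CRITICAL
//         "[GET /_old_endpoint] is deprecated",
//         deprecationLogger,
//         false                           // true would emit a compatibility warning instead
//     );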
diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 924cd361c671d..c2064fdd931de 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -144,25 +144,6 @@ public ServerlessApiProtections getApiProtections() { return apiProtections; } - /** - * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request. - * - * @param method GET, POST, etc. - * @param path Path to handle (e.g. "/{index}/{type}/_bulk") - * @param version API version to handle (e.g. RestApiVersion.V_8) - * @param handler The handler to actually execute - * @param deprecationMessage The message to log and send as a header in the response - */ - protected void registerAsDeprecatedHandler( - RestRequest.Method method, - String path, - RestApiVersion version, - RestHandler handler, - String deprecationMessage - ) { - registerAsDeprecatedHandler(method, path, version, handler, deprecationMessage, null); - } - /** * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request. * @@ -179,40 +160,23 @@ protected void registerAsDeprecatedHandler( RestApiVersion version, RestHandler handler, String deprecationMessage, - @Nullable Level deprecationLevel + Level deprecationLevel ) { assert (handler instanceof DeprecationRestHandler) == false; - if (version == RestApiVersion.current()) { - // e.g. it was marked as deprecated in 8.x, and we're currently running 8.x - registerHandler( - method, - path, - version, - new DeprecationRestHandler(handler, method, path, deprecationLevel, deprecationMessage, deprecationLogger, false) - ); - } else if (version == RestApiVersion.minimumSupported()) { - // e.g. it was marked as deprecated in 7.x, and we're currently running 8.x + if (RestApiVersion.onOrAfter(RestApiVersion.minimumSupported()).test(version)) { registerHandler( method, path, version, - new DeprecationRestHandler(handler, method, path, deprecationLevel, deprecationMessage, deprecationLogger, true) - ); - } else { - // e.g. it was marked as deprecated in 7.x, and we're currently running *9.x* - logger.debug( - "Deprecated route [" - + method - + " " - + path - + "] for handler [" - + handler.getClass() - + "] " - + "with version [" - + version - + "], which is less than the minimum supported version [" - + RestApiVersion.minimumSupported() - + "]" + new DeprecationRestHandler( + handler, + method, + path, + deprecationLevel, + deprecationMessage, + deprecationLogger, + version != RestApiVersion.current() + ) ); } } @@ -250,21 +214,12 @@ protected void registerAsReplacedHandler( RestHandler handler, RestRequest.Method replacedMethod, String replacedPath, - RestApiVersion replacedVersion + RestApiVersion replacedVersion, + String replacedMessage, + Level deprecationLevel ) { - // e.g. [POST /_optimize] is deprecated! Use [POST /_forcemerge] instead. - final String replacedMessage = "[" - + replacedMethod.name() - + " " - + replacedPath - + "] is deprecated! 
Use [" - + method.name() - + " " - + path - + "] instead."; - registerHandler(method, path, version, handler); - registerAsDeprecatedHandler(replacedMethod, replacedPath, replacedVersion, handler, replacedMessage); + registerAsDeprecatedHandler(replacedMethod, replacedPath, replacedVersion, handler, replacedMessage, deprecationLevel); } /** @@ -284,7 +239,15 @@ protected void registerHandler(RestRequest.Method method, String path, RestApiVe private void registerHandlerNoWrap(RestRequest.Method method, String path, RestApiVersion version, RestHandler handler) { assert RestApiVersion.minimumSupported() == version || RestApiVersion.current() == version - : "REST API compatibility is only supported for version " + RestApiVersion.minimumSupported().major; + : "REST API compatibility is only supported for version " + + RestApiVersion.minimumSupported().major + + " [method=" + + method + + ", path=" + + path + + ", handler=" + + handler.getClass().getCanonicalName() + + "]"; if (RESERVED_PATHS.contains(path)) { throw new IllegalArgumentException("path [" + path + "] is a reserved path and may not be registered"); @@ -299,7 +262,7 @@ private void registerHandlerNoWrap(RestRequest.Method method, String path, RestA } public void registerHandler(final Route route, final RestHandler handler) { - if (route.isReplacement()) { + if (route.hasReplacement()) { Route replaced = route.getReplacedRoute(); registerAsReplacedHandler( route.getMethod(), @@ -308,7 +271,9 @@ public void registerHandler(final Route route, final RestHandler handler) { handler, replaced.getMethod(), replaced.getPath(), - replaced.getRestApiVersion() + replaced.getRestApiVersion(), + replaced.getDeprecationMessage(), + replaced.getDeprecationLevel() ); } else if (route.isDeprecated()) { registerAsDeprecatedHandler( diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index ede295fee9f4d..0e3b8d37dd25c 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest.Method; @@ -136,11 +137,10 @@ class Route { private final Method method; private final String path; private final RestApiVersion restApiVersion; - - private final String deprecationMessage; @Nullable + private final String deprecationMessage; private final Level deprecationLevel; - + @Nullable private final Route replacedRoute; private Route( @@ -153,12 +153,16 @@ private Route( ) { this.method = Objects.requireNonNull(method); this.path = Objects.requireNonNull(path); + // the last version in which this route was fully supported this.restApiVersion = Objects.requireNonNull(restApiVersion); - // a deprecated route will have a deprecation message, and the restApiVersion - // will represent the version when the route was deprecated + // a route marked as deprecated to keep or remove will have a deprecation message and level (warn for keep, critical for remove) this.deprecationMessage = deprecationMessage; - this.deprecationLevel = deprecationLevel; + this.deprecationLevel = Objects.requireNonNull(deprecationLevel); + + if (deprecationMessage == null && deprecationLevel != Level.OFF) { + throw new 
IllegalArgumentException("deprecationMessage must be set if deprecationLevel is not OFF"); + } // a route that replaces another route will have a reference to the route that was replaced this.replacedRoute = replacedRoute; @@ -173,7 +177,7 @@ private Route( * @param path the path, e.g. "/" */ public Route(Method method, String path) { - this(method, path, RestApiVersion.current(), null, null, null); + this(method, path, RestApiVersion.current(), null, Level.OFF, null); } public static class RouteBuilder { @@ -183,7 +187,6 @@ public static class RouteBuilder { private RestApiVersion restApiVersion; private String deprecationMessage; - @Nullable private Level deprecationLevel; private Route replacedRoute; @@ -194,6 +197,16 @@ private RouteBuilder(Method method, String path) { this.restApiVersion = RestApiVersion.current(); } + /** + * @deprecated Use {@link #deprecatedForRemoval(String, RestApiVersion)} if the intent is deprecate the path and remove in the + * next major version. Use {@link #deprecateAndKeep(String)} if the intent is to deprecate the path but not remove it. + * This method will delegate to {@link #deprecatedForRemoval(String, RestApiVersion)}. + */ + @Deprecated(since = "9.0.0", forRemoval = true) + public RouteBuilder deprecated(String deprecationMessage, RestApiVersion lastFullySupportedVersion) { + return deprecatedForRemoval(deprecationMessage, lastFullySupportedVersion); + } + /** * Marks that the route being built has been deprecated (for some reason -- the deprecationMessage) for removal. Notes the last * major version in which the path is fully supported without compatibility headers. If this path is being replaced by another @@ -202,7 +215,7 @@ private RouteBuilder(Method method, String path) { * For example: *
<pre> {@code
              * Route.builder(GET, "_upgrade")
-             *  .deprecated("The _upgrade API is no longer useful and will be removed.", RestApiVersion.V_7)
+             *  .deprecatedForRemoval("The _upgrade API is no longer useful and will be removed.", RestApiVersion.V_7)
              *  .build()}</pre>
* * @param deprecationMessage the user-visible explanation of this deprecation @@ -211,10 +224,12 @@ private RouteBuilder(Method method, String path) { * The next major version (i.e. 9) will have no support whatsoever for this route. * @return a reference to this object. */ - public RouteBuilder deprecated(String deprecationMessage, RestApiVersion lastFullySupportedVersion) { + public RouteBuilder deprecatedForRemoval(String deprecationMessage, RestApiVersion lastFullySupportedVersion) { assert this.replacedRoute == null; this.restApiVersion = Objects.requireNonNull(lastFullySupportedVersion); this.deprecationMessage = Objects.requireNonNull(deprecationMessage); + // if being deprecated for removal in the current version, then it's a warning, otherwise it's critical + this.deprecationLevel = lastFullySupportedVersion == RestApiVersion.current() ? Level.WARN : DeprecationLogger.CRITICAL; return this; } @@ -227,16 +242,38 @@ public RouteBuilder deprecated(String deprecationMessage, RestApiVersion lastFul * Route.builder(GET, "/_security/user/") * .replaces(GET, "/_xpack/security/user/", RestApiVersion.V_7).build()} * - * @param method the method being replaced - * @param path the path being replaced + * @param replacedMethod the method being replaced + * @param replacedPath the path being replaced * @param lastFullySupportedVersion the last {@link RestApiVersion} (i.e. 7) for which this route is fully supported. * The next major version (i.e. 8) will require compatibility header(s). (;compatible-with=7) * The next major version (i.e. 9) will have no support whatsoever for this route. * @return a reference to this object. */ - public RouteBuilder replaces(Method method, String path, RestApiVersion lastFullySupportedVersion) { + public RouteBuilder replaces(Method replacedMethod, String replacedPath, RestApiVersion lastFullySupportedVersion) { assert this.deprecationMessage == null; - this.replacedRoute = new Route(method, path, lastFullySupportedVersion, null, null, null); + + // if being replaced in the current version, then it's a warning, otherwise it's critical + Level deprecationLevel = lastFullySupportedVersion == RestApiVersion.current() ? Level.WARN : DeprecationLogger.CRITICAL; + + // e.g. [POST /_optimize] is deprecated! Use [POST /_forcemerge] instead. + final String replacedMessage = "[" + + replacedMethod.name() + + " " + + replacedPath + + "] is deprecated! Use [" + + this.method.name() + + " " + + this.path + + "] instead."; + + this.replacedRoute = new Route( + replacedMethod, + replacedPath, + lastFullySupportedVersion, + replacedMessage, + deprecationLevel, + null + ); return this; } @@ -246,7 +283,7 @@ public RouteBuilder replaces(Method method, String path, RestApiVersion lastFull * For example: *
<pre> {@code
              * Route.builder(GET, "_upgrade")
-             *  .deprecated("The _upgrade API is no longer useful but will not be removed.")
+             *  .deprecateAndKeep("The _upgrade API is no longer useful but will not be removed.")
              *  .build()}</pre>
* * @param deprecationMessage the user-visible explanation of this deprecation @@ -261,14 +298,15 @@ public RouteBuilder deprecateAndKeep(String deprecationMessage) { } public Route build() { - if (replacedRoute != null) { - return new Route(method, path, restApiVersion, null, null, replacedRoute); - } else if (deprecationMessage != null) { - return new Route(method, path, restApiVersion, deprecationMessage, deprecationLevel, null); - } else { - // this is a little silly, but perfectly legal - return new Route(method, path, restApiVersion, null, null, null); - } + assert (deprecationMessage != null) == (deprecationLevel != null); // both must be set or neither + return new Route( + method, + path, + restApiVersion, + deprecationMessage, + deprecationLevel == null ? Level.OFF : deprecationLevel, + replacedRoute + ); } } @@ -288,11 +326,11 @@ public RestApiVersion getRestApiVersion() { return restApiVersion; } + @Nullable public String getDeprecationMessage() { return deprecationMessage; } - @Nullable public Level getDeprecationLevel() { return deprecationLevel; } @@ -301,11 +339,12 @@ public boolean isDeprecated() { return deprecationMessage != null; } + @Nullable public Route getReplacedRoute() { return replacedRoute; } - public boolean isReplacement() { + public boolean hasReplacement() { return replacedRoute != null; } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestActions.java b/server/src/main/java/org/elasticsearch/rest/action/RestActions.java index dc1ab92a66a84..69cf5bbb1b89d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestActions.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestActions.java @@ -64,9 +64,9 @@ public static long parseVersion(RestRequest request, long defaultVersion) { return (version == Versions.MATCH_ANY) ? 
defaultVersion : version; } - public static void buildBroadcastShardsHeader(XContentBuilder builder, Params params, BaseBroadcastResponse response) + public static XContentBuilder buildBroadcastShardsHeader(XContentBuilder builder, Params params, BaseBroadcastResponse response) throws IOException { - buildBroadcastShardsHeader( + return buildBroadcastShardsHeader( builder, params, response.getTotalShards(), @@ -77,7 +77,7 @@ public static void buildBroadcastShardsHeader(XContentBuilder builder, Params pa ); } - public static void buildBroadcastShardsHeader( + public static XContentBuilder buildBroadcastShardsHeader( XContentBuilder builder, Params params, int total, @@ -100,7 +100,7 @@ public static void buildBroadcastShardsHeader( } builder.endArray(); } - builder.endObject(); + return builder.endObject(); } /** diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index be9ad0ed0a9cd..f1d4f678c5fb9 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.geometry.utils.Geohash; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; @@ -236,9 +237,12 @@ private DateTime(DateFormatter formatter, ZoneId timeZone, DateFieldMapper.Resol public DateTime(StreamInput in) throws IOException { String formatterPattern = in.readString(); + Locale locale = in.getTransportVersion().onOrAfter(TransportVersions.DATE_TIME_DOC_VALUES_LOCALES) + ? 
LocaleUtils.parse(in.readString()) + : DateFieldMapper.DEFAULT_LOCALE; String zoneId = in.readString(); this.timeZone = ZoneId.of(zoneId); - this.formatter = DateFormatter.forPattern(formatterPattern).withZone(this.timeZone); + this.formatter = DateFormatter.forPattern(formatterPattern).withZone(this.timeZone).withLocale(locale); this.parser = formatter.toDateMathParser(); this.resolution = DateFieldMapper.Resolution.ofOrdinal(in.readVInt()); if (in.getTransportVersion().between(TransportVersions.V_7_7_0, TransportVersions.V_8_0_0)) { @@ -259,6 +263,9 @@ public String getWriteableName() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(formatter.pattern()); + if (out.getTransportVersion().onOrAfter(TransportVersions.DATE_TIME_DOC_VALUES_LOCALES)) { + out.writeString(formatter.locale().toString()); + } out.writeString(timeZone.getId()); out.writeVInt(resolution.ordinal()); if (out.getTransportVersion().between(TransportVersions.V_7_7_0, TransportVersions.V_8_0_0)) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java index cd4a0bac7f429..6fdd41d374d0e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java @@ -76,7 +76,7 @@ public InternalStats( } private void verifyFormattingStats() { - if (format != DocValueFormat.RAW) { + if (format != DocValueFormat.RAW && count != 0) { verifyFormattingStat(Fields.MIN, format, min); verifyFormattingStat(Fields.MAX, format, max); verifyFormattingStat(Fields.AVG, format, getAvg()); diff --git a/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java b/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java index d8badd6847c93..1aeab06146834 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java +++ b/server/src/main/java/org/elasticsearch/upgrades/FeatureMigrationResults.java @@ -18,7 +18,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -57,8 +57,8 @@ public void writeTo(StreamOutput out) throws IOException { } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.xContentValuesMap(RESULTS_FIELD.getPreferredName(), featureStatuses); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).xContentObjectFields(RESULTS_FIELD.getPreferredName(), featureStatuses); } /** diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json index 71be3d333ec3f..c8fa98b196c7b 100644 --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json @@ -2,4 +2,4 @@ "Content moved to reference-docs-links.txt", "This is a temporary placeholder to satisfy sub check_elasticsearch_links in the docs build", "Remove with @UpdateForV10 (if not before)" -] +] \ No 
newline at end of file diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt index 190bbd3c319b4..ab9a6b253be7a 100644 --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt @@ -42,3 +42,4 @@ MAX_SHARDS_PER_NODE size-your-shards FLOOD_STAGE_WATERMARK fix-watermark-errors.html X_OPAQUE_ID api-conventions.html#x-opaque-id FORMING_SINGLE_NODE_CLUSTERS modules-discovery-bootstrap-cluster.html#modules-discovery-bootstrap-cluster-joining +CIRCUIT_BREAKER_ERRORS circuit-breaker-errors.html \ No newline at end of file diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java index e33f97e7cb580..144db693b8ab3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java @@ -68,7 +68,7 @@ public void testChunking() { 0, Collections.emptyList() ), - response -> 11 * response.getIndices().size() + 4 + response -> 11 * response.getIndices().size() + 5 ); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsResponseTests.java index 7b707eb4c31ba..a34e36e81dd83 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsResponseTests.java @@ -47,7 +47,7 @@ public void testToXContentChunkPerIndex() { AbstractChunkedSerializingTestCase.assertChunkCount( new FieldUsageStatsResponse(indices, indices, 0, List.of(), perIndex), - ignored -> 3 * indices + 2 + ignored -> 3 * indices + 3 ); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java index 2e915083476b8..bce4d20a06fbe 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java @@ -138,12 +138,12 @@ public void testChunkedEncodingPerIndex() { AbstractChunkedSerializingTestCase.assertChunkCount( indicesStatsResponse, new ToXContent.MapParams(Map.of("level", "cluster")), - ignored1 -> 3 + ignored1 -> 4 ); AbstractChunkedSerializingTestCase.assertChunkCount( indicesStatsResponse, new ToXContent.MapParams(Map.of("level", "indices")), - ignored -> 4 + 2 * shards + ignored -> 5 + 2 * shards ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java index 9b1d8c15619ad..8f0ff82beab4b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java 
@@ -87,8 +87,7 @@ public Settings getAdditionalIndexSettings( xContentRegistry(), indicesService, systemIndices, - indexSettingsProviders, - Map.of() + indexSettingsProviders ); assertThat(resolvedTemplate.settings().getAsInt("test-setting", -1), is(1)); diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 45c4b3bd2d7c7..5240d704dea3b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -11,7 +11,10 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -19,6 +22,7 @@ import org.elasticsearch.search.AbstractSearchTestCase; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.test.ESTestCase; @@ -317,14 +321,12 @@ public void testExpandRequestOptions() throws IOException { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { final QueryBuilder postFilter = QueryBuilders.existsQuery("foo"); - assertTrue(request.requests().stream().allMatch((r) -> "foo".equals(r.preference()))); + assertTrue(request.requests().stream().allMatch((r) -> "foobar".equals(r.preference()))); assertTrue(request.requests().stream().allMatch((r) -> "baz".equals(r.routing()))); assertTrue(request.requests().stream().allMatch((r) -> version == r.source().version())); assertTrue(request.requests().stream().allMatch((r) -> seqNoAndTerm == r.source().seqNoAndPrimaryTerm())); assertTrue(request.requests().stream().allMatch((r) -> postFilter.equals(r.source().postFilter()))); - assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().fetchSource() == false)); - assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().includes().length == 0)); - assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().excludes().length == 0)); + assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource() == null)); } }; mockSearchPhaseContext.getRequest() @@ -338,17 +340,72 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL .preference("foobar") .routing("baz"); - SearchHits hits = SearchHits.empty(new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + SearchHit hit = new SearchHit(1, "ID"); + hit.setDocumentField("someField", new DocumentField("someField", Collections.singletonList("foo"))); + SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); + try { + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + 
mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + } + }); + phase.run(); + mockSearchPhaseContext.assertNoFailure(); + } finally { + hits.decRef(); + } + } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); + } + } + } + + public void testExpandSearchRespectsOriginalPIT() { + MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); + final PointInTimeBuilder pit = new PointInTimeBuilder(new BytesArray("foo")); + try { + boolean version = randomBoolean(); + final boolean seqNoAndTerm = randomBoolean(); + + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @Override - public void run() { - mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { + assertTrue(request.requests().stream().allMatch((r) -> r.preference() == null)); + assertTrue(request.requests().stream().allMatch((r) -> r.indices() == Strings.EMPTY_ARRAY)); + assertTrue(request.requests().stream().allMatch((r) -> r.source().pointInTimeBuilder().equals(pit))); } - }); - phase.run(); - mockSearchPhaseContext.assertNoFailure(); - assertNotNull(mockSearchPhaseContext.searchResponse.get()); - mockSearchPhaseContext.execute(() -> {}); + }; + mockSearchPhaseContext.getRequest() + .source( + new SearchSourceBuilder().collapse( + new CollapseBuilder("someField").setInnerHits( + new InnerHitBuilder().setName("foobarbaz").setVersion(version).setSeqNoAndPrimaryTerm(seqNoAndTerm) + ) + ).fetchSource(false).postFilter(QueryBuilders.existsQuery("foo")).pointInTimeBuilder(pit) + ) + .routing("baz"); + + SearchHit hit = new SearchHit(1, "ID"); + hit.setDocumentField("someField", new DocumentField("someField", Collections.singletonList("foo"))); + SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); + try { + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + mockSearchPhaseContext.sendSearchResponse( + new SearchResponseSections(hits, null, null, false, null, null, 1), + new AtomicArray<>(0) + ); + } + }); + phase.run(); + mockSearchPhaseContext.assertNoFailure(); + } finally { + hits.decRef(); + } } finally { var resp = mockSearchPhaseContext.searchResponse.get(); if (resp != null) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index 5fadd8f263f7c..8d4b04746e7a4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -1074,7 +1074,7 @@ public void testResolveConflictingMappings() throws Exception { .build(); state = service.addIndexTemplateV2(state, true, "my-template", it); - List mappings = MetadataIndexTemplateService.collectMappings(state, "my-template", Map.of(), "my-index"); + List mappings = MetadataIndexTemplateService.collectMappings(state, "my-template", "my-index"); assertNotNull(mappings); assertThat(mappings.size(), equalTo(3)); @@ -1136,7 +1136,7 @@ public void testResolveMappings() throws Exception { .build(); state = 
service.addIndexTemplateV2(state, true, "my-template", it); - List mappings = MetadataIndexTemplateService.collectMappings(state, "my-template", Map.of(), "my-index"); + List mappings = MetadataIndexTemplateService.collectMappings(state, "my-template", "my-index"); assertNotNull(mappings); assertThat(mappings.size(), equalTo(3)); @@ -1190,7 +1190,6 @@ public void testDefinedTimestampMappingIsAddedForDataStreamTemplates() throws Ex List mappings = MetadataIndexTemplateService.collectMappings( state, "logs-data-stream-template", - Map.of(), DataStream.getDefaultBackingIndexName("logs", 1L) ); @@ -1242,12 +1241,7 @@ public void testDefinedTimestampMappingIsAddedForDataStreamTemplates() throws Ex .build(); state = service.addIndexTemplateV2(state, true, "timeseries-template", it); - List mappings = MetadataIndexTemplateService.collectMappings( - state, - "timeseries-template", - Map.of(), - "timeseries" - ); + List mappings = MetadataIndexTemplateService.collectMappings(state, "timeseries-template", "timeseries"); assertNotNull(mappings); assertThat(mappings.size(), equalTo(2)); @@ -1269,7 +1263,6 @@ public void testDefinedTimestampMappingIsAddedForDataStreamTemplates() throws Ex mappings = MetadataIndexTemplateService.collectMappings( state, "timeseries-template", - Map.of(), DataStream.getDefaultBackingIndexName("timeseries", 1L) ); @@ -1318,7 +1311,6 @@ public void testUserDefinedMappingTakesPrecedenceOverDefault() throws Exception List mappings = MetadataIndexTemplateService.collectMappings( state, "logs-template", - Map.of(), DataStream.getDefaultBackingIndexName("logs", 1L) ); @@ -1375,7 +1367,6 @@ public void testUserDefinedMappingTakesPrecedenceOverDefault() throws Exception List mappings = MetadataIndexTemplateService.collectMappings( state, "timeseries-template", - Map.of(), DataStream.getDefaultBackingIndexName("timeseries-template", 1L) ); @@ -2442,12 +2433,7 @@ public void testComposableTemplateWithSubobjectsFalse() throws Exception { .build(); state = service.addIndexTemplateV2(state, true, "composable-template", it); - List mappings = MetadataIndexTemplateService.collectMappings( - state, - "composable-template", - Map.of(), - "test-index" - ); + List mappings = MetadataIndexTemplateService.collectMappings(state, "composable-template", "test-index"); assertNotNull(mappings); assertThat(mappings.size(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 892b6aa3bf176..00e21603ec8b4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -28,8 +27,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.XContentHelper; import 
org.elasticsearch.core.Nullable; import org.elasticsearch.core.Predicates; @@ -71,7 +70,6 @@ import java.util.Set; import java.util.SortedMap; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -2308,30 +2306,32 @@ public static int expectedChunkCount(ToXContent.Params params, Metadata metadata chunkCount += 2; if (custom instanceof ComponentTemplateMetadata componentTemplateMetadata) { - chunkCount += 2 + componentTemplateMetadata.componentTemplates().size(); + chunkCount += checkChunkSize(custom, params, 2 + componentTemplateMetadata.componentTemplates().size()); } else if (custom instanceof ComposableIndexTemplateMetadata composableIndexTemplateMetadata) { - chunkCount += 2 + composableIndexTemplateMetadata.indexTemplates().size(); + chunkCount += checkChunkSize(custom, params, 2 + composableIndexTemplateMetadata.indexTemplates().size()); } else if (custom instanceof DataStreamMetadata dataStreamMetadata) { - chunkCount += 4 + (dataStreamMetadata.dataStreams().size() * 2L) + dataStreamMetadata.getDataStreamAliases().size(); + chunkCount += checkChunkSize( + custom, + params, + 4 + dataStreamMetadata.dataStreams().size() + dataStreamMetadata.getDataStreamAliases().size() + ); } else if (custom instanceof DesiredNodesMetadata) { - chunkCount += 1; + chunkCount += checkChunkSize(custom, params, 1); } else if (custom instanceof FeatureMigrationResults featureMigrationResults) { - chunkCount += 2 + featureMigrationResults.getFeatureStatuses().size(); + chunkCount += checkChunkSize(custom, params, 2 + featureMigrationResults.getFeatureStatuses().size()); } else if (custom instanceof IndexGraveyard indexGraveyard) { - chunkCount += 2 + indexGraveyard.getTombstones().size(); + chunkCount += checkChunkSize(custom, params, 2 + indexGraveyard.getTombstones().size()); } else if (custom instanceof IngestMetadata ingestMetadata) { - chunkCount += 2 + ingestMetadata.getPipelines().size(); + chunkCount += checkChunkSize(custom, params, 2 + ingestMetadata.getPipelines().size()); } else if (custom instanceof NodesShutdownMetadata nodesShutdownMetadata) { - chunkCount += 2 + nodesShutdownMetadata.getAll().size(); + chunkCount += checkChunkSize(custom, params, 2 + nodesShutdownMetadata.getAll().size()); } else if (custom instanceof PersistentTasksCustomMetadata persistentTasksCustomMetadata) { - chunkCount += 3 + persistentTasksCustomMetadata.tasks().size(); + chunkCount += checkChunkSize(custom, params, 3 + persistentTasksCustomMetadata.tasks().size()); } else if (custom instanceof RepositoriesMetadata repositoriesMetadata) { - chunkCount += repositoriesMetadata.repositories().size(); + chunkCount += checkChunkSize(custom, params, repositoriesMetadata.repositories().size()); } else { // could be anything, we have to just try it - chunkCount += Iterables.size( - (Iterable) (() -> Iterators.map(custom.toXContentChunked(params), Function.identity())) - ); + chunkCount += count(custom.toXContentChunked(params)); } } @@ -2343,6 +2343,21 @@ public static int expectedChunkCount(ToXContent.Params params, Metadata metadata return Math.toIntExact(chunkCount); } + private static long count(Iterator it) { + long count = 0; + while (it.hasNext()) { + count++; + it.next(); + } + return count; + } + + private static long checkChunkSize(ChunkedToXContent custom, ToXContent.Params params, long expectedSize) { + long actualSize = count(custom.toXContentChunked(params)); + assertThat(actualSize, 
equalTo(expectedSize)); + return actualSize; + } + /** * With this test we ensure that we consider whether a new field added to Metadata should be checked * in Metadata.isGlobalStateEquals. We force the instance fields to be either in the checked list diff --git a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index 37d75d923a7a7..ca5b9295adfd7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -30,12 +30,12 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.index.shard.ShardLongFieldRange; @@ -398,12 +398,8 @@ public void writeTo(StreamOutput out) throws IOException { } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return Iterators.concat( - Iterators.single((builder, params) -> builder.startObject()), - Iterators.single((builder, params) -> builder.field("custom_string_object", strObject)), - Iterators.single((builder, params) -> builder.endObject()) - ); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(b -> b.field("custom_string_object", strObject)); } @Override @@ -441,12 +437,8 @@ public void writeTo(StreamOutput out) throws IOException { } @Override - public Iterator toXContentChunked(ToXContent.Params ignored) { - return Iterators.concat( - Iterators.single((builder, params) -> builder.startObject()), - Iterators.single((builder, params) -> builder.field("custom_integer_object", intObject)), - Iterators.single((builder, params) -> builder.endObject()) - ); + public Iterator toXContentChunked(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(b -> b.field("custom_integer_object", intObject)); } @Override diff --git a/server/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/server/src/test/java/org/elasticsearch/common/util/BigArraysTests.java index fc3096de8b8c0..3ef010f760ab6 100644 --- a/server/src/test/java/org/elasticsearch/common/util/BigArraysTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/BigArraysTests.java @@ -135,6 +135,7 @@ public void testObjectArrayGrowth() { ref[i] = randomFrom(pool); array = bigArrays.grow(array, i + 1); array.set(i, ref[i]); + assertEquals(ref[i], array.getAndSet(i, ref[i])); } for (int i = 0; i < totalLen; ++i) { assertSame(ref[i], array.get(i)); diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java index 72295743608c3..7da5463ea46ff 100644 --- 
a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java @@ -70,11 +70,10 @@ public void testEncodeDecode() throws IOException { { // decode IndexInput in = d.openInput("test.bin", IOContext.READONCE); - final DocValuesForUtil forUtil = new DocValuesForUtil(); final long[] restored = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; for (int i = 0; i < iterations; ++i) { final int bitsPerValue = in.readByte(); - forUtil.decode(bitsPerValue, in, restored); + DocValuesForUtil.decode(bitsPerValue, in, restored); assertArrayEquals( Arrays.toString(restored), ArrayUtil.copyOfSubArray( diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 883723de31d46..c8ca3d17de797 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -55,7 +55,7 @@ import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TotalHitCountCollector; +import org.apache.lucene.search.TotalHitCountCollectorManager; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.Lock; @@ -640,9 +640,8 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { recoverFromTranslog(recoveringEngine, translogHandler, Long.MAX_VALUE); recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.search(new MatchAllDocsQuery(), collector); - assertThat(collector.getTotalHits(), equalTo(operations.get(operations.size() - 1) instanceof Engine.Delete ? 0 : 1)); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + assertThat(totalHits, equalTo(operations.get(operations.size() - 1) instanceof Engine.Delete ? 0 : 1)); } } } @@ -2009,16 +2008,20 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup if (lastFieldValueDoc1 != null) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.search(new TermQuery(new Term("value", lastFieldValueDoc1)), collector); - assertThat(collector.getTotalHits(), equalTo(1)); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValueDoc1)), + new TotalHitCountCollectorManager() + ); + assertThat(totalHits, equalTo(1)); } } if (lastFieldValueDoc2 != null) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.search(new TermQuery(new Term("value", lastFieldValueDoc2)), collector); - assertThat(collector.getTotalHits(), equalTo(1)); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValueDoc2)), + new TotalHitCountCollectorManager() + ); + assertThat(totalHits, equalTo(1)); } } @@ -2244,9 +2247,11 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion // first op and it failed. 
if (docDeleted == false && lastFieldValue != null) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector); - assertThat(collector.getTotalHits(), equalTo(1)); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager() + ); + assertThat(totalHits, equalTo(1)); } } } @@ -2270,9 +2275,8 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion assertVisibleCount(engine, docDeleted ? 0 : 1); if (docDeleted == false) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector); - assertThat(collector.getTotalHits(), equalTo(1)); + Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + assertThat(totalHits, equalTo(1)); } } return opsPerformed; @@ -2357,9 +2361,8 @@ public void testNonInternalVersioningOnPrimary() throws IOException { if (docDeleted == false) { logger.info("searching for [{}]", lastFieldValue); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector); - assertThat(collector.getTotalHits(), equalTo(1)); + Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + assertThat(totalHits, equalTo(1)); } } } @@ -2375,9 +2378,8 @@ public void testVersioningPromotedReplica() throws IOException { final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine); final long currentSeqNo = getSequenceID(replicaEngine, new Engine.Get(false, false, Term.toString(lastReplicaOp.uid()))).v1(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.search(new MatchAllDocsQuery(), collector); - if (collector.getTotalHits() > 0) { + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + if (totalHits > 0) { // last op wasn't delete assertThat(currentSeqNo, equalTo(finalReplicaSeqNo + opsOnPrimary)); } @@ -2400,9 +2402,8 @@ public void testConcurrentExternalVersioningOnPrimary() throws IOException, Inte assertVisibleCount(engine, lastFieldValue == null ? 
0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector); - assertThat(collector.getTotalHits(), equalTo(1)); + Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + assertThat(totalHits, equalTo(1)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java index d48c5550631cd..e385177b87147 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java @@ -179,7 +179,7 @@ private static ObjectMapper createObjectMapper(String name) { name, Explicit.IMPLICIT_TRUE, Optional.empty(), - Explicit.IMPLICIT_FALSE, + Optional.empty(), ObjectMapper.Dynamic.FALSE, emptyMap() ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index 18c4f393bc696..ae793bc3b329e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -20,6 +20,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import static java.util.Collections.emptyList; @@ -433,6 +434,7 @@ private PassThroughObjectMapper createPassThroughMapper(String name, Map { b.startArray("path"); + b.startObject().field("int_value", 20).endObject(); b.startObject().field("int_value", 10).endObject(); + b.endArray(); + b.field("bool_value", true); + }); + assertEquals(""" + {"bool_value":true,"path":[{"int_value":20},{"int_value":10}]}""", syntheticSource); + } + + public void testIndexStoredArraySourceRootObjectArrayWithBypass() throws IOException { + DocumentMapper documentMapper = createMapperServiceWithStoredArraySource(syntheticSourceMapping(b -> { + b.startObject("path"); + { + b.field("type", "object"); + b.field("synthetic_source_keep", "none"); + b.startObject("properties"); + { + b.startObject("int_value").field("type", "integer").endObject(); + } + b.endObject(); + } + b.endObject(); + b.startObject("bool_value").field("type", "boolean").endObject(); + })).documentMapper(); + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startArray("path"); b.startObject().field("int_value", 20).endObject(); + b.startObject().field("int_value", 10).endObject(); b.endArray(); b.field("bool_value", true); }); assertEquals(""" - {"bool_value":true,"path":[{"int_value":10},{"int_value":20}]}""", syntheticSource); + {"bool_value":true,"path":{"int_value":[10,20]}}""", syntheticSource); } public void testIndexStoredArraySourceNestedValueArray() throws IOException { @@ -622,6 +648,12 @@ public void testIndexStoredArraySourceNestedValueArrayDisabled() throws IOExcept { b.startObject("int_value").field("type", "integer").field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, "none").endObject(); b.startObject("bool_value").field("type", "boolean").endObject(); + b.startObject("obj").field("type", "object").field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, "none"); + b.startObject("properties"); + { + 
                b.startObject("foo").field("type", "integer").endObject();
+            }
+            b.endObject().endObject();
         }
         b.endObject();
     }
@@ -632,11 +664,17 @@ public void testIndexStoredArraySourceNestedValueArrayDisabled() throws IOExcept
         {
             b.array("int_value", new int[] { 30, 20, 10 });
             b.field("bool_value", true);
+            b.startArray("obj");
+            {
+                b.startObject().field("foo", 2).endObject();
+                b.startObject().field("foo", 1).endObject();
+            }
+            b.endArray();
         }
         b.endObject();
     });
     assertEquals("""
-        {"path":{"bool_value":true,"int_value":[10,20,30]}}""", syntheticSource);
+        {"path":{"bool_value":true,"int_value":[10,20,30],"obj":{"foo":[1,2]}}}""", syntheticSource);
 }

 public void testFieldStoredArraySourceNestedValueArray() throws IOException {
@@ -674,8 +712,8 @@ public void testFieldStoredSourceNestedValue() throws IOException {
         b.field("type", "object");
         b.startObject("properties");
         {
-            b.startObject("default").field("type", "float").field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, "none").endObject();
-            b.startObject("source_kept").field("type", "float").field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, "all").endObject();
+            b.startObject("default").field("type", "float").field("synthetic_source_keep", "none").endObject();
+            b.startObject("source_kept").field("type", "float").field("synthetic_source_keep", "all").endObject();
             b.startObject("bool_value").field("type", "boolean").endObject();
         }
         b.endObject();
@@ -738,7 +776,7 @@ public void testRootArray() throws IOException {
         b.startObject("path");
         {
             b.field("type", "object");
-            b.field("store_array_source", true);
+            b.field("synthetic_source_keep", "arrays");
             b.startObject("properties");
             {
                 b.startObject("int_value").field("type", "integer").endObject();
@@ -765,7 +803,7 @@ public void testNestedArray() throws IOException {
             b.field("type", "object");
             b.startObject("properties");
             {
-                b.startObject("to").field("type", "object").field("store_array_source", true);
+                b.startObject("to").field("type", "object").field("synthetic_source_keep", "arrays");
                 {
                     b.startObject("properties");
                     {
@@ -835,10 +873,10 @@ public void testArrayWithinArray() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
             b.startObject("path");
             {
-                b.field("type", "object").field("store_array_source", true);
+                b.field("type", "object").field("synthetic_source_keep", "arrays");
                 b.startObject("properties");
                 {
-                    b.startObject("to").field("type", "object").field("store_array_source", true);
+                    b.startObject("to").field("type", "object").field("synthetic_source_keep", "arrays");
                     {
                         b.startObject("properties");
                         {
@@ -893,7 +931,7 @@ public void testObjectArrayAndValue() throws IOException {
             {
                 b.startObject("stored");
                 {
-                    b.field("type", "object").field("store_array_source", true);
+                    b.field("type", "object").field("synthetic_source_keep", "arrays");
                     b.startObject("properties").startObject("leaf").field("type", "integer").endObject().endObject();
                 }
                 b.endObject();
@@ -926,6 +964,79 @@ public void testObjectArrayAndValue() throws IOException {
         {"path":{"stored":[{"leaf":10},{"leaf":20}]}}""", syntheticSource);
 }

+    public void testDeeplyNestedObjectArrayAndValue() throws IOException {
+        DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
+            b.startObject("path").startObject("properties").startObject("to").startObject("properties");
+            {
+                b.startObject("stored");
+                {
+                    b.field("type", "object").field("store_array_source", true);
+                    b.startObject("properties").startObject("leaf").field("type", "integer").endObject().endObject();
+                }
+                b.endObject();
+            }
+            b.endObject().endObject().endObject().endObject();
+        })).documentMapper();
+        var syntheticSource = syntheticSource(documentMapper, b -> {
+            b.startArray("path");
+            {
+                b.startObject();
+                {
+                    b.startObject("to").startArray("stored");
+                    {
+                        b.startObject().field("leaf", 10).endObject();
+                    }
+                    b.endArray().endObject();
+                }
+                b.endObject();
+                b.startObject();
+                {
+                    b.startObject("to").startObject("stored").field("leaf", 20).endObject().endObject();
+                }
+                b.endObject();
+            }
+            b.endArray();
+        });
+        assertEquals("""
+            {"path":{"to":{"stored":[{"leaf":10},{"leaf":20}]}}}""", syntheticSource);
+    }
+
+    public void testObjectArrayAndValueInNestedObject() throws IOException {
+        DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
+            b.startObject("path").startObject("properties").startObject("to").startObject("properties");
+            {
+                b.startObject("stored");
+                {
+                    b.field("type", "nested").field("dynamic", false);
+                }
+                b.endObject();
+            }
+            b.endObject().endObject().endObject().endObject();
+        })).documentMapper();
+        var syntheticSource = syntheticSource(documentMapper, b -> {
+            b.startArray("path");
+            {
+                b.startObject();
+                {
+                    b.startObject("to").startArray("stored");
+                    {
+                        b.startObject().field("leaf", 10).endObject();
+                    }
+                    b.endArray().endObject();
+                }
+                b.endObject();
+                b.startObject();
+                {
+                    b.startObject("to").startObject("stored").field("leaf", 20).endObject().endObject();
+                }
+                b.endObject();
+            }
+            b.endArray();
+        });
+        assertEquals("""
+            {"path":{"to":{"stored":[{"leaf":10},{"leaf":20}]}}}""", syntheticSource);
+    }
+
     public void testObjectArrayAndValueDisabledObject() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
             b.startObject("path").field("type", "object").startObject("properties");
@@ -1061,7 +1172,7 @@ public void testStoredArrayWithinHigherLevelArray() throws IOException {
             b.field("type", "object");
             b.startObject("properties");
             {
-                b.startObject("to").field("type", "object").field("store_array_source", true);
+                b.startObject("to").field("type", "object").field("synthetic_source_keep", "arrays");
                 {
                     b.startObject("properties");
                     {
@@ -1107,6 +1218,42 @@ public void testStoredArrayWithinHigherLevelArray() throws IOException {
             {"path":{"to":[{"name":"A"},{"name":"B"},{"name":"C"},{"name":"D"}]}}""", booleanValue), syntheticSource);
     }

+    public void testObjectWithKeepAll() throws IOException {
+        DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
+            b.startObject("path");
+            {
+                b.field("type", "object").field("synthetic_source_keep", "all");
+                b.startObject("properties");
+                {
+                    b.startObject("a").field("type", "object").endObject();
+                    b.startObject("b").field("type", "integer").endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+            b.startObject("id").field("type", "integer").endObject();
+        })).documentMapper();
+        var syntheticSource = syntheticSource(documentMapper, b -> {
+            b.startObject("path");
+            {
+                b.startArray("a");
+                {
+                    b.startObject().field("foo", 30).endObject();
+                    b.startObject().field("foo", 20).endObject();
+                    b.startObject().field("foo", 10).endObject();
+                    b.startObject().field("bar", 20).endObject();
+                    b.startObject().field("bar", 10).endObject();
+                }
+                b.endArray();
+                b.array("b", 4, 1, 3, 2);
+            }
+            b.endObject();
+            b.field("id", 10);
+        });
+        assertEquals("""
+            {"id":10,"path":{"a":[{"foo":30},{"foo":20},{"foo":10},{"bar":20},{"bar":10}],"b":[4,1,3,2]}}""", syntheticSource);
+    }
+
     public void testFallbackFieldWithinHigherLevelArray() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
             b.startObject("path");
@@ -1140,7 +1287,7 @@ public void testFallbackFieldWithinHigherLevelArray() throws IOException {

     public void testFieldOrdering() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
             b.startObject("A").field("type", "integer").endObject();
-            b.startObject("B").field("type", "object").field("store_array_source", true);
+            b.startObject("B").field("type", "object").field("synthetic_source_keep", "arrays");
             {
                 b.startObject("properties");
                 {
@@ -1151,7 +1298,7 @@ public void testFieldOrdering() throws IOException {
             }
             b.endObject();
             b.startObject("C").field("type", "integer").endObject();
-            b.startObject("D").field("type", "object").field("store_array_source", true);
+            b.startObject("D").field("type", "object").field("synthetic_source_keep", "arrays");
             {
                 b.startObject("properties");
                 {
@@ -1189,7 +1336,7 @@ public void testNestedObjectWithField() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
             b.startObject("path").field("type", "nested");
             {
-                b.field("store_array_source", true);
+                b.field("synthetic_source_keep", "all");
                 b.startObject("properties");
                 {
                     b.startObject("foo").field("type", "keyword").endObject();
@@ -1211,7 +1358,7 @@ public void testNestedObjectWithArray() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
             b.startObject("path").field("type", "nested");
             {
-                b.field("store_array_source", true);
+                b.field("synthetic_source_keep", "all");
                 b.startObject("properties");
                 {
                     b.startObject("foo").field("type", "keyword").endObject();
@@ -1244,7 +1391,7 @@ public void testNestedSubobjectWithField() throws IOException {
                 b.startObject("int_value").field("type", "integer").endObject();
                 b.startObject("to").field("type", "nested");
                 {
-                    b.field("store_array_source", true);
+                    b.field("synthetic_source_keep", "all");
                     b.startObject("properties");
                     {
                         b.startObject("foo").field("type", "keyword").endObject();
@@ -1285,7 +1432,7 @@ public void testNestedSubobjectWithArray() throws IOException {
                 b.startObject("int_value").field("type", "integer").endObject();
                 b.startObject("to").field("type", "nested");
                 {
-                    b.field("store_array_source", true);
+                    b.field("synthetic_source_keep", "all");
                     b.startObject("properties");
                     {
                         b.startObject("foo").field("type", "keyword").endObject();
@@ -1325,7 +1472,7 @@ public void testNestedSubobjectWithArray() throws IOException {

     public void testNestedObjectIncludeInRoot() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
-            b.startObject("path").field("type", "nested").field("store_array_source", true).field("include_in_root", true);
+            b.startObject("path").field("type", "nested").field("synthetic_source_keep", "all").field("include_in_root", true);
             {
                 b.startObject("properties");
                 {
@@ -1599,7 +1746,7 @@ public void testStoredNestedSubObjectWithNameOverlappingParentName() throws IOEx
             b.startObject("path");
             b.startObject("properties");
             {
-                b.startObject("at").field("type", "nested").field("store_array_source", "true").endObject();
+                b.startObject("at").field("type", "nested").field("synthetic_source_keep", "all").endObject();
             }
             b.endObject();
             b.endObject();
@@ -1646,6 +1793,107 @@ public void testCopyToLogicInsideObject() throws IOException {
         assertEquals("{\"path\":{\"at\":\"A\"}}", syntheticSource);
     }

+    public void testDynamicIgnoredObjectWithFlatFields() throws IOException {
+        DocumentMapper documentMapper = createMapperService(topMapping(b -> {
+            b.startObject("_source").field("mode", "synthetic").endObject();
+            b.field("dynamic", false);
+        })).documentMapper();
+
+        CheckedConsumer<XContentBuilder, IOException> document = b -> {
+            b.startObject("top");
+            b.field("file.name", "A");
+            b.field("file.line", 10);
+            b.endObject();
+        };
+
+        var syntheticSource = syntheticSource(documentMapper, document);
+        assertEquals("{\"top\":{\"file.name\":\"A\",\"file.line\":10}}", syntheticSource);
+
+        CheckedConsumer<XContentBuilder, IOException> documentWithArray = b -> {
+            b.startArray("top");
+            b.startObject();
+            b.field("file.name", "A");
+            b.field("file.line", 10);
+            b.endObject();
+            b.startObject();
+            b.field("file.name", "B");
+            b.field("file.line", 20);
+            b.endObject();
+            b.endArray();
+        };
+
+        var syntheticSourceWithArray = syntheticSource(documentMapper, documentWithArray);
+        assertEquals("""
+            {"top":[{"file.name":"A","file.line":10},{"file.name":"B","file.line":20}]}""", syntheticSourceWithArray);
+    }
+
+    public void testDisabledRootObjectWithFlatFields() throws IOException {
+        DocumentMapper documentMapper = createMapperService(topMapping(b -> {
+            b.startObject("_source").field("mode", "synthetic").endObject();
+            b.field("enabled", false);
+        })).documentMapper();
+
+        CheckedConsumer<XContentBuilder, IOException> document = b -> {
+            b.startObject("top");
+            b.field("file.name", "A");
+            b.field("file.line", 10);
+            b.endObject();
+        };
+
+        var syntheticSource = syntheticSource(documentMapper, document);
+        assertEquals("{\"top\":{\"file.name\":\"A\",\"file.line\":10}}", syntheticSource);
+
+        CheckedConsumer<XContentBuilder, IOException> documentWithArray = b -> {
+            b.startArray("top");
+            b.startObject();
+            b.field("file.name", "A");
+            b.field("file.line", 10);
+            b.endObject();
+            b.startObject();
+            b.field("file.name", "B");
+            b.field("file.line", 20);
+            b.endObject();
+            b.endArray();
+        };
+
+        var syntheticSourceWithArray = syntheticSource(documentMapper, documentWithArray);
+        assertEquals("""
+            {"top":[{"file.name":"A","file.line":10},{"file.name":"B","file.line":20}]}""", syntheticSourceWithArray);
+    }
+
+    public void testDisabledObjectWithFlatFields() throws IOException {
+        DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
+            b.startObject("top").field("type", "object").field("enabled", false).endObject();
+        })).documentMapper();
+
+        CheckedConsumer<XContentBuilder, IOException> document = b -> {
+            b.startObject("top");
+            b.field("file.name", "A");
+            b.field("file.line", 10);
+            b.endObject();
+        };
+
+        var syntheticSource = syntheticSource(documentMapper, document);
+        assertEquals("{\"top\":{\"file.name\":\"A\",\"file.line\":10}}", syntheticSource);
+
+        CheckedConsumer<XContentBuilder, IOException> documentWithArray = b -> {
+            b.startArray("top");
+            b.startObject();
+            b.field("file.name", "A");
+            b.field("file.line", 10);
+            b.endObject();
+            b.startObject();
+            b.field("file.name", "B");
+            b.field("file.line", 20);
+            b.endObject();
+            b.endArray();
+        };
+
+        var syntheticSourceWithArray = syntheticSource(documentMapper, documentWithArray);
+        assertEquals("""
+            {"top":[{"file.name":"A","file.line":10},{"file.name":"B","file.line":20}]}""", syntheticSourceWithArray);
+    }
+
     protected void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader)
         throws IOException {
         // We exclude ignored source field since in some cases it contains an exact copy of a part of document source.
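Taken together, the hunks above exercise the three `synthetic_source_keep` values ("none", "arrays", "all"). As a quick reference, here is a minimal, self-contained sketch of the value semantics the assertions encode — a plain-Java stand-in, not the mapper implementation; the sorting behavior for "none" is inferred from the `[30,20,10] -> [10,20,30]` assertions above:

    import java.util.List;

    // Hedged sketch: "none" re-synthesizes leaf arrays from doc values (sorted, possibly
    // deduplicated), while "arrays" and "all" replay the stored source verbatim.
    public class SyntheticSourceKeepSketch {
        enum SourceKeepMode { NONE, ARRAYS, ALL }

        static List<Integer> reconstruct(SourceKeepMode mode, List<Integer> indexed) {
            return switch (mode) {
                case NONE -> indexed.stream().sorted().toList(); // synthesized from doc values
                case ARRAYS, ALL -> indexed;                     // stored source replayed as indexed
            };
        }

        public static void main(String[] args) {
            System.out.println(reconstruct(SourceKeepMode.NONE, List.of(30, 20, 10)));   // [10, 20, 30]
            System.out.println(reconstruct(SourceKeepMode.ARRAYS, List.of(30, 20, 10))); // [30, 20, 10]
        }
    }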
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java
index 1381df07789b5..fd44e68df19a8 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java
@@ -84,7 +84,7 @@ public void testSubfieldOverride() {
             "object",
             Explicit.EXPLICIT_TRUE,
             Optional.empty(),
-            Explicit.IMPLICIT_FALSE,
+            Optional.empty(),
             ObjectMapper.Dynamic.TRUE,
             Collections.singletonMap("object.subfield", fieldMapper)
         );
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
index 0a954115e77f6..be1469e25f24d 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
@@ -1571,14 +1571,14 @@ public void testNestedMapperFilters() throws Exception {

     public void testStoreArraySourceinSyntheticSourceMode() throws IOException {
         DocumentMapper mapper = createDocumentMapper(syntheticSourceMapping(b -> {
-            b.startObject("o").field("type", "nested").field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true).endObject();
+            b.startObject("o").field("type", "nested").field("synthetic_source_keep", "all").endObject();
         }));
         assertNotNull(mapper.mapping().getRoot().getMapper("o"));
     }

     public void testStoreArraySourceNoopInNonSyntheticSourceMode() throws IOException {
         DocumentMapper mapper = createDocumentMapper(mapping(b -> {
-            b.startObject("o").field("type", "nested").field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true).endObject();
+            b.startObject("o").field("type", "nested").field("synthetic_source_keep", "all").endObject();
         }));
         assertNotNull(mapper.mapping().getRoot().getMapper("o"));
     }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
index 3312c94e8a0e1..64eee39532c31 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
@@ -167,7 +167,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException {
         assertNotNull(objectMapper);
         assertFalse(objectMapper.isEnabled());
         assertEquals(ObjectMapper.Subobjects.ENABLED, objectMapper.subobjects());
-        assertFalse(objectMapper.storeArraySource());
+        assertTrue(objectMapper.sourceKeepMode().isEmpty());

         // Setting 'enabled' to true is allowed, and updates the mapping.
         update = Strings.toString(
@@ -189,7 +189,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException {
         assertNotNull(objectMapper);
         assertTrue(objectMapper.isEnabled());
         assertEquals(ObjectMapper.Subobjects.AUTO, objectMapper.subobjects());
-        assertTrue(objectMapper.storeArraySource());
+        assertEquals(Mapper.SourceKeepMode.ARRAYS, objectMapper.sourceKeepMode().orElse(Mapper.SourceKeepMode.NONE));
     }

     public void testFieldReplacementForIndexTemplates() throws IOException {
@@ -678,14 +678,14 @@ public void testSyntheticSourceDocValuesFieldWithout() throws IOException {

     public void testStoreArraySourceinSyntheticSourceMode() throws IOException {
         DocumentMapper mapper = createDocumentMapper(syntheticSourceMapping(b -> {
-            b.startObject("o").field("type", "object").field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true).endObject();
+            b.startObject("o").field("type", "object").field("synthetic_source_keep", "arrays").endObject();
         }));
         assertNotNull(mapper.mapping().getRoot().getMapper("o"));
     }

     public void testStoreArraySourceNoopInNonSyntheticSourceMode() throws IOException {
         DocumentMapper mapper = createDocumentMapper(mapping(b -> {
-            b.startObject("o").field("type", "object").field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true).endObject();
+            b.startObject("o").field("type", "object").field("synthetic_source_keep", "arrays").endObject();
         }));
         assertNotNull(mapper.mapping().getRoot().getMapper("o"));
     }
@@ -727,7 +727,7 @@ private ObjectMapper createObjectMapperWithAllParametersSet(CheckedConsumer createMapperService(mapping));
+        assertThat(e.getMessage(), containsString("root object can't be configured with [synthetic_source_keep:all]"));
+    }
+
     public void testWithoutMappers() throws IOException {
         RootObjectMapper shallowRoot = createRootObjectMapperWithAllParametersSet(b -> {}, b -> {});
         RootObjectMapper root = createRootObjectMapperWithAllParametersSet(b -> {
diff --git a/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java
index bc2ba833536ff..3890b53e29ffb 100644
--- a/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java
@@ -9,13 +9,12 @@

 package org.elasticsearch.index.query;

-import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.spans.FieldMaskingSpanQuery;
-import org.apache.lucene.queries.spans.SpanTermQuery;
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.core.Strings;
+import org.elasticsearch.lucene.queries.SpanMatchNoDocsQuery;
 import org.elasticsearch.test.AbstractQueryTestCase;

 import java.io.IOException;
@@ -105,7 +104,7 @@ public void testJsonWithTopLevelBoost() throws IOException {
             }
         }""", NAME.getPreferredName());
         Query q = parseQuery(json).toQuery(createSearchExecutionContext());
-        assertEquals(new BoostQuery(new FieldMaskingSpanQuery(new SpanTermQuery(new Term("value", "foo")), "mapped_geo_shape"), 42.0f), q);
+        assertEquals(new BoostQuery(new FieldMaskingSpanQuery(new SpanMatchNoDocsQuery("value", null), "mapped_geo_shape"), 42.0f), q);
     }

     public void testJsonWithDeprecatedName() throws IOException {
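The field-masking change above, and the span-query test changes that follow, all encode the same behavior: a span term over a field that is unmapped (or has no term-level support) now produces a SpanMatchNoDocsQuery instead of a raw SpanTermQuery. A hedged, standalone sketch of that dispatch, with stub types standing in for the builder and field-type lookup:

    // Illustrative stand-in only; the real dispatch lives in the span query builders.
    public class UnmappedSpanDispatch {
        record FieldType(String name) {}

        // Stand-in for SearchExecutionContext#getFieldType: null means the field is unmapped.
        static FieldType getFieldType(String field) {
            return "mapped_text_field".equals(field) ? new FieldType(field) : null;
        }

        static String toQuery(String field, String value) {
            if (getFieldType(field) == null) {
                return "SpanMatchNoDocsQuery(" + field + ")"; // matches nothing, but is still a span query
            }
            return "SpanTermQuery(" + field + ":" + value + ")";
        }

        public static void main(String[] args) {
            System.out.println(toQuery("mapped_text_field", "foo")); // SpanTermQuery(mapped_text_field:foo)
            System.out.println(toQuery("value", "foo"));             // SpanMatchNoDocsQuery(value)
        }
    }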
diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanGapQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanGapQueryBuilderTests.java
index cef43a635541e..5adca6d562dca 100644
--- a/server/src/test/java/org/elasticsearch/index/query/SpanGapQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/SpanGapQueryBuilderTests.java
@@ -13,6 +13,7 @@
 import org.apache.lucene.queries.spans.SpanQuery;
 import org.apache.lucene.queries.spans.SpanTermQuery;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.lucene.queries.SpanMatchNoDocsQuery;
 import org.elasticsearch.test.AbstractQueryTestCase;

 import java.io.IOException;
@@ -50,7 +51,9 @@ protected SpanNearQueryBuilder doCreateTestQueryBuilder() {
     protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException {
         assertThat(
             query,
-            either(instanceOf(SpanNearQuery.class)).or(instanceOf(SpanTermQuery.class)).or(instanceOf(MatchAllQueryBuilder.class))
+            either(instanceOf(SpanNearQuery.class)).or(instanceOf(SpanTermQuery.class))
+                .or(instanceOf(MatchAllQueryBuilder.class))
+                .or(instanceOf(SpanMatchNoDocsQuery.class))
         );
         if (query instanceof SpanNearQuery spanNearQuery) {
             assertThat(spanNearQuery.getSlop(), equalTo(queryBuilder.slop()));
diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java
index f0f23d8539e13..c4a9267ff68a0 100644
--- a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java
@@ -15,9 +15,9 @@
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.lucene.queries.SpanMatchNoDocsQuery;
 import org.elasticsearch.xcontent.json.JsonStringEncoder;

 import java.io.IOException;
@@ -49,18 +49,16 @@ protected SpanTermQueryBuilder createQueryBuilder(String fieldName, Object value
     @Override
     protected void doAssertLuceneQuery(SpanTermQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException {
-        assertThat(query, instanceOf(SpanTermQuery.class));
-        SpanTermQuery spanTermQuery = (SpanTermQuery) query;
-
-        String expectedFieldName = expectedFieldName(queryBuilder.fieldName);
-        assertThat(spanTermQuery.getTerm().field(), equalTo(expectedFieldName));
-
         MappedFieldType mapper = context.getFieldType(queryBuilder.fieldName());
         if (mapper != null) {
+            String expectedFieldName = expectedFieldName(queryBuilder.fieldName);
+            assertThat(query, instanceOf(SpanTermQuery.class));
+            SpanTermQuery spanTermQuery = (SpanTermQuery) query;
+            assertThat(spanTermQuery.getTerm().field(), equalTo(expectedFieldName));
             Term term = ((TermQuery) mapper.termQuery(queryBuilder.value(), null)).getTerm();
             assertThat(spanTermQuery.getTerm(), equalTo(term));
         } else {
-            assertThat(spanTermQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value())));
+            assertThat(query, instanceOf(SpanMatchNoDocsQuery.class));
         }
     }
@@ -117,23 +115,13 @@ public void testParseFailsWithMultipleFields() throws IOException {
         assertEquals("[span_term] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
     }

-    public void testWithMetadataField() throws IOException {
-        SearchExecutionContext context = createSearchExecutionContext();
-        for (String field : new String[] { "field1", "field2" }) {
-            SpanTermQueryBuilder spanTermQueryBuilder = new SpanTermQueryBuilder(field, "toto");
-            Query query = spanTermQueryBuilder.toQuery(context);
-            Query expected = new SpanTermQuery(new Term(field, "toto"));
-            assertEquals(expected, query);
-        }
-    }
-
     public void testWithBoost() throws IOException {
         SearchExecutionContext context = createSearchExecutionContext();
-        for (String field : new String[] { "field1", "field2" }) {
+        for (String field : new String[] { TEXT_FIELD_NAME, TEXT_ALIAS_FIELD_NAME }) {
             SpanTermQueryBuilder spanTermQueryBuilder = new SpanTermQueryBuilder(field, "toto");
             spanTermQueryBuilder.boost(10);
             Query query = spanTermQueryBuilder.toQuery(context);
-            Query expected = new BoostQuery(new SpanTermQuery(new Term(field, "toto")), 10);
+            Query expected = new BoostQuery(new SpanTermQuery(new Term(TEXT_FIELD_NAME, "toto")), 10);
             assertEquals(expected, query);
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
index 156460d320ee2..610e87b50d365 100644
--- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
@@ -9,6 +9,7 @@

 package org.elasticsearch.indices.breaker;

+import org.elasticsearch.common.ReferenceDocs;
 import org.elasticsearch.common.breaker.ChildMemoryCircuitBreaker;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
@@ -911,9 +912,11 @@ public double getOverhead() {
             ),
             oneOf(
                 "[parent] Data too large, data for [test] would be [3/3b], which is larger than the limit of [6/6b], "
-                    + "usages [child=7/7b, otherChild=8/8b]",
+                    + "usages [child=7/7b, otherChild=8/8b]; for more information, see "
+                    + ReferenceDocs.CIRCUIT_BREAKER_ERRORS,
                 "[parent] Data too large, data for [test] would be [3/3b], which is larger than the limit of [6/6b], "
-                    + "usages [otherChild=8/8b, child=7/7b]"
+                    + "usages [otherChild=8/8b, child=7/7b]; for more information, see "
+                    + ReferenceDocs.CIRCUIT_BREAKER_ERRORS
             )
         );
@@ -928,7 +931,8 @@ public double getOverhead() {
             ),
             equalTo(
                 "[parent] Data too large, data for [test] would be [3/3b], which is larger than the limit of [6/6b], "
-                    + "real usage: [2/2b], new bytes reserved: [1/1b], usages []"
+                    + "real usage: [2/2b], new bytes reserved: [1/1b], usages []; for more information, see "
+                    + ReferenceDocs.CIRCUIT_BREAKER_ERRORS
             )
         );
@@ -945,7 +949,8 @@ public double getOverhead() {
             ),
             equalTo(
                 "[parent] Data too large, data for [test] would be [-3], which is larger than the limit of [-6], "
-                    + "real usage: [-2], new bytes reserved: [-1/-1b], usages [child1=-7]"
+                    + "real usage: [-2], new bytes reserved: [-1/-1b], usages [child1=-7]; for more information, see "
+                    + ReferenceDocs.CIRCUIT_BREAKER_ERRORS
             )
         );
     } finally {
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
index 49e75a71aa7f7..3adaf398624de 100644
--- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
@@ -32,20 +32,16 @@
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.AliasMetadata;
-import org.elasticsearch.cluster.metadata.ComponentTemplate;
-import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
 import org.elasticsearch.cluster.metadata.DataStream;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
 import org.elasticsearch.cluster.metadata.Metadata;
-import org.elasticsearch.cluster.metadata.Template;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils;
 import org.elasticsearch.common.TriConsumer;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.util.Maps;
@@ -77,7 +73,6 @@
 import org.mockito.ArgumentMatcher;
 import org.mockito.invocation.InvocationOnMock;

-import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
@@ -2493,7 +2488,7 @@ public void testResolveFinalPipelineWithDateMathExpression() {

         // index name matches with IDM:
         IndexRequest indexRequest = new IndexRequest("<idx-{now/d}>");
-        IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, metadata, epochMillis, Map.of());
+        IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, metadata, epochMillis);
         assertTrue(hasPipeline(indexRequest));
         assertTrue(indexRequest.isPipelineResolved());
         assertThat(indexRequest.getPipeline(), equalTo("_none"));
@@ -2858,83 +2853,6 @@ public void testResolvePipelinesWithNonePipeline() {
         }
     }

-    public void testResolvePipelinesAndUpdateIndexRequestWithComponentTemplateSubstitutions() throws IOException {
-        final String componentTemplateName = "test-component-template";
-        final String indexName = "my-index-1";
-        final String indexPipeline = "index-pipeline";
-        final String realTemplatePipeline = "template-pipeline";
-        final String substitutePipeline = "substitute-pipeline";
-
-        Metadata metadata;
-        {
-            // Build up cluster state metadata
-            IndexMetadata.Builder builder = IndexMetadata.builder(indexName)
-                .settings(settings(IndexVersion.current()))
-                .numberOfShards(1)
-                .numberOfReplicas(0);
-            ComponentTemplate realComponentTemplate = new ComponentTemplate(
-                new Template(
-                    Settings.builder().put("index.default_pipeline", realTemplatePipeline).build(),
-                    CompressedXContent.fromJSON("{}"),
-                    null
-                ),
-                null,
-                null
-            );
-            ComposableIndexTemplate composableIndexTemplate = ComposableIndexTemplate.builder()
-                .indexPatterns(List.of("my-index-*"))
-                .componentTemplates(List.of(componentTemplateName))
-                .build();
-            metadata = Metadata.builder()
-                .put(builder)
-                .indexTemplates(Map.of("my-index-template", composableIndexTemplate))
-                .componentTemplates(Map.of("test-component-template", realComponentTemplate))
-                .build();
-        }
-
-        Map<String, ComponentTemplate> componentTemplateSubstitutions;
-        {
-            ComponentTemplate simulatedComponentTemplate = new ComponentTemplate(
-                new Template(
-                    Settings.builder().put("index.default_pipeline", substitutePipeline).build(),
-                    CompressedXContent.fromJSON("{}"),
-                    null
-                ),
-                null,
-                null
-            );
-            componentTemplateSubstitutions = Map.of(componentTemplateName, simulatedComponentTemplate);
-        }
-
-        {
-            /*
-             * Here there is a pipeline in the request. This takes precedence over anything in the index or templates or component template
-             * substitutions.
-             */
-            IndexRequest indexRequest = new IndexRequest(indexName).setPipeline(indexPipeline);
-            IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, metadata, 0, componentTemplateSubstitutions);
-            assertThat(indexRequest.getPipeline(), equalTo(indexPipeline));
-        }
-        {
-            /*
-             * Here there is no pipeline in the request, but there is one in the substitute component template. So it takes precedence.
-             */
-            IndexRequest indexRequest = new IndexRequest(indexName);
-            IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, metadata, 0, componentTemplateSubstitutions);
-            assertThat(indexRequest.getPipeline(), equalTo(substitutePipeline));
-        }
-        {
-            /*
-             * This one is tricky. Since the index exists and there are no component template substitutions, we're going to use the actual
-             * index in this case rather than its template. The index does not have a default pipeline set, so it's "_none" instead of
-             * realTemplatePipeline.
-             */
-            IndexRequest indexRequest = new IndexRequest(indexName);
-            IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, metadata, 0, Map.of());
-            assertThat(indexRequest.getPipeline(), equalTo("_none"));
-        }
-    }
-
     private static Tuple<String, Object> randomMapEntry() {
         return tuple(randomAlphaOfLength(5), randomObject());
     }
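The comments in the removed test still document the resolution order: an explicit request pipeline wins, then a substituted component template's default pipeline, then the concrete index settings (falling back to "_none"). A hedged, self-contained sketch of that precedence — deliberately simplified, since the real resolver also weighs index templates and date-math names:

    import java.util.Optional;

    public class PipelinePrecedenceSketch {
        static String resolve(Optional<String> requestPipeline, Optional<String> substituteDefault, Optional<String> indexDefault) {
            // Precedence: request > substituted component template > index setting > "_none".
            return requestPipeline.orElseGet(() -> substituteDefault.orElseGet(() -> indexDefault.orElse("_none")));
        }

        public static void main(String[] args) {
            System.out.println(resolve(Optional.of("index-pipeline"), Optional.of("substitute-pipeline"), Optional.empty())); // index-pipeline
            System.out.println(resolve(Optional.empty(), Optional.of("substitute-pipeline"), Optional.empty()));             // substitute-pipeline
            System.out.println(resolve(Optional.empty(), Optional.empty(), Optional.empty()));                               // _none
        }
    }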
diff --git a/server/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java
index 4e0fe14fb1def..b534a6be0dc5f 100644
--- a/server/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java
@@ -73,7 +73,7 @@ public void testHandleRequestLogsThenForwards() throws Exception {
         RestChannel channel = mock(RestChannel.class);
         NodeClient client = mock(NodeClient.class);

-        final Level deprecationLevel = randomBoolean() ? null : randomFrom(Level.WARN, DeprecationLogger.CRITICAL);
+        final Level deprecationLevel = randomFrom(Level.WARN, DeprecationLogger.CRITICAL);

         DeprecationRestHandler deprecatedHandler = new DeprecationRestHandler(
             handler,
@@ -159,17 +159,55 @@ public void testInvalidHeaderValueEmpty() {
     public void testSupportsBulkContentTrue() {
         when(handler.supportsBulkContent()).thenReturn(true);
         assertTrue(
-            new DeprecationRestHandler(handler, METHOD, PATH, null, deprecationMessage, deprecationLogger, false).supportsBulkContent()
+            new DeprecationRestHandler(handler, METHOD, PATH, Level.WARN, deprecationMessage, deprecationLogger, false)
+                .supportsBulkContent()
         );
     }

     public void testSupportsBulkContentFalse() {
         when(handler.supportsBulkContent()).thenReturn(false);
         assertFalse(
-            new DeprecationRestHandler(handler, METHOD, PATH, null, deprecationMessage, deprecationLogger, false).supportsBulkContent()
+            new DeprecationRestHandler(handler, METHOD, PATH, Level.WARN, deprecationMessage, deprecationLogger, false)
+                .supportsBulkContent()
         );
     }

+    public void testDeprecationLevel() {
+        DeprecationRestHandler handler = new DeprecationRestHandler(
+            this.handler,
+            METHOD,
+            PATH,
+            Level.WARN,
+            deprecationMessage,
+            deprecationLogger,
+            false
+        );
+        assertEquals(Level.WARN, handler.getDeprecationLevel());
+
+        handler = new DeprecationRestHandler(
+            this.handler,
+            METHOD,
+            PATH,
+            DeprecationLogger.CRITICAL,
+            deprecationMessage,
+            deprecationLogger,
+            false
+        );
+        assertEquals(DeprecationLogger.CRITICAL, handler.getDeprecationLevel());
+
+        IllegalArgumentException exception = expectThrows(
+            IllegalArgumentException.class,
+            () -> new DeprecationRestHandler(this.handler, METHOD, PATH, null, deprecationMessage, deprecationLogger, false)
+        );
+        assertEquals(exception.getMessage(), "unexpected deprecation logger level: null, expected either 'CRITICAL' or 'WARN'");
+
+        exception = expectThrows(
+            IllegalArgumentException.class,
+            () -> new DeprecationRestHandler(this.handler, METHOD, PATH, Level.OFF, deprecationMessage, deprecationLogger, false)
+        );
+        assertEquals(exception.getMessage(), "unexpected deprecation logger level: OFF, expected either 'CRITICAL' or 'WARN'");
+    }
+
     /**
      * {@code ASCIIHeaderGenerator} only uses characters expected to be valid in headers (simplified US-ASCII).
      */
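The new testDeprecationLevel pins down the constructor's argument check: only WARN and CRITICAL are accepted. A minimal sketch of that validation, assuming a log4j2 classpath — the CRITICAL constant here is a stand-in for Elasticsearch's custom DeprecationLogger.CRITICAL level, and the message format is copied from the assertions above:

    import org.apache.logging.log4j.Level;

    public class DeprecationLevelCheck {
        // Stand-in for DeprecationLogger.CRITICAL (a custom level between ERROR and WARN in Elasticsearch).
        static final Level CRITICAL = Level.forName("CRITICAL", Level.WARN.intLevel() - 1);

        static Level requireWarnOrCritical(Level level) {
            if (level != Level.WARN && level != CRITICAL) {
                throw new IllegalArgumentException(
                    "unexpected deprecation logger level: " + level + ", expected either 'CRITICAL' or 'WARN'"
                );
            }
            return level;
        }

        public static void main(String[] args) {
            System.out.println(requireWarnOrCritical(Level.WARN)); // ok
            System.out.println(requireWarnOrCritical(CRITICAL));   // ok
            // requireWarnOrCritical(null) or Level.OFF -> IllegalArgumentException, as the test asserts
        }
    }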
diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
index 1d946681661e7..8f1904ce42438 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
@@ -9,6 +9,7 @@

 package org.elasticsearch.rest;

+import org.apache.logging.log4j.Level;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.common.breaker.CircuitBreaker;
@@ -17,6 +18,7 @@
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.io.stream.BytesStream;
 import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.BoundTransportAddress;
@@ -85,6 +87,7 @@
 import static org.mockito.ArgumentMatchers.anyMap;
 import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.clearInvocations;
 import static org.mockito.Mockito.doCallRealMethod;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
@@ -349,18 +352,22 @@ public void testRegisterAsDeprecatedHandler() {
         String path = "/_" + randomAlphaOfLengthBetween(1, 6);
         RestHandler handler = (request, channel, client) -> {};
         String deprecationMessage = randomAlphaOfLengthBetween(1, 10);
-        RestApiVersion deprecatedInVersion = RestApiVersion.current();
-        Route route = Route.builder(method, path).deprecated(deprecationMessage, deprecatedInVersion).build();
+        List<RestApiVersion> replacedInVersions = List.of(RestApiVersion.current(), RestApiVersion.minimumSupported());
+        for (RestApiVersion replacedInVersion : replacedInVersions) {
+            Level level = replacedInVersion == RestApiVersion.current() ? Level.WARN : DeprecationLogger.CRITICAL;
+            clearInvocations(controller);
+            Route route = Route.builder(method, path).deprecatedForRemoval(deprecationMessage, replacedInVersion).build();

-        // don't want to test everything -- just that it actually wraps the handler
-        doCallRealMethod().when(controller).registerHandler(route, handler);
-        doCallRealMethod().when(controller)
-            .registerAsDeprecatedHandler(method, path, deprecatedInVersion, handler, deprecationMessage, null);
+            // don't want to test everything -- just that it actually wraps the handler
+            doCallRealMethod().when(controller).registerHandler(route, handler);
+            doCallRealMethod().when(controller)
+                .registerAsDeprecatedHandler(method, path, replacedInVersion, handler, deprecationMessage, level);

-        controller.registerHandler(route, handler);
+            controller.registerHandler(route, handler);

-        verify(controller).registerHandler(eq(method), eq(path), eq(deprecatedInVersion), any(DeprecationRestHandler.class));
+            verify(controller).registerHandler(eq(method), eq(path), eq(replacedInVersion), any(DeprecationRestHandler.class));
+        }
     }

     public void testRegisterAsReplacedHandler() {
@@ -383,17 +390,40 @@ public void testRegisterAsReplacedHandler() {
             + path
             + "] instead.";

-        final Route route = Route.builder(method, path).replaces(replacedMethod, replacedPath, previous).build();
-
-        // don't want to test everything -- just that it actually wraps the handlers
-        doCallRealMethod().when(controller).registerHandler(route, handler);
-        doCallRealMethod().when(controller)
-            .registerAsReplacedHandler(method, path, current, handler, replacedMethod, replacedPath, previous);
-
-        controller.registerHandler(route, handler);
+        List<RestApiVersion> replacedInVersions = List.of(current, previous);
+        for (RestApiVersion replacedInVersion : replacedInVersions) {
+            clearInvocations(controller);
+            Route route = Route.builder(method, path).replaces(replacedMethod, replacedPath, replacedInVersion).build();
+            // don't want to test everything -- just that it actually wraps the handler
+            doCallRealMethod().when(controller).registerHandler(route, handler);
+            Level level = replacedInVersion == current ? Level.WARN : DeprecationLogger.CRITICAL;
+            doCallRealMethod().when(controller)
+                .registerAsReplacedHandler(
+                    method,
+                    path,
+                    current,
+                    handler,
+                    replacedMethod,
+                    replacedPath,
+                    replacedInVersion,
+                    deprecationMessage,
+                    level
+                );

-        verify(controller).registerHandler(method, path, current, handler);
-        verify(controller).registerAsDeprecatedHandler(replacedMethod, replacedPath, previous, handler, deprecationMessage);
+            controller.registerHandler(route, handler);
+
+            // verify we registered the primary handler
+            verify(controller).registerHandler(method, path, current, handler);
+            // verify we register the replaced handler with the correct deprecation message and level
+            verify(controller).registerAsDeprecatedHandler(
+                replacedMethod,
+                replacedPath,
+                replacedInVersion,
+                handler,
+                deprecationMessage,
+                level
+            );
+        }
     }

     public void testRegisterSecondMethodWithDifferentNamedWildcard() {
diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
index fff5dcb4bb80b..5e1296c354015 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java
@@ -22,7 +22,7 @@
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.TotalHitCountCollectorManager;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.tests.util.TestUtil;
@@ -96,7 +96,6 @@ public void testAddingCancellationActions() throws IOException {
     }

     public void testCancellableCollector() throws IOException {
-        TotalHitCountCollector collector1 = new TotalHitCountCollector();
         Runnable cancellation = () -> { throw new TaskCancelledException("cancelled"); };
         ContextIndexSearcher searcher = new ContextIndexSearcher(
             reader,
@@ -106,16 +105,15 @@ public void testCancellableCollector() throws IOException {
             true
         );

-        searcher.search(new MatchAllDocsQuery(), collector1);
-        assertThat(collector1.getTotalHits(), equalTo(reader.numDocs()));
+        Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager());
+        assertThat(totalHits, equalTo(reader.numDocs()));

         searcher.addQueryCancellation(cancellation);
-        expectThrows(TaskCancelledException.class, () -> searcher.search(new MatchAllDocsQuery(), collector1));
+        expectThrows(TaskCancelledException.class, () -> searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()));

         searcher.removeQueryCancellation(cancellation);
-        TotalHitCountCollector collector2 = new TotalHitCountCollector();
-        searcher.search(new MatchAllDocsQuery(), collector2);
-        assertThat(collector2.getTotalHits(), equalTo(reader.numDocs()));
+        Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager());
+        assertThat(totalHits2, equalTo(reader.numDocs()));
     }

     public void testExitableDirectoryReader() throws IOException {
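The collector changes above (and the EngineTestCase changes later in this diff) all migrate from a raw TotalHitCountCollector to the CollectorManager-based search API, where search() returns the reduced result directly. A minimal runnable sketch of that pattern, assuming only lucene-core on the classpath:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.TotalHitCountCollectorManager;
    import org.apache.lucene.store.ByteBuffersDirectory;

    public class TotalHitsExample {
        public static void main(String[] args) throws Exception {
            try (ByteBuffersDirectory dir = new ByteBuffersDirectory()) {
                try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
                    writer.addDocument(new Document()); // two empty documents are enough for a count
                    writer.addDocument(new Document());
                }
                try (DirectoryReader reader = DirectoryReader.open(dir)) {
                    IndexSearcher searcher = new IndexSearcher(reader);
                    // search(Query, CollectorManager<C, T>) returns the reduced T, here an Integer.
                    Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager());
                    System.out.println(totalHits); // 2
                }
            }
        }
    }

The manager variant also lets Lucene run the collection concurrently across segments, which the single mutable collector could not.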
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
index 8b8a4f97d540e..ae4ed3568683a 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
@@ -14,16 +14,20 @@
 import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.core.CheckedConsumer;
+import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
 import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;

 import java.io.IOException;
+import java.util.Map;
 import java.util.function.Consumer;

 import static java.util.Collections.singleton;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;

 public class ExtendedStatsAggregatorTests extends AggregatorTestCase {
     private static final double TOLERANCE = 1e-5;
@@ -49,6 +53,37 @@ public void testEmpty() throws IOException {
         });
     }

+    public void testEmptyDate() throws IOException {
+        DateFormatter.forPattern("epoch_millis");
+        final MappedFieldType ft = new DateFieldMapper.DateFieldType(
+            "field",
+            true,
+            true,
+            false,
+            true,
+            DateFormatter.forPattern("epoch_millis"),
+            DateFieldMapper.Resolution.MILLISECONDS,
+            null,
+            null,
+            Map.of()
+        );
+        testCase(ft, iw -> {}, stats -> {
+            assertEquals(0d, stats.getCount(), 0);
+            assertEquals(0d, stats.getSum(), 0);
+            assertEquals(Float.NaN, stats.getAvg(), 0);
+            assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0);
+            assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0);
+            assertEquals(Double.NaN, stats.getVariance(), 0);
+            assertEquals(Double.NaN, stats.getVariancePopulation(), 0);
+            assertEquals(Double.NaN, stats.getVarianceSampling(), 0);
+            assertEquals(Double.NaN, stats.getStdDeviation(), 0);
+            assertEquals(Double.NaN, stats.getStdDeviationPopulation(), 0);
+            assertEquals(Double.NaN, stats.getStdDeviationSampling(), 0);
+            assertEquals(0d, stats.getSumOfSquares(), 0);
+            assertFalse(AggregationInspectionHelper.hasValue(stats));
+        });
+    }
+
     public void testRandomDoubles() throws IOException {
         MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
         final ExtendedSimpleStatsAggregator expected = new ExtendedSimpleStatsAggregator();
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java
index 9f48cb2279320..ddd1fa987ad2b 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java
@@ -18,7 +18,9 @@
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.core.CheckedConsumer;
+import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
@@ -73,6 +75,30 @@ public void testEmpty() throws IOException {
         }, ft);
     }

+    public void testEmptyDate() throws IOException {
+        DateFormatter.forPattern("epoch_millis");
+        final MappedFieldType ft = new DateFieldMapper.DateFieldType(
+            "field",
+            true,
+            true,
+            false,
+            true,
+            DateFormatter.forPattern("epoch_millis"),
+            DateFieldMapper.Resolution.MILLISECONDS,
+            null,
+            null,
+            Map.of()
+        );
+        testCase(stats("_name").field(ft.name()), iw -> {}, stats -> {
+            assertEquals(0d, stats.getCount(), 0);
+            assertEquals(0d, stats.getSum(), 0);
+            assertEquals(Float.NaN, stats.getAvg(), 0);
+            assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0);
+            assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0);
+            assertFalse(AggregationInspectionHelper.hasValue(stats));
+        }, ft);
+    }
+
     public void testRandomDoubles() throws IOException {
         final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.DOUBLE);
         final SimpleStatsAggregator expected = new SimpleStatsAggregator();
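Both new testEmptyDate cases assert the same "empty stats" identities: count and sum start at 0, min/max start at the fold identities, and derived statistics come out NaN. A short sketch of why those values fall out of the accumulator shape, under the assumption that the aggregator folds doc values into running fields like these:

    public class EmptyStatsSketch {
        public static void main(String[] args) {
            long count = 0;
            double sum = 0;
            double min = Double.POSITIVE_INFINITY;  // identity for Math.min folding
            double max = Double.NEGATIVE_INFINITY;  // identity for Math.max folding
            double avg = sum / count;               // 0.0 / 0 -> NaN, matching stats.getAvg() on an empty index
            System.out.println(avg);                // NaN
            System.out.println(Math.min(min, 5.0)); // folding any real value immediately fixes min: 5.0
        }
    }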
diff --git a/settings.gradle b/settings.gradle
index 2926a9a303375..6767ce4a3e3c8 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -75,6 +75,8 @@ List projects = [
   'distribution:docker:ubi-docker-export',
   'distribution:docker:wolfi-docker-aarch64-export',
   'distribution:docker:wolfi-docker-export',
+  'distribution:docker:wolfi-ess-docker-aarch64-export',
+  'distribution:docker:wolfi-ess-docker-export',
   'distribution:packages:aarch64-deb',
   'distribution:packages:deb',
   'distribution:packages:aarch64-rpm',
diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java
index 1cc6043e0be17..de87772d5ae82 100644
--- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java
+++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java
@@ -695,8 +695,13 @@ public T get(long index) {
         }

         @Override
-        public T set(long index, T value) {
-            return in.set(index, value);
+        public void set(long index, T value) {
+            in.set(index, value);
+        }
+
+        @Override
+        public T getAndSet(long index, T value) {
+            return in.getAndSet(index, value);
         }

         @Override
diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
index 3e4925bb97efd..0b5803e9887d6 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
@@ -42,7 +42,7 @@
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.TotalHitCountCollectorManager;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
@@ -178,9 +178,8 @@ protected static void assertVisibleCount(Engine engine, int numDocs, boolean ref
             engine.refresh("test");
         }
         try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
-            final TotalHitCountCollector collector = new TotalHitCountCollector();
-            searcher.search(new MatchAllDocsQuery(), collector);
-            assertThat(collector.getTotalHits(), equalTo(numDocs));
+            Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager());
+            assertThat(totalHits, equalTo(numDocs));
         }
     }

@@ -971,9 +970,8 @@ protected static void assertVisibleCount(InternalEngine engine, int numDocs, boo
             engine.refresh("test");
         }
         try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
-            final TotalHitCountCollector collector = new TotalHitCountCollector();
-            searcher.search(new MatchAllDocsQuery(), collector);
-            assertThat(collector.getTotalHits(), equalTo(numDocs));
+            Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager());
+            assertThat(totalHits, equalTo(numDocs));
         }
     }

@@ -1170,9 +1168,8 @@ public static void assertOpsOnReplica(
         assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1);
         if (lastFieldValue != null) {
             try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) {
-                final TotalHitCountCollector collector = new TotalHitCountCollector();
-                searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
-                assertThat(collector.getTotalHits(), equalTo(1));
+                Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager());
+                assertThat(totalHits, equalTo(1));
             }
         }
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
index a7d18ff782400..ca26779f3376d 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
@@ -1537,7 +1537,7 @@ public void testSyntheticSourceKeepNone() throws IOException {
         SyntheticSourceExample example = syntheticSourceSupportForKeepTests(shouldUseIgnoreMalformed()).example(1);
         DocumentMapper mapper = createDocumentMapper(syntheticSourceMapping(b -> {
             b.startObject("field");
-            b.field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, "none");
+            b.field("synthetic_source_keep", "none");
             example.mapping().accept(b);
             b.endObject();
         }));
@@ -1548,7 +1548,7 @@ public void testSyntheticSourceKeepAll() throws IOException {
         SyntheticSourceExample example = syntheticSourceSupportForKeepTests(shouldUseIgnoreMalformed()).example(1);
         DocumentMapper mapperAll = createDocumentMapper(syntheticSourceMapping(b -> {
             b.startObject("field");
-            b.field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, "all");
+            b.field("synthetic_source_keep", "all");
             example.mapping().accept(b);
             b.endObject();
         }));
@@ -1565,7 +1565,7 @@ public void testSyntheticSourceKeepArrays() throws IOException {
         SyntheticSourceExample example = syntheticSourceSupportForKeepTests(shouldUseIgnoreMalformed()).example(1);
         DocumentMapper mapperAll = createDocumentMapper(syntheticSourceMapping(b -> {
             b.startObject("field");
-            b.field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, randomFrom("arrays", "all")); // Both options keep array source.
+            b.field("synthetic_source_keep", randomFrom("arrays", "all")); // Both options keep array source.
             example.mapping().accept(b);
             b.endObject();
         }));
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java
index 81e120511a40f..8dee5876aa207 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java
@@ -9,6 +9,7 @@

 package org.elasticsearch.logsdb.datageneration.datasource;

+import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification;
 import org.elasticsearch.logsdb.datageneration.FieldType;
 import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping;
@@ -115,7 +116,7 @@ public DataSourceResponse.LeafMappingParametersGenerator accept(DataSourceHandle
         }
     }

-    record ObjectMappingParametersGenerator(boolean isNested)
+    record ObjectMappingParametersGenerator(boolean isRoot, boolean isNested, ObjectMapper.Subobjects parentSubobjects)
         implements DataSourceRequest<DataSourceResponse.ObjectMappingParametersGenerator> {

         public DataSourceResponse.ObjectMappingParametersGenerator accept(DataSourceHandler handler) {
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java
index 89850cd56bbd0..81bd80f464525 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java
@@ -10,6 +10,7 @@
 package org.elasticsearch.logsdb.datageneration.datasource;

 import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping;
 import org.elasticsearch.test.ESTestCase;
@@ -78,11 +79,17 @@ private Supplier<Map<String, Object>> scaledFloatMapping(Map<String, Object> inj
     @Override
     public DataSourceResponse.ObjectMappingParametersGenerator handle(DataSourceRequest.ObjectMappingParametersGenerator request) {
         if (request.isNested()) {
+            assert request.parentSubobjects() != ObjectMapper.Subobjects.DISABLED;
+
             return new DataSourceResponse.ObjectMappingParametersGenerator(() -> {
                 var parameters = new HashMap<String, Object>();
+
                 if (ESTestCase.randomBoolean()) {
                     parameters.put("dynamic", ESTestCase.randomFrom("true", "false", "strict"));
                 }
+                if (ESTestCase.randomBoolean()) {
+                    parameters.put(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, "all"); // [arrays] doesn't apply to nested objects
+                }

                 return parameters;
             });
@@ -90,6 +97,33 @@ public DataSourceResponse.ObjectMappingParametersGenerator handle(DataSourceRequ
         return new DataSourceResponse.ObjectMappingParametersGenerator(() -> {
             var parameters = new HashMap<String, Object>();
+
+            // Changing subobjects from subobjects: false is not supported, but we can e.g. go from "true" to "false".
+            // TODO enable subobjects: auto
+            // It is disabled because it currently does not have auto flattening and that results in asserts being triggered when using
+            // copy_to.
+            if (ESTestCase.randomBoolean()) {
+                parameters.put(
+                    "subobjects",
+                    ESTestCase.randomValueOtherThan(
+                        ObjectMapper.Subobjects.AUTO,
+                        () -> ESTestCase.randomFrom(ObjectMapper.Subobjects.values())
+                    ).toString()
+                );
+            }
+
+            if (request.parentSubobjects() == ObjectMapper.Subobjects.DISABLED
+                || parameters.getOrDefault("subobjects", "true").equals("false")) {
+                // "enabled: false" is not compatible with subobjects: false
+                // changing "dynamic" from parent context is not compatible with subobjects: false
+                // changing subobjects value is not compatible with subobjects: false
+                if (ESTestCase.randomBoolean()) {
+                    parameters.put("enabled", "true");
+                }
+
+                return parameters;
+            }
+
             if (ESTestCase.randomBoolean()) {
                 parameters.put("dynamic", ESTestCase.randomFrom("true", "false", "strict", "runtime"));
             }
@@ -97,6 +131,11 @@ public DataSourceResponse.ObjectMappingParametersGenerator handle(DataSourceRequ
                 parameters.put("enabled", ESTestCase.randomFrom("true", "false"));
             }

+            if (ESTestCase.randomBoolean()) {
+                var value = request.isRoot() ? ESTestCase.randomFrom("none", "arrays") : ESTestCase.randomFrom("none", "arrays", "all");
+                parameters.put(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, value);
+            }
+
             return parameters;
         });
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java
index ebf13eb93ff4b..c1ec15a3479b3 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/Context.java
@@ -9,6 +9,7 @@

 package org.elasticsearch.logsdb.datageneration.fields;

+import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification;
 import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest;
 import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse;
@@ -31,9 +32,14 @@ class Context {
     private final AtomicInteger nestedFieldsCount;
     private final Set<String> eligibleCopyToDestinations;
     private final DynamicMapping parentDynamicMapping;
+    private final ObjectMapper.Subobjects currentSubobjectsConfig;

-    Context(DataGeneratorSpecification specification, DynamicMapping parentDynamicMapping) {
-        this(specification, "", 0, new AtomicInteger(0), new HashSet<>(), parentDynamicMapping);
+    Context(
+        DataGeneratorSpecification specification,
+        DynamicMapping parentDynamicMapping,
+        ObjectMapper.Subobjects currentSubobjectsConfig
+    ) {
+        this(specification, "", 0, new AtomicInteger(0), new HashSet<>(), parentDynamicMapping, currentSubobjectsConfig);
     }

     private Context(
@@ -42,7 +48,8 @@ private Context(
         int objectDepth,
         AtomicInteger nestedFieldsCount,
         Set<String> eligibleCopyToDestinations,
-        DynamicMapping parentDynamicMapping
+        DynamicMapping parentDynamicMapping,
+        ObjectMapper.Subobjects currentSubobjectsConfig
     ) {
         this.specification = specification;
         this.childFieldGenerator = specification.dataSource().get(new DataSourceRequest.ChildFieldGenerator(specification));
@@ -52,6 +59,7 @@ private Context(
         this.nestedFieldsCount = nestedFieldsCount;
         this.eligibleCopyToDestinations = eligibleCopyToDestinations;
         this.parentDynamicMapping = parentDynamicMapping;
+        this.currentSubobjectsConfig = currentSubobjectsConfig;
     }

     public DataGeneratorSpecification specification() {
@@ -66,21 +74,30 @@ public DataSourceResponse.FieldTypeGenerator fieldTypeGenerator(DynamicMapping d
         return specification.dataSource().get(new DataSourceRequest.FieldTypeGenerator(dynamicMapping));
     }

-    public Context subObject(String name, DynamicMapping dynamicMapping) {
+    public Context subObject(String name, DynamicMapping dynamicMapping, ObjectMapper.Subobjects subobjects) {
         return new Context(
             specification,
             pathToField(name),
             objectDepth + 1,
             nestedFieldsCount,
             eligibleCopyToDestinations,
-            dynamicMapping
+            dynamicMapping,
+            subobjects
         );
     }

-    public Context nestedObject(String name, DynamicMapping dynamicMapping) {
+    public Context nestedObject(String name, DynamicMapping dynamicMapping, ObjectMapper.Subobjects subobjects) {
         nestedFieldsCount.incrementAndGet();
         // copy_to can't be used across nested documents so all currently eligible fields are not eligible inside nested document.
-        return new Context(specification, pathToField(name), objectDepth + 1, nestedFieldsCount, new HashSet<>(), dynamicMapping);
+        return new Context(
+            specification,
+            pathToField(name),
+            objectDepth + 1,
+            nestedFieldsCount,
+            new HashSet<>(),
+            dynamicMapping,
+            subobjects
+        );
     }

     public boolean shouldAddDynamicObjectField(DynamicMapping dynamicMapping) {
@@ -99,10 +116,11 @@ public boolean shouldAddObjectField() {
         return childFieldGenerator.generateRegularSubObject();
     }

-    public boolean shouldAddNestedField() {
+    public boolean shouldAddNestedField(ObjectMapper.Subobjects subobjects) {
         if (objectDepth >= specification.maxObjectDepth()
             || nestedFieldsCount.get() >= specification.nestedFieldsLimit()
-            || parentDynamicMapping == DynamicMapping.FORCED) {
+            || parentDynamicMapping == DynamicMapping.FORCED
+            || subobjects == ObjectMapper.Subobjects.DISABLED) {
             return false;
         }

@@ -131,6 +149,14 @@ public DynamicMapping determineDynamicMapping(Map<String, Object> mappingParamet
         return dynamicParameter.equals("strict") ? DynamicMapping.FORBIDDEN : DynamicMapping.SUPPORTED;
     }

+    public ObjectMapper.Subobjects determineSubobjects(Map<String, Object> mappingParameters) {
+        if (currentSubobjectsConfig == ObjectMapper.Subobjects.DISABLED) {
+            return ObjectMapper.Subobjects.DISABLED;
+        }
+
+        return ObjectMapper.Subobjects.from(mappingParameters.getOrDefault("subobjects", "true"));
+    }
+
     public Set<String> getEligibleCopyToDestinations() {
         return eligibleCopyToDestinations;
     }
@@ -142,4 +168,8 @@ public void markFieldAsEligibleForCopyTo(String field) {
     private String pathToField(String field) {
         return path.isEmpty() ? field : path + "." + field;
     }
+
+    public ObjectMapper.Subobjects getCurrentSubobjectsConfig() {
+        return currentSubobjectsConfig;
+    }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java
index ba03b2f91c53c..83a68519d5de1 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/GenericSubObjectFieldDataGenerator.java
@@ -10,6 +10,7 @@
 package org.elasticsearch.logsdb.datageneration.fields;

 import org.elasticsearch.core.CheckedConsumer;
+import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.logsdb.datageneration.FieldDataGenerator;
 import org.elasticsearch.logsdb.datageneration.FieldType;
 import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest;
@@ -31,7 +32,7 @@ public class GenericSubObjectFieldDataGenerator {
         this.context = context;
     }

-    List<ChildField> generateChildFields(DynamicMapping dynamicMapping) {
+    List<ChildField> generateChildFields(DynamicMapping dynamicMapping, ObjectMapper.Subobjects subobjects) {
         var existingFieldNames = new HashSet<String>();
         // no child fields is legal
         var childFieldsCount = context.childFieldGenerator().generateChildFieldCount();
@@ -42,12 +43,24 @@ List<ChildField> generateChildFields(DynamicMapping dynamicMapping) {

             if (context.shouldAddDynamicObjectField(dynamicMapping)) {
                 result.add(
-                    new ChildField(fieldName, new ObjectFieldDataGenerator(context.subObject(fieldName, DynamicMapping.FORCED)), true)
+                    new ChildField(
+                        fieldName,
+                        new ObjectFieldDataGenerator(context.subObject(fieldName, DynamicMapping.FORCED, subobjects)),
+                        true
+                    )
                 );
             } else if (context.shouldAddObjectField()) {
-                result.add(new ChildField(fieldName, new ObjectFieldDataGenerator(context.subObject(fieldName, dynamicMapping)), false));
-            } else if (context.shouldAddNestedField()) {
-                result.add(new ChildField(fieldName, new NestedFieldDataGenerator(context.nestedObject(fieldName, dynamicMapping)), false));
+                result.add(
+                    new ChildField(fieldName, new ObjectFieldDataGenerator(context.subObject(fieldName, dynamicMapping, subobjects)), false)
+                );
+            } else if (context.shouldAddNestedField(subobjects)) {
+                result.add(
+                    new ChildField(
+                        fieldName,
+                        new NestedFieldDataGenerator(context.nestedObject(fieldName, dynamicMapping, subobjects)),
+                        false
+                    )
+                );
             } else {
                 var fieldTypeInfo = context.fieldTypeGenerator(dynamicMapping).generator().get();
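The Context changes above encode a one-way propagation rule: once a parent object disables subobjects, no descendant can re-enable them; otherwise the local mapping parameter (default "true") decides. A hedged sketch of determineSubobjects with simplified stand-in types:

    import java.util.Map;

    public class SubobjectsPropagationSketch {
        enum Subobjects { ENABLED, DISABLED, AUTO }

        static Subobjects determine(Subobjects parent, Map<String, String> mappingParameters) {
            if (parent == Subobjects.DISABLED) {
                return Subobjects.DISABLED; // disabling is inherited and cannot be undone below
            }
            return switch (mappingParameters.getOrDefault("subobjects", "true")) {
                case "false" -> Subobjects.DISABLED;
                case "auto" -> Subobjects.AUTO;
                default -> Subobjects.ENABLED;
            };
        }

        public static void main(String[] args) {
            System.out.println(determine(Subobjects.DISABLED, Map.of("subobjects", "true"))); // DISABLED
            System.out.println(determine(Subobjects.ENABLED, Map.of()));                      // ENABLED
        }
    }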
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java
index b5cd4f78aff95..69853debf9b77 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/NestedFieldDataGenerator.java
@@ -28,13 +28,14 @@ public class NestedFieldDataGenerator implements FieldDataGenerator {
         this.mappingParameters = context.specification()
             .dataSource()
-            .get(new DataSourceRequest.ObjectMappingParametersGenerator(true))
+            .get(new DataSourceRequest.ObjectMappingParametersGenerator(false, true, context.getCurrentSubobjectsConfig()))
             .mappingGenerator()
             .get();
         var dynamicMapping = context.determineDynamicMapping(mappingParameters);
+        var subobjects = context.determineSubobjects(mappingParameters);
         var genericGenerator = new GenericSubObjectFieldDataGenerator(context);
-        this.childFields = genericGenerator.generateChildFields(dynamicMapping);
+        this.childFields = genericGenerator.generateChildFields(dynamicMapping, subobjects);
     }
 
     @Override
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java
index 27c27e31702f7..701642c57619b 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/ObjectFieldDataGenerator.java
@@ -28,13 +28,14 @@ public class ObjectFieldDataGenerator implements FieldDataGenerator {
         this.mappingParameters = context.specification()
             .dataSource()
-            .get(new DataSourceRequest.ObjectMappingParametersGenerator(false))
+            .get(new DataSourceRequest.ObjectMappingParametersGenerator(false, false, context.getCurrentSubobjectsConfig()))
             .mappingGenerator()
             .get();
         var dynamicMapping = context.determineDynamicMapping(mappingParameters);
+        var subobjects = context.determineSubobjects(mappingParameters);
         var genericGenerator = new GenericSubObjectFieldDataGenerator(context);
-        this.childFields = genericGenerator.generateChildFields(dynamicMapping);
+        this.childFields = genericGenerator.generateChildFields(dynamicMapping, subobjects);
     }
 
     @Override
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java
index e85d18a1dac12..1374362df7f4a 100644
--- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/TopLevelObjectFieldDataGenerator.java
@@ -10,6 +10,7 @@
 package org.elasticsearch.logsdb.datageneration.fields;
 
 import org.elasticsearch.core.CheckedConsumer;
+import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.logsdb.datageneration.DataGeneratorSpecification;
 import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -37,7 +38,12 @@ public TopLevelObjectFieldDataGenerator(DataGeneratorSpecification specification
             this.mappingParameters = Map.of();
         } else {
             this.mappingParameters = new HashMap<>(
-                specification.dataSource().get(new DataSourceRequest.ObjectMappingParametersGenerator(false)).mappingGenerator().get()
+                // Value of subobjects here is for a parent of this object.
+                // Since there is no parent we pass ENABLED to allow to set subobjects to any value at top level.
+                specification.dataSource()
+                    .get(new DataSourceRequest.ObjectMappingParametersGenerator(true, false, ObjectMapper.Subobjects.ENABLED))
+                    .mappingGenerator()
+                    .get()
             );
             // Top-level object can't be disabled because @timestamp is a required field in data streams.
             this.mappingParameters.remove("enabled");
@@ -46,11 +52,15 @@ public TopLevelObjectFieldDataGenerator(DataGeneratorSpecification specification
                 ? DynamicMapping.FORBIDDEN
                 : DynamicMapping.SUPPORTED;
         }
-        this.context = new Context(specification, dynamicMapping);
+        var subobjects = ObjectMapper.Subobjects.from(mappingParameters.getOrDefault("subobjects", "true"));
+
+        // Value of subobjects here is for a parent of this object.
+        // Since there is no parent we pass ENABLED to allow to set subobjects to any value at top level.
+        this.context = new Context(specification, dynamicMapping, ObjectMapper.Subobjects.ENABLED);
         var genericGenerator = new GenericSubObjectFieldDataGenerator(context);
 
         this.predefinedFields = genericGenerator.generateChildFields(specification.predefinedFields());
-        this.generatedChildFields = genericGenerator.generateChildFields(dynamicMapping);
+        this.generatedChildFields = genericGenerator.generateChildFields(dynamicMapping, subobjects);
     }
 
     public CheckedConsumer<XContentBuilder, IOException> mappingWriter(Map<String, Object> customMappingParameters) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index 068a666d78d74..31c8e5bc3d457 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -900,10 +900,11 @@ public static long randomLongBetween(long min, long max) {
      * @return a random instant between a min and a max value with a random nanosecond precision
      */
     public static Instant randomInstantBetween(Instant minInstant, Instant maxInstant) {
-        return Instant.ofEpochSecond(
-            randomLongBetween(minInstant.getEpochSecond(), maxInstant.getEpochSecond()),
-            randomLongBetween(0, 999999999)
-        );
+        long epochSecond = randomLongBetween(minInstant.getEpochSecond(), maxInstant.getEpochSecond());
+        long minNanos = epochSecond == minInstant.getEpochSecond() ? minInstant.getNano() : 0;
+        long maxNanos = epochSecond == maxInstant.getEpochSecond() ? maxInstant.getNano() : 999999999;
+        long nanos = randomLongBetween(minNanos, maxNanos);
+        return Instant.ofEpochSecond(epochSecond, nanos);
     }
 
     /**
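The randomInstantBetween rewrite fixes a subtle range bug: nanoseconds used to be drawn from the full [0, 999999999] range regardless of the chosen second, so a result could fall before minInstant or after maxInstant whenever a boundary second was selected. The nano range is now clamped at both boundary seconds. A self-contained illustration (the concrete values are made up):

import java.time.Instant;

public class RandomInstantClampSketch {
    public static void main(String[] args) {
        Instant min = Instant.ofEpochSecond(100, 800_000_000);
        Instant max = Instant.ofEpochSecond(101, 200_000_000);

        // Old behavior: nanos uniform over [0, 999999999] even when the max second is drawn.
        Instant possibleBeforeFix = Instant.ofEpochSecond(101, 900_000_000);
        System.out.println(possibleBeforeFix.isAfter(max)); // true -> violated the contract

        // Fixed behavior: when epochSecond == max.getEpochSecond(), nanos <= max.getNano().
        Instant clamped = Instant.ofEpochSecond(101, max.getNano());
        System.out.println(clamped.isAfter(max)); // false
    }
}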
diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetadata.java b/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetadata.java
index 9954739e4a9c3..3a24b7ae0f14f 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetadata.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/TestCustomMetadata.java
@@ -13,9 +13,9 @@
 import org.elasticsearch.cluster.AbstractNamedDiffable;
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.cluster.metadata.Metadata;
-import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentParser;
@@ -94,8 +94,8 @@ public static T fromXContent(Function sup
     }
 
     @Override
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
-        return Iterators.single((builder, params) -> builder.field("data", getData()));
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
+        return ChunkedToXContent.builder(params).field("data", getData());
     }
 
     @Override
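This is the first of several conversions in this change from hand-rolled chunk iterators to the ChunkedToXContent.builder(params) fluent API; the emitted chunks stay equivalent, only the construction changes. A schematic of the pattern (SampleCustom and getData are placeholders, and this only compiles against the Elasticsearch artifacts providing these types):

import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.xcontent.ToXContent;

import java.util.Iterator;

abstract class SampleCustom implements ChunkedToXContent {
    abstract String getData();

    @Override
    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
        // Replaces Iterators.single((builder, p) -> builder.field("data", getData())).
        // Later hunks in this diff use the same builder for maps of objects:
        // xContentObjectFields(...) stands in for ChunkedToXContentHelper.xContentValuesMap(...),
        // and xContentObjectFieldObjects(...) for xContentFragmentValuesMap(...).
        return ChunkedToXContent.builder(params).field("data", getData());
    }
}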
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index e8c1aecb7abee..d17016f850300 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -719,10 +719,6 @@ protected boolean preserveTemplatesUponCompletion() {
      * all feature states, deleting system indices, system associated indices, and system data streams.
      */
     protected boolean resetFeatureStates() {
-        if (clusterHasFeature(RestTestLegacyFeatures.FEATURE_STATE_RESET_SUPPORTED) == false) {
-            return false;
-        }
-
         // ML reset fails when ML is disabled in versions before 8.7
         if (isMlEnabled() == false && clusterHasFeature(RestTestLegacyFeatures.ML_STATE_RESET_FALLBACK_ON_DISABLED) == false) {
             return false;
         }
@@ -917,22 +913,10 @@ private void wipeCluster() throws Exception {
                     .filter(name -> isXPackTemplate(name) == false)
                     .collect(Collectors.toList());
                 if (names.isEmpty() == false) {
-                    // Ideally we would want to check if the elected master node supports this feature and send the delete request
-                    // directly to that node, but node-specific feature checks is something we want to avoid if possible.
-                    if (clusterHasFeature(RestTestLegacyFeatures.DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED)) {
-                        try {
-                            adminClient().performRequest(new Request("DELETE", "_index_template/" + String.join(",", names)));
-                        } catch (ResponseException e) {
-                            logger.warn(() -> format("unable to remove multiple composable index templates %s", names), e);
-                        }
-                    } else {
-                        for (String name : names) {
-                            try {
-                                adminClient().performRequest(new Request("DELETE", "_index_template/" + name));
-                            } catch (ResponseException e) {
-                                logger.warn(() -> format("unable to remove composable index template %s", name), e);
-                            }
-                        }
+                    try {
+                        adminClient().performRequest(new Request("DELETE", "_index_template/" + String.join(",", names)));
+                    } catch (ResponseException e) {
+                        logger.warn(() -> format("unable to remove multiple composable index templates %s", names), e);
                     }
                 }
             } catch (Exception e) {
@@ -948,22 +932,10 @@ private void wipeCluster() throws Exception {
                     .filter(name -> isXPackTemplate(name) == false)
                     .collect(Collectors.toList());
                 if (names.isEmpty() == false) {
-                    // Ideally we would want to check if the elected master node supports this feature and send the delete request
-                    // directly to that node, but node-specific feature checks is something we want to avoid if possible.
-                    if (clusterHasFeature(RestTestLegacyFeatures.DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED)) {
-                        try {
-                            adminClient().performRequest(new Request("DELETE", "_component_template/" + String.join(",", names)));
-                        } catch (ResponseException e) {
-                            logger.warn(() -> format("unable to remove multiple component templates %s", names), e);
-                        }
-                    } else {
-                        for (String componentTemplate : names) {
-                            try {
-                                adminClient().performRequest(new Request("DELETE", "_component_template/" + componentTemplate));
-                            } catch (ResponseException e) {
-                                logger.warn(() -> format("unable to remove component template %s", componentTemplate), e);
-                            }
-                        }
+                    try {
+                        adminClient().performRequest(new Request("DELETE", "_component_template/" + String.join(",", names)));
+                    } catch (ResponseException e) {
+                        logger.warn(() -> format("unable to remove multiple component templates %s", names), e);
                     }
                 }
             } catch (Exception e) {
@@ -1141,7 +1113,6 @@ protected static void wipeAllIndices() throws IOException {
     }
 
     protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOException {
-        boolean includeHidden = clusterHasFeature(RestTestLegacyFeatures.HIDDEN_INDICES_SUPPORTED);
         try {
             // remove all indices except some history indices which can pop up after deleting all data streams but shouldn't interfere
             final List<String> indexPatterns = new ArrayList<>(
@@ -1151,7 +1122,7 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOException {
                 indexPatterns.add("-.security-*");
             }
             final Request deleteRequest = new Request("DELETE", Strings.collectionToCommaDelimitedString(indexPatterns));
-            deleteRequest.addParameter("expand_wildcards", "open,closed" + (includeHidden ? ",hidden" : ""));
+            deleteRequest.addParameter("expand_wildcards", "open,closed,hidden");
             final Response response = adminClient().performRequest(deleteRequest);
             try (InputStream is = response.getEntity().getContent()) {
                 assertTrue((boolean) XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true).get("acknowledged"));
@@ -1320,9 +1291,8 @@ private void wipeRollupJobs() throws IOException {
     }
 
     protected void refreshAllIndices() throws IOException {
-        boolean includeHidden = clusterHasFeature(RestTestLegacyFeatures.HIDDEN_INDICES_SUPPORTED);
         Request refreshRequest = new Request("POST", "/_refresh");
-        refreshRequest.addParameter("expand_wildcards", "open" + (includeHidden ? ",hidden" : ""));
+        refreshRequest.addParameter("expand_wildcards", "open,hidden");
         // Allow system index deprecation warnings
         refreshRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> {
             if (warnings.isEmpty()) {
@@ -2394,7 +2364,7 @@ protected static void waitForActiveLicense(final RestClient restClient) throws Exception {
             assertThat("Expecting non-null license status", status, notNullValue());
             assertThat("Expecting active license", status, equalTo("active"));
         }
-        });
+        }, 10, TimeUnit.MINUTES);
     }
 
     // TODO: replace usages of this with warning_regex or allowed_warnings_regex
@@ -2488,18 +2458,6 @@ public static void setIgnoredErrorResponseCodes(Request request, RestStatus... restStatuses) {
     }
 
     private static XContentType randomSupportedContentType() {
-        if (clusterHasFeature(RestTestLegacyFeatures.SUPPORTS_TRUE_BINARY_RESPONSES) == false) {
-            // Very old versions encode binary stored fields using base64 in all formats, not just JSON, but we expect to see raw binary
-            // fields in non-JSON formats, so we stick to JSON in these cases.
-            return XContentType.JSON;
-        }
-
-        if (clusterHasFeature(RestTestLegacyFeatures.SUPPORTS_VENDOR_XCONTENT_TYPES) == false) {
-            // The VND_* formats were introduced part-way through the 7.x series for compatibility with 8.x, but are not supported by older
-            // 7.x versions.
-            return randomFrom(XContentType.JSON, XContentType.CBOR, XContentType.YAML, XContentType.SMILE);
-        }
-
         return randomFrom(XContentType.values());
     }
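All of these removals follow one mechanical rule: a branch guarded by a historical feature that every still-supported cluster version already has collapses to its feature-present arm. Reduced to code (illustrative names, not the framework API):

final class LegacyGateSketch {
    // Before the cleanup the parameter depended on HIDDEN_INDICES_SUPPORTED (introduced in 7.7.0);
    // with every tested cluster at or above that version the gate is constant-true and is inlined.
    static String expandWildcardsParam(boolean clusterOlderThan7_7) {
        if (clusterOlderThan7_7) {
            return "open";      // unreachable for all supported clusters
        }
        return "open,hidden";   // the single remaining behavior after this change
    }
}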
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java
index 427398b9a8c0e..194dfc057b84f 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java
@@ -27,23 +27,8 @@ public class RestTestLegacyFeatures implements FeatureSpecification {
     public static final NodeFeature ML_STATE_RESET_FALLBACK_ON_DISABLED = new NodeFeature("ml.state_reset_fallback_on_disabled");
 
     @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature FEATURE_STATE_RESET_SUPPORTED = new NodeFeature("system_indices.feature_state_reset_supported");
-    public static final NodeFeature SYSTEM_INDICES_REST_ACCESS_ENFORCED = new NodeFeature("system_indices.rest_access_enforced");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature SYSTEM_INDICES_REST_ACCESS_DEPRECATED = new NodeFeature("system_indices.rest_access_deprecated");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature HIDDEN_INDICES_SUPPORTED = new NodeFeature("indices.hidden_supported");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
     public static final NodeFeature COMPONENT_TEMPLATE_SUPPORTED = new NodeFeature("indices.component_template_supported");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED = new NodeFeature(
-        "indices.delete_template_multiple_names_supported"
-    );
     public static final NodeFeature ML_NEW_MEMORY_FORMAT = new NodeFeature("ml.new_memory_format");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature SUPPORTS_VENDOR_XCONTENT_TYPES = new NodeFeature("rest.supports_vendor_xcontent_types");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature SUPPORTS_TRUE_BINARY_RESPONSES = new NodeFeature("rest.supports_true_binary_responses");
 
     /** These are "pure test" features: normally we would not need them, and test for TransportVersion/fallback to Version (see for example
      * {@code ESRestTestCase#minimumTransportVersion()}. However, some tests explicitly check and validate the content of a response, so
@@ -61,21 +46,6 @@ public class RestTestLegacyFeatures implements FeatureSpecification {
     public static final NodeFeature DESIRED_NODE_API_SUPPORTED = new NodeFeature("desired_node_supported");
     public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update");
     public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature WATCHES_VERSION_IN_META = new NodeFeature("watcher.version_in_meta");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature SECURITY_ROLE_DESCRIPTORS_OPTIONAL = new NodeFeature("security.role_descriptors_optional");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM = new NodeFeature(
-        "search.aggregations.force_interval_selection_on_date_histogram"
-    );
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature TRANSFORM_NEW_API_ENDPOINT = new NodeFeature("transform.new_api_endpoint");
-    // Ref: https://github.com/elastic/elasticsearch/pull/65205
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature ML_INDICES_HIDDEN = new NodeFeature("ml.indices_hidden");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature ML_ANALYTICS_MAPPINGS = new NodeFeature("ml.analytics_mappings");
     public static final NodeFeature TSDB_NEW_INDEX_FORMAT = new NodeFeature("indices.tsdb_new_format");
     public static final NodeFeature TSDB_GENERALLY_AVAILABLE = new NodeFeature("indices.tsdb_supported");
@@ -104,14 +74,10 @@ public class RestTestLegacyFeatures implements FeatureSpecification {
     @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
     public static final NodeFeature REPLICATION_OF_CLOSED_INDICES = new NodeFeature("indices.closed_replication_supported");
     @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature TASK_INDEX_SYSTEM_INDEX = new NodeFeature("tasks.moved_to_system_index");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
     public static final NodeFeature SOFT_DELETES_ENFORCED = new NodeFeature("indices.soft_deletes_enforced");
     @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
     public static final NodeFeature NEW_TRANSPORT_COMPRESSED_SETTING = new NodeFeature("transport.new_compressed_setting");
     @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    public static final NodeFeature SHUTDOWN_SUPPORTED = new NodeFeature("shutdown.supported");
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
     public static final NodeFeature SERVICE_ACCOUNTS_SUPPORTED = new NodeFeature("auth.service_accounts_supported");
     @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
     public static final NodeFeature TRANSFORM_SUPPORTED = new NodeFeature("transform.supported");
@@ -140,27 +106,14 @@ public class RestTestLegacyFeatures implements FeatureSpecification {
     @Override
     public Map<NodeFeature, Version> getHistoricalFeatures() {
         return Map.ofEntries(
-            entry(FEATURE_STATE_RESET_SUPPORTED, Version.V_7_13_0),
-            entry(SYSTEM_INDICES_REST_ACCESS_ENFORCED, Version.V_8_0_0),
-            entry(SYSTEM_INDICES_REST_ACCESS_DEPRECATED, Version.V_7_10_0),
-            entry(HIDDEN_INDICES_SUPPORTED, Version.V_7_7_0),
             entry(COMPONENT_TEMPLATE_SUPPORTED, Version.V_7_8_0),
-            entry(DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED, Version.V_7_13_0),
             entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0),
             entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0),
             entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0),
             entry(ML_NEW_MEMORY_FORMAT, Version.V_8_11_0),
-            entry(SUPPORTS_VENDOR_XCONTENT_TYPES, Version.V_7_11_0),
-            entry(SUPPORTS_TRUE_BINARY_RESPONSES, Version.V_7_7_0),
             entry(TRANSPORT_VERSION_SUPPORTED, VERSION_INTRODUCING_TRANSPORT_VERSIONS),
             entry(STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION, Version.V_8_11_0),
             entry(ML_MEMORY_OVERHEAD_FIXED, Version.V_8_2_1),
-            entry(WATCHES_VERSION_IN_META, Version.V_7_13_0),
-            entry(SECURITY_ROLE_DESCRIPTORS_OPTIONAL, Version.V_7_3_0),
-            entry(SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM, Version.V_7_2_0),
-            entry(TRANSFORM_NEW_API_ENDPOINT, Version.V_7_5_0),
-            entry(ML_INDICES_HIDDEN, Version.V_7_7_0),
-            entry(ML_ANALYTICS_MAPPINGS, Version.V_7_3_0),
             entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1),
             entry(DESIRED_NODE_API_SUPPORTED, Version.V_8_1_0),
             entry(TSDB_NEW_INDEX_FORMAT, Version.V_8_2_0),
@@ -173,10 +126,8 @@ public Map<NodeFeature, Version> getHistoricalFeatures() {
             entry(INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED, Version.V_8_0_0),
             entry(DEPRECATION_WARNINGS_LEAK_FIXED, Version.V_7_17_9),
             entry(REPLICATION_OF_CLOSED_INDICES, Version.V_7_2_0),
-            entry(TASK_INDEX_SYSTEM_INDEX, Version.V_7_10_0),
             entry(SOFT_DELETES_ENFORCED, Version.V_8_0_0),
             entry(NEW_TRANSPORT_COMPRESSED_SETTING, Version.V_7_14_0),
-            entry(SHUTDOWN_SUPPORTED, Version.V_7_15_0),
             entry(SERVICE_ACCOUNTS_SUPPORTED, Version.V_7_13_0),
             entry(TRANSFORM_SUPPORTED, Version.V_7_2_0),
             entry(SLM_SUPPORTED, Version.V_7_4_0),
diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java
index 7df791bf11559..5adf01a2a0e7d 100644
--- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java
+++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java
@@ -19,7 +19,8 @@ public enum FeatureFlag {
     TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null),
     FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null),
     CHUNKING_SETTINGS_ENABLED("es.inference_chunking_settings_feature_flag_enabled=true", Version.fromString("8.16.0"), null),
-    INFERENCE_SCALE_TO_ZERO("es.inference_scale_to_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null);
+    INFERENCE_SCALE_TO_ZERO("es.inference_scale_to_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null),
+    INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null);
 
     public final String systemProperty;
     public final Version from;
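Each FeatureFlag constant pairs the JVM system property that enables the flag with the version from which it applies; test clusters that declare the flag get the property passed through to nodes in that range. The new constant can be inspected directly (the printed values follow from the enum arguments above):

import org.elasticsearch.test.cluster.FeatureFlag;

class FeatureFlagSketch {
    public static void main(String[] args) {
        FeatureFlag flag = FeatureFlag.INFERENCE_DEFAULT_ELSER;
        // Both fields are public on the enum, as shown at the end of the hunk above.
        System.out.println(flag.systemProperty); // es.inference_default_elser_feature_flag_enabled=true
        System.out.println(flag.from);           // 8.16.0
    }
}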
diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
index 7cfbd2b3a57f1..86c3f42a6a8ec 100644
--- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
+++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
@@ -371,14 +371,10 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOException {
             ? executionContext.getClientYamlTestCandidate().getTestPath()
             : null;
 
-        // #84038 and #84089 mean that this assertion fails when running against < 7.17.2 and 8.0.0 released versions
-        // This is really difficult to express just with features, so I will break it down into 2 parts: version check for v7,
-        // and feature check for v8. This way the version check can be removed once we move to v9
-        @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-        var fixedInV7 = executionContext.clusterHasFeature("gte_v7.17.2", false)
-            && executionContext.clusterHasFeature("gte_v8.0.0", false) == false;
-        var fixedProductionHeader = fixedInV7
-            || executionContext.clusterHasFeature(RestTestLegacyFeatures.REST_ELASTIC_PRODUCT_HEADER_PRESENT.id(), false);
+        var fixedProductionHeader = executionContext.clusterHasFeature(
+            RestTestLegacyFeatures.REST_ELASTIC_PRODUCT_HEADER_PRESENT.id(),
+            false
+        );
         if (fixedProductionHeader) {
             checkElasticProductHeader(response.getHeaders("X-elastic-product"));
         }
diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java
index 5c885ad718d8c..38c654f94fff3 100644
--- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java
+++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java
@@ -16,7 +16,7 @@
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.ToXContent;
@@ -118,8 +118,8 @@ public TransportVersion getMinimalSupportedVersion() {
     }
 
     @Override
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
-        return ChunkedToXContentHelper.xContentValuesMap(POLICIES_FIELD.getPreferredName(), policies);
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
+        return ChunkedToXContent.builder(params).xContentObjectFields(POLICIES_FIELD.getPreferredName(), policies);
     }
 
     @Override
diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle
index eb0796672a174..158cccb1b6ea2 100644
--- a/x-pack/plugin/build.gradle
+++ b/x-pack/plugin/build.gradle
@@ -83,5 +83,6 @@ tasks.named("precommit").configure {
 tasks.named("yamlRestCompatTestTransform").configure({ task ->
   task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility")
   task.skipTest("wildcard/30_ignore_above_synthetic_source/wildcard field type ignore_above", "Temporary until backported")
+  task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs")
 })
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
index b7795fd3b579e..6b54f7a7dddce 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java
@@ -150,9 +150,9 @@ public void writeTo(StreamOutput out) throws IOException {
     @Override
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
         return ChunkedToXContent.builder(params)
-            .object(PATTERNS_FIELD.getPreferredName(), b -> b.appendXContentObjects(patterns))
-            .object(FOLLOWED_LEADER_INDICES_FIELD.getPreferredName(), b -> b.appendEntries(followedLeaderIndexUUIDs))
-            .object(HEADERS.getPreferredName(), b -> b.appendEntries(headers));
+            .xContentObjectFieldObjects(PATTERNS_FIELD.getPreferredName(), patterns)
+            .object(FOLLOWED_LEADER_INDICES_FIELD.getPreferredName(), followedLeaderIndexUUIDs)
+            .object(HEADERS.getPreferredName(), headers);
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java
index 0cd5d617752f4..b949e44ef036a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java
@@ -13,7 +13,7 @@
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.ToXContent;
@@ -98,8 +98,8 @@ public void writeTo(StreamOutput out) throws IOException {
     }
 
     @Override
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
-        return ChunkedToXContentHelper.xContentFragmentValuesMap(POLICIES.getPreferredName(), policies);
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
+        return ChunkedToXContent.builder(params).xContentObjectFieldObjects(POLICIES.getPreferredName(), policies);
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
index d4f2ecb36e95d..3674103eda215 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
@@ -15,10 +15,9 @@
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.metadata.Metadata.Custom;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.ToXContent;
@@ -105,11 +104,10 @@ public Diff<Custom> diff(Custom previousState) {
     }
 
     @Override
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
-        return Iterators.concat(
-            ChunkedToXContentHelper.xContentValuesMap(POLICIES_FIELD.getPreferredName(), policyMetadatas),
-            Iterators.single((builder, params) -> builder.field(OPERATION_MODE_FIELD.getPreferredName(), operationMode))
-        );
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
+        return ChunkedToXContent.builder(params)
+            .xContentObjectFields(POLICIES_FIELD.getPreferredName(), policyMetadatas)
+            .field(OPERATION_MODE_FIELD.getPreferredName(), operationMode);
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java
index 4a570bfde99a4..34ebdcb7f9f9f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java
@@ -237,7 +237,9 @@ public int computeNumberOfAllocations() {
             if (numberOfAllocations != null) {
                 return numberOfAllocations;
             } else {
-                if (adaptiveAllocationsSettings == null || adaptiveAllocationsSettings.getMinNumberOfAllocations() == null) {
+                if (adaptiveAllocationsSettings == null
+                    || adaptiveAllocationsSettings.getMinNumberOfAllocations() == null
+                    || adaptiveAllocationsSettings.getMinNumberOfAllocations() == 0) {
                     return DEFAULT_NUM_ALLOCATIONS;
                 } else {
                     return adaptiveAllocationsSettings.getMinNumberOfAllocations();
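Allowing an adaptive minimum of 0 (scale to zero) creates a corner case for the initial deployment size, which this computeNumberOfAllocations change handles: an explicit allocation count wins; otherwise a missing or zero adaptive minimum falls back to the default, so a new deployment never starts with zero allocations. A condensed, runnable restatement (the default constant's value here is illustrative):

final class AllocationsSketch {
    static final int DEFAULT_NUM_ALLOCATIONS = 1; // illustrative; the real constant lives in the action

    static int computeNumberOfAllocations(Integer explicit, Integer adaptiveMin) {
        if (explicit != null) {
            return explicit;                 // user pinned a value
        }
        if (adaptiveMin == null || adaptiveMin == 0) {
            return DEFAULT_NUM_ALLOCATIONS;  // unset, or scale-to-zero minimum: start with the default
        }
        return adaptiveMin;                  // otherwise start at the adaptive floor
    }

    public static void main(String[] args) {
        System.out.println(computeNumberOfAllocations(null, 0)); // 1, not 0
        System.out.println(computeNumberOfAllocations(null, 2)); // 2
        System.out.println(computeNumberOfAllocations(4, 0));    // 4
    }
}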
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java
index 1d6c5e564a442..fd07688b1578d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java
@@ -17,7 +17,7 @@
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.ToXContent;
@@ -92,8 +92,8 @@ public Map modelAliases() {
     }
 
     @Override
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
-        return ChunkedToXContentHelper.xContentValuesMap(MODEL_ALIASES.getPreferredName(), modelAliases);
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
+        return ChunkedToXContent.builder(params).xContentObjectFields(MODEL_ALIASES.getPreferredName(), modelAliases);
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationsSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationsSettings.java
index 19af6a3a4ef4c..d4eace8e96621 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationsSettings.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationsSettings.java
@@ -147,8 +147,8 @@ public AdaptiveAllocationsSettings merge(AdaptiveAllocationsSettings updates) {
     public ActionRequestValidationException validate() {
         ActionRequestValidationException validationException = new ActionRequestValidationException();
         boolean hasMinNumberOfAllocations = (minNumberOfAllocations != null && minNumberOfAllocations != -1);
-        if (hasMinNumberOfAllocations && minNumberOfAllocations < 1) {
-            validationException.addValidationError("[" + MIN_NUMBER_OF_ALLOCATIONS + "] must be a positive integer or null");
+        if (hasMinNumberOfAllocations && minNumberOfAllocations < 0) {
+            validationException.addValidationError("[" + MIN_NUMBER_OF_ALLOCATIONS + "] must be a non-negative integer or null");
         }
         boolean hasMaxNumberOfAllocations = (maxNumberOfAllocations != null && maxNumberOfAllocations != -1);
         if (hasMaxNumberOfAllocations && maxNumberOfAllocations < 1) {
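The loosened validation accepts 0 as a minimum (enabling scale-to-zero) while -1 and null continue to mean "unset", and the maximum must remain at least 1. A compact mirror of the min check (sketch, not the production class):

final class AdaptiveSettingsValidationSketch {
    // Mirrors validate() above: -1 and null mean "unset"; 0 is now a legal minimum.
    static boolean minIsValid(Integer min) {
        boolean hasMin = min != null && min != -1;
        return hasMin == false || min >= 0;
    }

    public static void main(String[] args) {
        System.out.println(minIsValid(0));    // true after this change (scale to zero)
        System.out.println(minIsValid(-5));   // false
        System.out.println(minIsValid(null)); // true (unset)
    }
}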
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
index 13a751829797f..4f18411ac3af6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
 
 import java.io.IOException;
+import java.util.Collection;
 
 /**
  * Response to {@link GetRoleMappingsAction get role-mappings API}.
@@ -21,6 +22,10 @@ public class GetRoleMappingsResponse extends ActionResponse {
 
     private final ExpressionRoleMapping[] mappings;
 
+    public GetRoleMappingsResponse(Collection<ExpressionRoleMapping> mappings) {
+        this(mappings.toArray(new ExpressionRoleMapping[0]));
+    }
+
     public GetRoleMappingsResponse(ExpressionRoleMapping... mappings) {
         this.mappings = mappings;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java
index da6ff6ad24c34..8f78fdbccd923 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.cluster.AbstractNamedDiffable;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.NamedDiff;
+import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -57,6 +58,7 @@ public final class RoleMappingMetadata extends AbstractNamedDiffable
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
-        return Iterators.concat(
-            ChunkedToXContentHelper.xContentValuesMap(POLICIES_FIELD.getPreferredName(), this.snapshotConfigurations),
-            Iterators.single((builder, params) -> {
-                builder.field(OPERATION_MODE_FIELD.getPreferredName(), operationMode);
-                builder.field(STATS_FIELD.getPreferredName(), this.slmStats);
-                return builder;
-            })
-        );
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
+        return ChunkedToXContent.builder(params)
+            .xContentObjectFields(POLICIES_FIELD.getPreferredName(), snapshotConfigurations)
+            .field(OPERATION_MODE_FIELD.getPreferredName(), operationMode)
+            .field(STATS_FIELD.getPreferredName(), slmStats);
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationSettingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationSettingsTests.java
index c86648f10f08b..d59fbb2a24ee0 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationSettingsTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationSettingsTests.java
@@ -17,7 +17,7 @@ public class AdaptiveAllocationSettingsTests extends AbstractWireSerializingTestCase<AdaptiveAllocationsSettings> {
     public static AdaptiveAllocationsSettings testInstance() {
         return new AdaptiveAllocationsSettings(
             randomBoolean() ? null : randomBoolean(),
-            randomBoolean() ? null : randomIntBetween(1, 2),
+            randomBoolean() ? null : randomIntBetween(0, 2),
             randomBoolean() ? null : randomIntBetween(2, 4)
         );
     }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java
index ccf5d4e6de9f8..df64c4f87410a 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java
@@ -20,7 +20,7 @@
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.TotalHitCountCollectorManager;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.bytes.BytesArray;
@@ -193,10 +193,8 @@ protected IndicesAccessControl getIndicesAccessControl() {
             int expectedHitCount = valuesHitCount[i];
             logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount);
 
-            TotalHitCountCollector countCollector = new TotalHitCountCollector();
-            indexSearcher.search(new MatchAllDocsQuery(), countCollector);
-
-            assertThat(countCollector.getTotalHits(), equalTo(expectedHitCount));
+            Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager());
+            assertThat(totalHits, equalTo(expectedHitCount));
 
             assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount));
         }
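The test now uses Lucene's collector-manager entry point: IndexSearcher.search(query, manager) creates one collector per slice and reduces them into a single result, which is why the hit count arrives as the return value instead of being read off a mutable collector afterwards. Minimal standalone usage against a tiny in-memory index (assumes a Lucene 9.x classpath):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TotalHitCountCollectorManager;
import org.apache.lucene.store.ByteBuffersDirectory;

public class HitCountExample {
    public static void main(String[] args) throws Exception {
        try (ByteBuffersDirectory dir = new ByteBuffersDirectory()) {
            try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
                writer.addDocument(new Document());
                writer.addDocument(new Document());
            }
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                IndexSearcher searcher = new IndexSearcher(reader);
                // The manager reduces the per-slice collectors to a single Integer.
                Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager());
                System.out.println(totalHits); // 2
            }
        }
    }
}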
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
index acf530fb7c5cc..26b306d6f1334 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
@@ -436,6 +436,11 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false));
         assertThat(kibanaRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true));
 
+        // Inference
+        assertTrue(kibanaRole.cluster().check("cluster:admin/xpack/inference/get", request, authentication));
+        assertTrue(kibanaRole.cluster().check("cluster:admin/xpack/inference/put", request, authentication));
+        assertTrue(kibanaRole.cluster().check("cluster:admin/xpack/inference/delete", request, authentication));
+
         // Enrich
         assertThat(kibanaRole.cluster().check("cluster:admin/xpack/enrich/put", request, authentication), is(true));
         assertThat(kibanaRole.cluster().check("cluster:admin/xpack/enrich/execute", request, authentication), is(true));
diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java
index f6dd43164e387..3fb9573dd7b62 100644
--- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java
+++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java
@@ -477,6 +477,111 @@ public void testDeprecationWarnMessagesCanBeIndexed() throws Exception {
 
     }
 
+    public void testDeprecateAndKeep() throws Exception {
+        final Request request = new Request("GET", "/_test_cluster/deprecated_but_dont_remove");
+        request.setEntity(buildSettingsRequest(Collections.singletonList(TEST_NOT_DEPRECATED_SETTING), "settings"));
+        Response response = performScopedRequest(request);
+
+        final List<String> deprecatedWarnings = getWarningHeaders(response.getHeaders());
+        assertThat(
+            extractWarningValuesFromWarningHeaders(deprecatedWarnings),
+            containsInAnyOrder("[/_test_cluster/deprecated_but_dont_remove] is deprecated, but no plans to remove quite yet")
+        );
+
+        assertBusy(() -> {
+            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId());
+
+            logger.warn(documents);
+
+            // only assert the relevant fields: level, message, and category
+            assertThat(
+                documents,
+                containsInAnyOrder(
+                    allOf(
+                        hasEntry("elasticsearch.event.category", "api"),
+                        hasEntry("log.level", "WARN"),
+                        hasEntry("message", "[/_test_cluster/deprecated_but_dont_remove] is deprecated, but no plans to remove quite yet")
+                    )
+                )
+            );
+        }, 30, TimeUnit.SECONDS);
+    }
+
+    public void testReplacesInCurrentVersion() throws Exception {
+        final Request request = new Request("GET", "/_test_cluster/old_name1"); // deprecated in current version
+        request.setEntity(buildSettingsRequest(Collections.singletonList(TEST_NOT_DEPRECATED_SETTING), "settings"));
+        Response response = performScopedRequest(request);
+
+        final List<String> deprecatedWarnings = getWarningHeaders(response.getHeaders());
+        assertThat(
+            extractWarningValuesFromWarningHeaders(deprecatedWarnings),
+            containsInAnyOrder("[GET /_test_cluster/old_name1] is deprecated! Use [GET /_test_cluster/new_name1] instead.")
+        );
+
+        assertBusy(() -> {
+            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId());
+
+            logger.warn(documents);
+
+            // only assert the relevant fields: level, message, and category
+            assertThat(
+                documents,
+                containsInAnyOrder(
+                    allOf(
+                        hasEntry("elasticsearch.event.category", "api"),
+                        hasEntry("log.level", "WARN"),
+                        hasEntry("message", "[GET /_test_cluster/old_name1] is deprecated! Use [GET /_test_cluster/new_name1] instead.")
+                    )
+                )
+            );
+        }, 30, TimeUnit.SECONDS);
+    }
+
+    public void testReplacesInCompatibleVersion() throws Exception {
+        final Request request = new Request("GET", "/_test_cluster/old_name2"); // deprecated in minimum supported version
+        request.setEntity(buildSettingsRequest(Collections.singletonList(TEST_DEPRECATED_SETTING_TRUE1), "deprecated_settings"));
+        final RequestOptions compatibleOptions = request.getOptions()
+            .toBuilder()
+            .addHeader("Accept", "application/vnd.elasticsearch+json;compatible-with=" + RestApiVersion.minimumSupported().major)
+            .addHeader("Content-Type", "application/vnd.elasticsearch+json;compatible-with=" + RestApiVersion.minimumSupported().major)
+            .build();
+        request.setOptions(compatibleOptions);
+        Response response = performScopedRequest(request);
+
+        final List<String> deprecatedWarnings = getWarningHeaders(response.getHeaders());
+        assertThat(
+            extractWarningValuesFromWarningHeaders(deprecatedWarnings),
+            containsInAnyOrder(
+                "[GET /_test_cluster/old_name2] is deprecated! Use [GET /_test_cluster/new_name2] instead.",
+                "You are using a compatible API for this request"
+            )
+        );
+        assertBusy(() -> {
+            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId());
+
+            logger.warn(documents);
+
+            // only assert the relevant fields: level, message, and category
+            assertThat(
+                documents,
+                containsInAnyOrder(
+                    allOf(
+
+                        hasEntry("elasticsearch.event.category", "compatible_api"),
+                        hasEntry("log.level", "CRITICAL"),
+                        hasEntry("message", "[GET /_test_cluster/old_name2] is deprecated! Use [GET /_test_cluster/new_name2] instead.")
+                    ),
+                    allOf(
+                        hasEntry("elasticsearch.event.category", "compatible_api"),
+                        hasEntry("log.level", "CRITICAL"),
+                        // this message comes from the test, not production code. this is the message for setting the deprecated setting
+                        hasEntry("message", "You are using a compatible API for this request")
+                    )
+                )
+            );
+        }, 30, TimeUnit.SECONDS);
+    }
+
     /**
      * Check that log messages about REST API compatibility are recorded to an index
      */
diff --git a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java b/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java
index 70942b04f85b8..9e5f999d1f825 100644
--- a/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java
+++ b/x-pack/plugin/deprecation/qa/rest/src/main/java/org/elasticsearch/xpack/deprecation/TestDeprecationHeaderRestAction.java
@@ -97,10 +97,23 @@ public List<Route> routes() {
         return List.of(
             // note: RestApiVersion.current() is acceptable here because this is test code -- ordinary callers of `.deprecated(...)`
             // should use an actual version
-            Route.builder(GET, "/_test_cluster/deprecated_settings").deprecated(DEPRECATED_ENDPOINT, RestApiVersion.current()).build(),
+            Route.builder(GET, "/_test_cluster/deprecated_settings")
+                .deprecatedForRemoval(DEPRECATED_ENDPOINT, RestApiVersion.current())
+                .build(),
+            // TODO: s/deprecated/deprecatedForRemoval when removing `deprecated` method
             Route.builder(POST, "/_test_cluster/deprecated_settings").deprecated(DEPRECATED_ENDPOINT, RestApiVersion.current()).build(),
-            Route.builder(GET, "/_test_cluster/compat_only").deprecated(DEPRECATED_ENDPOINT, RestApiVersion.minimumSupported()).build(),
-            Route.builder(GET, "/_test_cluster/only_deprecated_setting").build()
+            Route.builder(GET, "/_test_cluster/compat_only")
+                .deprecatedForRemoval(DEPRECATED_ENDPOINT, RestApiVersion.minimumSupported())
+                .build(),
+            Route.builder(GET, "/_test_cluster/only_deprecated_setting").build(),
+            Route.builder(GET, "/_test_cluster/deprecated_but_dont_remove")
+                .deprecateAndKeep("[/_test_cluster/deprecated_but_dont_remove] is deprecated, but no plans to remove quite yet")
+                .build(),
+            Route.builder(GET, "/_test_cluster/new_name1").replaces(GET, "/_test_cluster/old_name1", RestApiVersion.current()).build(),
+            Route.builder(GET, "/_test_cluster/new_name2")
+                .replaces(GET, "/_test_cluster/old_name2", RestApiVersion.minimumSupported())
+                .build()
+        );
     }
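These routes exercise the three deprecation flavors on the Route builder: deprecatedForRemoval for endpoints that will disappear in a later API version, deprecateAndKeep for endpoints that warn but stay, and replaces for renames where the new path keeps serving the old one. A schematic of the three (paths and message are illustrative):

import static org.elasticsearch.rest.RestRequest.Method.GET;

import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.rest.RestHandler.Route;

class RouteDeprecationSketch {
    static final String MSG = "[/example] is deprecated";

    // Warns and is only served until the named API version goes away.
    static final Route FOR_REMOVAL = Route.builder(GET, "/example")
        .deprecatedForRemoval(MSG, RestApiVersion.current())
        .build();

    // Stays indefinitely but emits a deprecation warning on every call.
    static final Route KEEP = Route.builder(GET, "/example_keep").deprecateAndKeep(MSG).build();

    // New canonical path that also answers for the old path and deprecates it in one step.
    static final Route RENAMED = Route.builder(GET, "/example_new")
        .replaces(GET, "/example_old", RestApiVersion.current())
        .build();
}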
diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java
index c5ab20469bf77..974180526d750 100644
--- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java
+++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java
@@ -117,7 +117,8 @@ public void testDoNotLogWithInfo() throws IOException {
             setLoggingLevel("INFO");
             RequestObjectBuilder builder = requestObjectBuilder().query("ROW DO_NOT_LOG_ME = 1");
             Map<String, Object> result = runEsql(builder);
-            assertEquals(2, result.size());
+            assertEquals(3, result.size());
+            assertThat(((Integer) result.get("took")).intValue(), greaterThanOrEqualTo(0));
             Map<String, String> colA = Map.of("name", "DO_NOT_LOG_ME", "type", "integer");
             assertEquals(List.of(colA), result.get("columns"));
             assertEquals(List.of(List.of(1)), result.get("values"));
@@ -136,7 +137,8 @@ public void testDoLogWithDebug() throws IOException {
             setLoggingLevel("DEBUG");
             RequestObjectBuilder builder = requestObjectBuilder().query("ROW DO_LOG_ME = 1");
             Map<String, Object> result = runEsql(builder);
-            assertEquals(2, result.size());
+            assertEquals(3, result.size());
+            assertThat(((Integer) result.get("took")).intValue(), greaterThanOrEqualTo(0));
             Map<String, String> colA = Map.of("name", "DO_LOG_ME", "type", "integer");
             assertEquals(List.of(colA), result.get("columns"));
             assertEquals(List.of(List.of(1)), result.get("values"));
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
index 39340ab745a4d..c3e9652f51fb4 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
@@ -249,7 +249,8 @@ public static RequestObjectBuilder jsonBuilder() throws IOException {
 
     public void testGetAnswer() throws IOException {
         Map<String, Object> answer = runEsql(requestObjectBuilder().query("row a = 1, b = 2"));
-        assertEquals(2, answer.size());
+        assertEquals(3, answer.size());
+        assertThat(((Integer) answer.get("took")).intValue(), greaterThanOrEqualTo(0));
         Map<String, String> colA = Map.of("name", "a", "type", "integer");
         Map<String, String> colB = Map.of("name", "b", "type", "integer");
         assertEquals(List.of(colA, colB), answer.get("columns"));
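With took now always present at the top level of an ES|QL response, these tests assert three keys instead of two and only require the value to be non-negative. A minimal JDK-only sketch of pulling the field out of a response body shaped like the one asserted here (the JSON literal is made up, and the crude string scan stands in for a real JSON parser):

public class TookFieldSketch {
    public static void main(String[] args) {
        String json = "{\"took\":5,\"columns\":[{\"name\":\"a\",\"type\":\"integer\"}],\"values\":[[1]]}";
        int start = json.indexOf("\"took\":") + 7;
        int end = json.indexOf(',', start);
        long tookMillis = Long.parseLong(json.substring(start, end));
        // The tests only require non-negative, not a specific value.
        System.out.println(tookMillis >= 0); // true
    }
}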
+"boolean|date|double|integer|ip|keyword|long|text|version least(first:boolean|date|double|integer|ip|keyword|long|text|version, ?rest...:boolean|date|double|integer|ip|keyword|long|text|version)" "keyword left(string:keyword|text, length:integer)" "integer length(string:keyword|text)" "integer locate(string:keyword|text, substring:keyword|text, ?start:integer)" @@ -69,6 +69,7 @@ double pi() "double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" "keyword repeat(string:keyword|text, number:integer)" "keyword replace(string:keyword|text, regex:keyword|text, newString:keyword|text)" +"keyword|text reverse(str:keyword|text)" "keyword right(string:keyword|text, length:integer)" "double|integer|long|unsigned_long round(number:double|integer|long|unsigned_long, ?decimals:integer)" "keyword|text rtrim(string:keyword|text)" @@ -165,9 +166,9 @@ ends_with |[str, suffix] |["keyword|text", "keyword|te exp |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. floor |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. from_base64 |string |"keyword|text" |A base64 string. -greatest |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. +greatest |first |"boolean|date|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. ip_prefix |[ip, prefixLengthV4, prefixLengthV6]|[ip, integer, integer] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., Prefix length for IPv4 addresses., Prefix length for IPv6 addresses.] -least |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. +least |first |"boolean|date|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |String expression. If `null`, the function returns `null`. locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] @@ -201,6 +202,7 @@ pi |null |null pow |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["Numeric expression for the base. If `null`\, the function returns `null`.", "Numeric expression for the exponent. If `null`\, the function returns `null`."] repeat |[string, number] |["keyword|text", integer] |[String expression., Number times to repeat.] replace |[string, regex, newString] |["keyword|text", "keyword|text", "keyword|text"] |[String expression., Regular expression., Replacement string.] +reverse |str |"keyword|text" |String expression. If `null`, the function returns `null`. right |[string, length] |["keyword|text", integer] |[The string from which to returns a substring., The number of characters to return.] round |[number, decimals] |["double|integer|long|unsigned_long", integer] |["The numeric value to round. If `null`\, the function returns `null`.", "The number of decimal places to round to. Defaults to 0. If `null`\, the function returns `null`."] rtrim |string |"keyword|text" |String expression. If `null`, the function returns `null`. @@ -333,6 +335,7 @@ pi |Returns {wikipedia}/Pi[Pi], the ratio of a circle's circumference pow |Returns the value of `base` raised to the power of `exponent`. 
repeat |Returns a string constructed by concatenating `string` with itself the specified `number` of times. replace |The function substitutes in the string `str` any match of the regular expression `regex` with the replacement string `newStr`. +reverse |Returns a new string representing the input string in reverse order. right |Return the substring that extracts 'length' chars from 'str' starting from the right. round |Rounds a number to the specified number of decimal places. Defaults to 0, which returns the nearest integer. If the precision is a negative number, rounds to the number of digits left of the decimal point. rtrim |Removes trailing whitespaces from a string. @@ -431,9 +434,9 @@ ends_with |boolean exp |double |false |false |false floor |"double|integer|long|unsigned_long" |false |false |false from_base64 |keyword |false |false |false -greatest |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false +greatest |"boolean|date|double|integer|ip|keyword|long|text|version" |false |true |false ip_prefix |ip |[false, false, false] |false |false -least |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false +least |"boolean|date|double|integer|ip|keyword|long|text|version" |false |true |false left |keyword |[false, false] |false |false length |integer |false |false |false locate |integer |[false, false, true] |false |false @@ -467,6 +470,7 @@ pi |double pow |double |[false, false] |false |false repeat |keyword |[false, false] |false |false replace |keyword |[false, false, false] |false |false +reverse |"keyword|text" |false |false |false right |keyword |[false, false] |false |false round |"double|integer|long|unsigned_long" |[false, true] |false |false rtrim |"keyword|text" |false |false |false @@ -544,5 +548,5 @@ required_capability: meta meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -121 | 121 | 121 +122 | 122 | 122 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index ffcceab26bcaf..5313e6630c75d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1194,6 +1194,113 @@ a:keyword | upper:keyword | lower:keyword π/2 + a + B + Λ ºC | Π/2 + A + B + Λ ºC | π/2 + a + b + λ ºc ; +reverse +required_capability: fn_reverse +from employees | sort emp_no | eval name_reversed = REVERSE(first_name) | keep emp_no, first_name, name_reversed | limit 1; + +emp_no:integer | first_name:keyword | name_reversed:keyword +10001 | Georgi | igroeG +; + +reverseRow +required_capability: fn_reverse +// tag::reverse[] +ROW message = "Some Text" | EVAL message_reversed = REVERSE(message); +// end::reverse[] + +// tag::reverse-result[] +message:keyword | message_reversed:keyword +Some Text | txeT emoS +// end::reverse-result[] +; + +reverseEmoji +required_capability: fn_reverse +// tag::reverseEmoji[] +ROW bending_arts = "💧🪨🔥💨" | EVAL bending_arts_reversed = REVERSE(bending_arts); +// end::reverseEmoji[] + +// tag::reverseEmoji-result[] +bending_arts:keyword | bending_arts_reversed:keyword +💧🪨🔥💨 | 💨🔥🪨💧 +// end::reverseEmoji-result[] +; + +reverseEmoji2 +required_capability: fn_reverse +ROW off_on_holiday = "🏠➡️🚌➡️✈️➡️🏝️" | EVAL back_home_again = REVERSE(off_on_holiday); + +off_on_holiday:keyword | back_home_again:keyword +🏠➡️🚌➡️✈️➡️🏝️ | 🏝️➡️✈️➡️🚌➡️🏠 +; + +reverseGraphemeClusters +required_capability: 
fn_reverse +ROW message = "áéíóúàèìòùâêîôû😊👍🏽🎉💖कंठाी" | EVAL message_reversed = REVERSE(message); + +message:keyword | message_reversed:keyword +áéíóúàèìòùâêîôû😊👍🏽🎉💖कंठाी | ठाीकं💖🎉👍🏽😊ûôîêâùòìèàúóíéá +; + +reverseMultiValue +required_capability: fn_reverse +FROM employees | SORT emp_no | EVAL jobs_reversed = REVERSE(job_positions) | KEEP job*, emp_no | LIMIT 5; + +warning:Line 1:53: evaluation of [REVERSE(job_positions)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:53: java.lang.IllegalArgumentException: single-value function encountered multi-value + +job_positions:keyword | jobs_reversed:keyword | emp_no:integer +["Accountant", "Senior Python Developer"] | null | 10001 +Senior Team Lead | daeL maeT roineS | 10002 +null | null | 10003 +[Head Human Resources, Reporting Analyst, Support Engineer, Tech Lead] | null | 10004 +null | null | 10005 +; + +reverseNested +required_capability: fn_reverse +FROM employees | SORT emp_no | EVAL name_reversed = REVERSE(REVERSE(first_name)), eq = name_reversed == first_name | KEEP first_name, name_reversed, eq, emp_no | LIMIT 5; + +first_name:keyword | name_reversed:keyword | eq:boolean | emp_no:integer +Georgi | Georgi | true | 10001 +Bezalel | Bezalel | true | 10002 +Parto | Parto | true | 10003 +Chirstian | Chirstian | true | 10004 +Kyoichi | Kyoichi | true | 10005 +; + +reverseRowNull +required_capability: fn_reverse +ROW x = null | EVAL y = REVERSE(x); + +x:null | y:null +null | null +; + + +reverseRowInlineCastWithNull +required_capability: fn_reverse +ROW x = 1 | EVAL y = REVERSE((null + 1)::string); + +x:integer | y:string +1 | null +; + +reverseWithTextFields +required_capability: fn_reverse +FROM books +| EVAL title_reversed = REVERSE(title), author_reversed_twice = REVERSE(REVERSE(author)), eq = author_reversed_twice == author +| KEEP title, title_reversed, author, author_reversed_twice, eq, book_no +| SORT book_no +| WHERE book_no IN ("1211", "1463") +| LIMIT 2; + +title:text | title_reversed:text | author:text | author_reversed_twice:text | eq:boolean | book_no:keyword +The brothers Karamazov | vozamaraK srehtorb ehT | Fyodor Dostoevsky | Fyodor Dostoevsky | true | 1211 +Realms of Tolkien: Images of Middle-earth | htrae-elddiM fo segamI :neikloT fo smlaeR | J. R. R. Tolkien | J. R. R. Tolkien | true | 1463 +; + + values required_capability: agg_values diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java index 03757d44a9f58..4f4f3d112247e 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java @@ -43,6 +43,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { private static final String REMOTE_CLUSTER = "cluster-a"; @@ -339,6 +340,108 @@ public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { } } + /** + * Searches with LIMIT 0 are used by Kibana to get a list of columns. 
After the initial planning + * (which involves cross-cluster field-caps calls), it is a coordinator-only operation at query time, + * using a different pathway than queries that require data node (and remote data node) operations. + */ + public void testCCSExecutionOnSearchesWithLimit0() { + setupTwoClusters(); + + // Ensure non-cross-cluster queries have an overall took time + try (EsqlQueryResponse resp = runQuery("FROM logs* | LIMIT 0")) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + } + + // Ensure cross-cluster searches have an overall took time and correct per-cluster details in EsqlExecutionInfo + try (EsqlQueryResponse resp = runQuery("FROM logs*,cluster-a:* | LIMIT 0")) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(true)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); + + EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); + assertThat(remoteCluster.getIndexExpression(), equalTo("*")); + assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); + assertNull(remoteCluster.getTotalShards()); + assertNull(remoteCluster.getSuccessfulShards()); + assertNull(remoteCluster.getSkippedShards()); + assertNull(remoteCluster.getFailedShards()); + + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); + assertThat(localCluster.getIndexExpression(), equalTo("logs*")); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); + assertNull(localCluster.getTotalShards()); + assertNull(localCluster.getSuccessfulShards()); + assertNull(localCluster.getSkippedShards()); + assertNull(localCluster.getFailedShards()); + } + + try (EsqlQueryResponse resp = runQuery("FROM logs*,cluster-a:nomatch* | LIMIT 0")) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(true)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); + + EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); + assertThat(remoteCluster.getIndexExpression(), equalTo("nomatch*")); + assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); + assertThat(remoteCluster.getTook().millis(), equalTo(0L)); + assertThat(remoteCluster.getTotalShards(), equalTo(0)); + assertThat(remoteCluster.getSuccessfulShards(), equalTo(0)); + assertThat(remoteCluster.getSkippedShards(), equalTo(0)); + assertThat(remoteCluster.getFailedShards(), equalTo(0)); + + EsqlExecutionInfo.Cluster localCluster =
executionInfo.getCluster(LOCAL_CLUSTER); + assertThat(localCluster.getIndexExpression(), equalTo("logs*")); + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); + assertNull(localCluster.getTotalShards()); + assertNull(localCluster.getSuccessfulShards()); + assertNull(localCluster.getSkippedShards()); + assertNull(localCluster.getFailedShards()); + } + + try (EsqlQueryResponse resp = runQuery("FROM nomatch*,cluster-a:* | LIMIT 0")) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(true)); + long overallTookMillis = executionInfo.overallTook().millis(); + assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER, LOCAL_CLUSTER))); + + EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER); + assertThat(remoteCluster.getIndexExpression(), equalTo("*")); + assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); + assertNull(remoteCluster.getTotalShards()); + assertNull(remoteCluster.getSuccessfulShards()); + assertNull(remoteCluster.getSkippedShards()); + assertNull(remoteCluster.getFailedShards()); + + EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); + assertThat(localCluster.getIndexExpression(), equalTo("nomatch*")); + // TODO: in https://github.com/elastic/elasticsearch/issues/112886, this will be changed to be SKIPPED + assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); + assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); + } + } + public void testMetadataIndex() { Map testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java new file mode 100644 index 0000000000000..68ea53ad342e1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java @@ -0,0 +1,111 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
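The three LIMIT 0 scenarios in testCCSExecutionOnSearchesWithLimit0 above repeat one invariant: each cluster's took time is non-negative and never exceeds the overall took. A hypothetical helper condensing those assertions (not part of the PR; it uses only getters the test already exercises):

```java
// Hypothetical helper, not in the PR: condenses the repeated took-time
// assertions from testCCSExecutionOnSearchesWithLimit0.
private static void assertTookTimeInvariants(EsqlExecutionInfo executionInfo) {
    long overallTookMillis = executionInfo.overallTook().millis();
    assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
    for (String clusterAlias : executionInfo.clusterAliases()) {
        EsqlExecutionInfo.Cluster cluster = executionInfo.getCluster(clusterAlias);
        // Holds for SKIPPED clusters too, whose took is pinned at 0.
        assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L));
        assertThat(cluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
    }
}
```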
+package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Reverse}. + * This class is generated. Do not edit it. + */ +public final class ReverseEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public ReverseEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.val = val; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock valBlock = (BytesRefBlock) val.eval(page)) { + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef valScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBytesRef(Reverse.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch))); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, BytesRefVector valVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + BytesRef valScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(Reverse.process(valVector.getBytesRef(p, valScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "ReverseEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public ReverseEvaluator get(DriverContext context) { + return new ReverseEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "ReverseEvaluator[" + "val=" + val + "]"; + } + } +} diff --git 
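The generated evaluator above delegates the per-row work to `Reverse.process`, which is not shown in this diff. The `reverseEmoji` and `reverseGraphemeClusters` specs earlier require that surrogate pairs and combining sequences survive reversal, which rules out a naive char-by-char reverse. A standalone sketch of grapheme-cluster-aware reversal (illustrative only; the real implementation operates on UTF-8 `BytesRef` values):

```java
import java.text.BreakIterator;

public class GraphemeReverse {
    /** Reverses by grapheme cluster so surrogate pairs and combining marks
     *  (emoji, accents) stay intact, as the csv-spec cases above expect. */
    static String reverse(String s) {
        BreakIterator boundary = BreakIterator.getCharacterInstance();
        boundary.setText(s);
        StringBuilder out = new StringBuilder(s.length());
        int end = boundary.last();
        // Walk boundaries right-to-left, appending each cluster whole.
        for (int start = boundary.previous(); start != BreakIterator.DONE; end = start, start = boundary.previous()) {
            out.append(s, start, end);
        }
        return out.toString();
    }

    public static void main(String[] args) {
        System.out.println(reverse("Some Text")); // txeT emoS
    }
}
```

How completely `BreakIterator` honors extended grapheme clusters (for example ZWJ emoji sequences) depends on the JDK's Unicode data, so this sketch is a behavioral illustration rather than a drop-in equivalent.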
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index f0fa89dedd9ab..c39a2041a61be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -27,6 +27,11 @@ */ public class EsqlCapabilities { public enum Cap { + /** + * Support for function {@code REVERSE}. + */ + FN_REVERSE, + /** * Support for function {@code CBRT}. Done in #108574. */ @@ -278,6 +283,11 @@ public enum Cap { */ TO_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** + * Support for datetime in least and greatest functions + */ + LEAST_GREATEST_FOR_DATES, + /** * Support CIDRMatch in CombineDisjunctions rule. */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index b01aff2a09bd4..f7966ff5ae9ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -7,12 +7,11 @@ package org.elasticsearch.xpack.esql.action; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; @@ -167,19 +166,18 @@ public Cluster swapCluster(String clusterAlias, BiFunction toXContentChunked(ToXContent.Params params) { if (isCrossClusterSearch() == false || clusterInfo.isEmpty()) { - return Iterators.concat(); + return Collections.emptyIterator(); } - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field(TOTAL_FIELD.getPreferredName(), clusterInfo.size()), - ChunkedToXContentHelper.field(SUCCESSFUL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SUCCESSFUL)), - ChunkedToXContentHelper.field(RUNNING_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.RUNNING)), - ChunkedToXContentHelper.field(SKIPPED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SKIPPED)), - ChunkedToXContentHelper.field(PARTIAL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.PARTIAL)), - ChunkedToXContentHelper.field(FAILED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.FAILED)), - ChunkedToXContentHelper.xContentFragmentValuesMapCreateOwnName("details", clusterInfo), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContent.builder(params).object(b -> { + b.field(TOTAL_FIELD.getPreferredName(), clusterInfo.size()); + b.field(SUCCESSFUL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SUCCESSFUL)); + b.field(RUNNING_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.RUNNING)); + b.field(SKIPPED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SKIPPED)); + b.field(PARTIAL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.PARTIAL)); + 
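+            // each field(...) call adds one lazily-serialized chunk; the builder replaces the hand-assembled Iterators.concat(...) chain removed above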
b.field(FAILED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.FAILED)); + // each clusterinfo defines its own field object name + b.xContentObject("details", clusterInfo.values().iterator()); + }); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 146a88128da35..8e4da3f138a6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.compute.data.BlockFactory; @@ -375,11 +376,8 @@ public int hashCode() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.array("drivers", drivers.iterator(), params), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContent.builder(params) + .object(ob -> ob.array("drivers", drivers.iterator(), ChunkedToXContentBuilder::append)); } List drivers() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index 5888e30747557..d8692faef5290 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -14,8 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.planner.Layout; -import java.util.function.Function; - import static org.elasticsearch.compute.data.BlockUtils.fromArrayRow; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; @@ -23,6 +21,10 @@ * Expressions that have a mapping to an {@link ExpressionEvaluator}. */ public interface EvaluatorMapper { + interface ToEvaluator { + ExpressionEvaluator.Factory apply(Expression expression); + } + /** *

* Note for implementors: @@ -50,7 +52,7 @@ public interface EvaluatorMapper { * garbage. Or return an evaluator that throws when run. *

*/ - ExpressionEvaluator.Factory toEvaluator(Function toEvaluator); + ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator); /** * Fold using {@link #toEvaluator} so you don't need a "by hand" diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 0923db51f19cf..7c0f1fa3a8ad0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -124,6 +124,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Reverse; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Space; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; @@ -300,22 +301,23 @@ private FunctionDefinition[][] functions() { def(Tau.class, Tau::new, "tau") }, // string new FunctionDefinition[] { - def(Length.class, Length::new, "length"), - def(Substring.class, Substring::new, "substring"), def(Concat.class, Concat::new, "concat"), + def(EndsWith.class, EndsWith::new, "ends_with"), def(LTrim.class, LTrim::new, "ltrim"), - def(RTrim.class, RTrim::new, "rtrim"), - def(Trim.class, Trim::new, "trim"), def(Left.class, Left::new, "left"), + def(Length.class, Length::new, "length"), + def(Locate.class, Locate::new, "locate"), + def(RTrim.class, RTrim::new, "rtrim"), + def(Repeat.class, Repeat::new, "repeat"), def(Replace.class, Replace::new, "replace"), + def(Reverse.class, Reverse::new, "reverse"), def(Right.class, Right::new, "right"), + def(Space.class, Space::new, "space"), def(StartsWith.class, StartsWith::new, "starts_with"), - def(EndsWith.class, EndsWith::new, "ends_with"), + def(Substring.class, Substring::new, "substring"), def(ToLower.class, ToLower::new, "to_lower"), def(ToUpper.class, ToUpper::new, "to_upper"), - def(Locate.class, Locate::new, "locate"), - def(Repeat.class, Repeat::new, "repeat"), - def(Space.class, Space::new, "space") }, + def(Trim.class, Trim::new, "trim") }, // date new FunctionDefinition[] { def(DateDiff.class, DateDiff::new, "date_diff"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 5fabfe0e03d89..3357b2abf0e0f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -39,7 +39,6 @@ import java.time.ZoneOffset; import java.util.ArrayList; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -241,7 +240,7 @@ public boolean foldable() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public 
ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (field.dataType() == DataType.DATETIME) { Rounding.Prepared preparedRounding; if (buckets.dataType().isWholeNumber()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index 0865e070aecd4..75a9883a77102 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -36,7 +36,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -107,7 +106,7 @@ static int process( } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new CategorizeEvaluator.Factory( source(), toEvaluator.apply(field), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index 14b0c872a3b86..afe9bf6e45eda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -43,6 +43,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Reverse; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; @@ -100,6 +101,7 @@ public static List getNamedWriteables() { entries.add(Right.ENTRY); entries.add(Repeat.ENTRY); entries.add(Replace.ENTRY); + entries.add(Reverse.ENTRY); entries.add(Round.ENTRY); entries.add(Split.ENTRY); entries.add(Substring.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 6acb8ea974ed0..62e5651d07dca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -50,7 +50,7 @@ public final class Case extends EsqlScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Case", Case::new); record Condition(Expression condition, Expression value) { - ConditionEvaluatorSupplier toEvaluator(Function toEvaluator) { + ConditionEvaluatorSupplier toEvaluator(ToEvaluator toEvaluator) { return new 
ConditionEvaluatorSupplier(condition.source(), toEvaluator.apply(condition), toEvaluator.apply(value)); } } @@ -311,7 +311,7 @@ private Expression finishPartialFold(List newChildren) { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { List conditionsFactories = conditions.stream().map(c -> c.toEvaluator(toEvaluator)).toList(); ExpressionEvaluator.Factory elseValueFactory = toEvaluator.apply(elseValue); ElementType resultType = PlannerUtils.toElementType(dataType()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 7c0427a95d478..9d815d15accdc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -30,7 +30,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import java.util.stream.Stream; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; @@ -44,7 +43,7 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Returns the maximum value from multiple columns. This is similar to <>\n" + "except it is intended to run on multiple columns at once.", note = "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. " @@ -55,12 +54,12 @@ public Greatest( Source source, @Param( name = "first", - type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "First of the columns to evaluate." ) Expression first, @Param( name = "rest", - type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "The rest of the columns to evaluate.", optional = true ) List rest @@ -138,7 +137,7 @@ public boolean foldable() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { // force datatype initialization var dataType = dataType(); ExpressionEvaluator.Factory[] factories = children().stream() @@ -153,7 +152,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function> except it is intended to run on multiple columns at once.", examples = @Example(file = "math", tag = "least") @@ -53,12 +52,12 @@ public Least( Source source, @Param( name = "first", - type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "First of the columns to evaluate." 
) Expression first, @Param( name = "rest", - type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "The rest of the columns to evaluate.", optional = true ) List rest @@ -136,7 +135,7 @@ public boolean foldable() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { // force datatype initialization var dataType = dataType(); @@ -152,7 +151,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function factories(); @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return evaluator(toEvaluator.apply(field())); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java index 873d496bfc8fd..7f9d0d3f2e647 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.Base64; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; @@ -96,9 +95,7 @@ static BytesRef process(BytesRef field, @Fixed(includeInToString = false, build } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return switch (PlannerUtils.toElementType(field.dataType())) { case BYTES_REF -> new FromBase64Evaluator.Factory(source(), toEvaluator.apply(field), context -> new BytesRefBuilder()); case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java index ab8287413c614..c23cef31f32f5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.Base64; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; @@ -90,9 +89,7 @@ static BytesRef process(BytesRef field, @Fixed(includeInToString = false, build } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return switch (PlannerUtils.toElementType(field.dataType())) { case BYTES_REF -> new ToBase64Evaluator.Factory(source(), toEvaluator.apply(field), context -> new BytesRefBuilder()); case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; diff --git 
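`GREATEST` and `LEAST` gain `date` support above purely through signature and `@Param` type lists; a plausible reading (an assumption, since the evaluator wiring is not in these hunks) is that datetimes ride the existing `long` comparison paths, because ES|QL represents a datetime as epoch milliseconds. A standalone sketch of why that is sufficient:

```java
import java.time.Instant;

public class GreatestOfDates {
    // Datetimes as epoch-millisecond longs: the max of the longs is the
    // latest date, so no date-specific comparison logic is needed.
    static long greatest(long first, long... rest) {
        long max = first;
        for (long v : rest) {
            max = Math.max(max, v);
        }
        return max;
    }

    public static void main(String[] args) {
        long a = Instant.parse("2024-01-01T00:00:00Z").toEpochMilli();
        long b = Instant.parse("2024-06-01T00:00:00Z").toEpochMilli();
        System.out.println(Instant.ofEpochMilli(greatest(a, b))); // 2024-06-01T00:00:00Z
    }
}
```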
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index f9039417e48a6..f6a23a5d5962e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -37,7 +37,6 @@ import java.util.Map; import java.util.Set; import java.util.function.BiFunction; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -227,7 +226,7 @@ static int process(BytesRef unit, long startTimestamp, long endTimestamp) throws } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { ExpressionEvaluator.Factory startTimestampEvaluator = toEvaluator.apply(startTimestamp); ExpressionEvaluator.Factory endTimestampEvaluator = toEvaluator.apply(endTimestamp); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index bbb19ca0eecab..501dfd431f106 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -31,7 +31,6 @@ import java.time.ZoneId; import java.time.temporal.ChronoField; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; @@ -108,7 +107,7 @@ public String getWriteableName() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var fieldEvaluator = toEvaluator.apply(children().get(1)); if (children().get(0).foldable()) { ChronoField chrono = chronoField(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index bfca72a563c05..60bc014ccbeec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -30,7 +30,6 @@ import java.io.IOException; import java.util.List; import java.util.Locale; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -139,7 +138,7 @@ static BytesRef process(long val, BytesRef formatter, @Fixed Locale locale) { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var fieldEvaluator = 
toEvaluator.apply(field); if (format == null) { return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, DEFAULT_DATE_TIME_FORMATTER); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index 27b613e0e6d5a..1aaa227c3846e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -30,7 +30,6 @@ import java.io.IOException; import java.time.ZoneId; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.common.time.DateFormatter.forPattern; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -136,7 +135,7 @@ static long process(BytesRef val, BytesRef formatter, @Fixed ZoneId zoneId) thro } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { ZoneId zone = UTC; // TODO session timezone? ExpressionEvaluator.Factory fieldEvaluator = toEvaluator.apply(field); if (format == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index d5ec3d1d96fae..35a705f418906 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -32,7 +32,6 @@ import java.time.ZoneOffset; import java.util.List; import java.util.concurrent.TimeUnit; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -199,7 +198,7 @@ private static Rounding.Prepared createRounding(final Duration duration, final Z } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var fieldEvaluator = toEvaluator.apply(timestampField); if (interval.foldable() == false) { throw new IllegalArgumentException("Function [" + sourceText() + "] has invalid interval [" + interval.sourceText() + "]."); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java index 0654aec3a0522..d259fc6ae57ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java @@ -25,7 +25,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; public class Now extends EsqlConfigurationFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Now", Now::new); @@ -90,7 +89,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function 
toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return dvrCtx -> new NowEvaluator(source(), now, dvrCtx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index c141beeefb1ea..51430603a4077 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -30,7 +30,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -113,12 +112,12 @@ public boolean foldable() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); return new CIDRMatchEvaluator.Factory( source(), ipEvaluatorSupplier, - matches.stream().map(x -> toEvaluator.apply(x)).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new) + matches.stream().map(toEvaluator::apply).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java index 60b464b26750a..26e75e752f681 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -120,7 +119,7 @@ public boolean foldable() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); var prefixLengthV4EvaluatorSupplier = toEvaluator.apply(prefixLengthV4Field); var prefixLengthV6EvaluatorSupplier = toEvaluator.apply(prefixLengthV6Field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index 363b70ef5ed12..ba47fd15e9c9d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; public class Abs extends UnaryScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Abs", Abs::new); @@ -69,7 +68,7 @@ static int process(int fieldVal) { } @Override - public 
ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var field = toEvaluator.apply(field()); if (dataType() == DataType.DOUBLE) { return new AbsDoubleEvaluator.Factory(source(), field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java index 8353fe24b3dd0..f44e7b029643c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import java.io.IOException; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; @@ -39,7 +38,7 @@ protected AbstractTrigonometricFunction(StreamInput in) throws IOException { protected abstract EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field); @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return doubleEvaluator(Cast.cast(source(), field().dataType(), DataType.DOUBLE, toEvaluator.apply(field()))); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index f940cb6d68554..7dbc0001f4b3d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; @@ -118,7 +117,7 @@ public boolean foldable() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var yEval = Cast.cast(source(), y.dataType(), DataType.DOUBLE, toEvaluator.apply(y)); var xEval = Cast.cast(source(), x.dataType(), DataType.DOUBLE, toEvaluator.apply(x)); return new Atan2Evaluator.Factory(source(), yEval, xEval); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java index 364e91aad8b1b..dcc704318ca5f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; @@ -56,7 +55,7 @@ public String getWriteableName() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var field = toEvaluator.apply(field()); var fieldType = field().dataType(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 909de387c62ff..f7295421de8aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -21,7 +21,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; @@ -64,7 +63,7 @@ public String getWriteableName() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType().isWholeNumber()) { return toEvaluator.apply(field()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Exp.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Exp.java index a0d9937fc87b8..7abef8bba711d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Exp.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Exp.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; /** * Returns the value of e raised to the power of tbe number specified as parameter @@ -58,9 +57,7 @@ public String getWriteableName() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var field = toEvaluator.apply(field()); var fieldType = field().dataType(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index 638770f2f079a..7e727c1c2cada 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -21,7 +21,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; @@ -66,7 +65,7 @@ public String getWriteableName() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType().isWholeNumber()) { return toEvaluator.apply(field()); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java index da11d1e77885b..4528897c194ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -145,7 +144,7 @@ public DataType dataType() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var valueEval = Cast.cast(source(), value.dataType(), DataType.DOUBLE, toEvaluator.apply(value)); if (base != null) { var baseEval = Cast.cast(source(), base.dataType(), DataType.DOUBLE, toEvaluator.apply(base)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index ae725f6ed6498..1e987651b686c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; @@ -62,7 +61,7 @@ public String getWriteableName() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var field = toEvaluator.apply(field()); var fieldType = field().dataType(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 46d80635823ca..3f5249e3e8cb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -128,7 +127,7 @@ public DataType dataType() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var baseEval = Cast.cast(source(), base.dataType(), DataType.DOUBLE, toEvaluator.apply(base)); var expEval = Cast.cast(source(), exponent.dataType(), DataType.DOUBLE, toEvaluator.apply(exponent)); return new PowEvaluator.Factory(source(), baseEval, expEval); 
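Most of the remaining hunks repeat one mechanical change: `toEvaluator` now takes the `EvaluatorMapper.ToEvaluator` interface introduced earlier instead of `java.util.function.Function`, and each file drops its now-unused `Function` import. Because the single abstract method is still named `apply`, lambdas, method references, and call sites compile unchanged. A self-contained sketch of the pattern (type names illustrative, not the real ES|QL classes):

```java
import java.util.function.Function;

public class ToEvaluatorSketch {
    interface Expression {}
    interface EvaluatorFactory {}

    // Named single-method interface replacing Function<Expression, EvaluatorFactory>.
    // Keeping the method name `apply` leaves callers and lambdas untouched.
    @FunctionalInterface
    interface ToEvaluator {
        EvaluatorFactory apply(Expression expression);
    }

    static EvaluatorFactory before(Expression e, Function<Expression, EvaluatorFactory> f) {
        return f.apply(e);
    }

    static EvaluatorFactory after(Expression e, ToEvaluator f) {
        return f.apply(e); // identical call shape, more descriptive signature
    }

    public static void main(String[] args) {
        Expression expr = new Expression() {};
        System.out.println(before(expr, x -> new EvaluatorFactory() {}) != null);
        System.out.println(after(expr, x -> new EvaluatorFactory() {}) != null);
    }
}
```

The named interface also documents intent at every `@Override` site and gives the abstraction a place to grow without touching dozens of signatures again.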
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 8fcb04d021e7a..b1baa6c55ce47 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -31,7 +31,6 @@ import java.util.Arrays; import java.util.List; import java.util.function.BiFunction; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -167,7 +166,7 @@ public DataType dataType() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { DataType fieldType = dataType(); if (fieldType == DataType.DOUBLE) { return toEvaluator(toEvaluator, RoundDoubleNoDecimalsEvaluator.Factory::new, RoundDoubleEvaluator.Factory::new); @@ -185,7 +184,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator, + ToEvaluator toEvaluator, BiFunction noDecimals, TriFunction withDecimals ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java index e78c2ce90e6c1..9c7a5fdcaa236 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; public class Signum extends UnaryScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Signum", Signum::new); @@ -56,9 +55,7 @@ public String getWriteableName() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var field = toEvaluator.apply(field()); var fieldType = field().dataType(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index d1af693d8aa7f..080c0448e082c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; @@ -56,7 +55,7 @@ public String getWriteableName() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var field = toEvaluator.apply(field()); var fieldType = field().dataType(); diff 
--git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 998a1815cbada..6a3b58728b192 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ -84,7 +84,7 @@ protected final TypeResolution resolveType() { protected abstract TypeResolution resolveFieldType(); @Override - public final ExpressionEvaluator.Factory toEvaluator(java.util.function.Function toEvaluator) { + public final ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return evaluator(toEvaluator.apply(field())); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java index deb170d9e569c..72d96a86d31eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java @@ -35,7 +35,6 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -149,9 +148,7 @@ public boolean foldable() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return switch (PlannerUtils.toElementType(dataType())) { case BOOLEAN -> new MvAppendBooleanEvaluator.Factory(source(), toEvaluator.apply(field1), toEvaluator.apply(field2)); case BYTES_REF -> new MvAppendBytesRefEvaluator.Factory(source(), toEvaluator.apply(field1), toEvaluator.apply(field2)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index fa9475055515f..1996744a76567 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -28,7 +28,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import java.io.IOException; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -87,7 +86,7 @@ public DataType dataType() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new EvaluatorFactory(toEvaluator.apply(left()), toEvaluator.apply(right())); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSum.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSum.java index 212f626090789..cf49607893aae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSum.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -33,7 +32,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -110,7 +108,7 @@ public boolean foldable() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return switch (PlannerUtils.toElementType(field.dataType())) { case DOUBLE -> new MvPSeriesWeightedSumDoubleEvaluator.Factory( source(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentile.java index 1eb0c70a7b08e..f3a63c835bd34 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentile.java @@ -34,7 +34,6 @@ import java.math.BigDecimal; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -120,7 +119,7 @@ public DataType dataType() { } @Override - public final ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public final ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var fieldEval = toEvaluator.apply(field); var percentileEval = Cast.cast(source(), percentile.dataType(), DOUBLE, toEvaluator.apply(percentile)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index c332e94b20049..9846ebe4111c0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -37,7 +37,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; 
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -178,9 +177,7 @@ public boolean foldable() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (start.foldable() && end.foldable()) { int startOffset = stringToInt(String.valueOf(start.fold())); int endOffset = stringToInt(String.valueOf(end.fold())); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index 4ed9a01c29797..d9e41233952de 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -48,7 +48,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -146,9 +145,7 @@ public boolean foldable() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { boolean ordering = true; if (isValidOrder() == false) { throw new IllegalArgumentException( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java index fd3b9e7664dff..d6a30c6ca151c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java @@ -32,7 +32,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -130,9 +129,7 @@ public Nullability nullable() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new MvZipEvaluator.Factory( source(), toEvaluator.apply(mvLeft), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 30c6abc5398e3..575bb085c41f7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -35,7 +35,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -192,8 +191,8 @@ public boolean foldable() { } 
@Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - List childEvaluators = children().stream().map(toEvaluator).toList(); + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + List childEvaluators = children().stream().map(toEvaluator::apply).toList(); return new ExpressionEvaluator.Factory() { @Override public ExpressionEvaluator get(DriverContext context) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index f8b05aea324dc..46538b77edc74 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -151,6 +151,8 @@ *
 *     <li>{@code docs/reference/esql/functions/parameters/myfunction.asciidoc}</li>
 *     <li>{@code docs/reference/esql/functions/signature/myfunction.svg}</li>
 *     <li>{@code docs/reference/esql/functions/types/myfunction.asciidoc}</li>
+ *     <li>{@code docs/reference/esql/functions/kibana/definition/myfunction.json}</li>
+ *     <li>{@code docs/reference/esql/functions/kibana/docs/myfunction.asciidoc}</li>
 * </ul>
 * Make sure to commit them. Add a reference to the
@@ -194,6 +196,9 @@
 * for your function. Now add something like {@code required_capability: my_function}
 * to all of your csv-spec tests. Run those csv-spec tests as integration tests to double
 * check that they run on the main branch.
+ *
+ * **Note:** you may notice tests gated based on Elasticsearch version. This was the old way
+ * of doing things. Now, we use specific capabilities for each function.
 * </li>
 * <li>
 * Open the PR. The subject and description of the PR are important because those'll turn
@@ -201,7 +206,7 @@
 * happy. But functions don't need an essay.
 * </li>
 * <li>
- * Add the {@code >enhancement} and {@code :Query Languages/ES|QL} tags if you are able.
+ * Add the {@code >enhancement} and {@code :Analytics/ES|QL} tags if you are able.
 * Request a review if you can, probably from one of the folks that github proposes to you.
 * </li>
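The capability note above is the gating mechanism for the csv-spec tests this walkthrough keeps referring to. A gated csv-spec entry looks roughly like the sketch below, using the `REVERSE` function added later in this diff; the capability name and expected rows are illustrative, not copied from the PR:

    reverse
    required_capability: fn_reverse
    ROW message = "Some Text" | EVAL message_reversed = REVERSE(message);

    message:keyword | message_reversed:keyword
    Some Text       | txeT emoS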
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java
index 6fd4f79125a21..1a51af8dfeeb4 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java
@@ -13,9 +13,9 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper;

 import java.util.Map;
-import java.util.function.Function;

 import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D;
 import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2Ds;
@@ -34,15 +34,12 @@ abstract class SpatialEvaluatorFactory {
         this.factoryCreator = factoryCreator;
     }

-    public abstract EvalOperator.ExpressionEvaluator.Factory get(
-        SpatialSourceSupplier function,
-        Function<Expression, EvalOperator.ExpressionEvaluator.Factory> toEvaluator
-    );
+    public abstract EvalOperator.ExpressionEvaluator.Factory get(SpatialSourceSupplier function, EvaluatorMapper.ToEvaluator toEvaluator);

     static EvalOperator.ExpressionEvaluator.Factory makeSpatialEvaluator(
         SpatialSourceSupplier s,
         Map<SpatialEvaluatorKey, SpatialEvaluatorFactory> evaluatorRules,
-        Function<Expression, EvalOperator.ExpressionEvaluator.Factory> toEvaluator
+        EvaluatorMapper.ToEvaluator toEvaluator
     ) {
         var evaluatorKey = new SpatialEvaluatorKey(
             s.crsType(),
@@ -149,10 +146,7 @@ protected static class SpatialEvaluatorFactoryWithFields extends SpatialEvaluato
         }

         @Override
-        public EvalOperator.ExpressionEvaluator.Factory get(
-            SpatialSourceSupplier s,
-            Function<Expression, EvalOperator.ExpressionEvaluator.Factory> toEvaluator
-        ) {
+        public EvalOperator.ExpressionEvaluator.Factory get(SpatialSourceSupplier s, EvaluatorMapper.ToEvaluator toEvaluator) {
             return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), toEvaluator.apply(s.right()));
         }
     }
@@ -176,10 +170,7 @@ protected static class SpatialEvaluatorWithConstantFactory extends SpatialEvalua
         }

         @Override
-        public EvalOperator.ExpressionEvaluator.Factory get(
-            SpatialSourceSupplier s,
-            Function<Expression, EvalOperator.ExpressionEvaluator.Factory> toEvaluator
-        ) {
+        public EvalOperator.ExpressionEvaluator.Factory get(SpatialSourceSupplier s, EvaluatorMapper.ToEvaluator toEvaluator) {
             return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), asLuceneComponent2D(s.crsType(), s.right()));
         }
     }
@@ -205,10 +196,7 @@ protected static class SpatialEvaluatorWithConstantArrayFactory extends SpatialE
         }

         @Override
-        public EvalOperator.ExpressionEvaluator.Factory get(
-            SpatialSourceSupplier s,
-            Function<Expression, EvalOperator.ExpressionEvaluator.Factory> toEvaluator
-        ) {
+        public EvalOperator.ExpressionEvaluator.Factory get(SpatialSourceSupplier s, EvaluatorMapper.ToEvaluator toEvaluator) {
             return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), asLuceneComponent2Ds(s.crsType(), s.right()));
         }
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java
index 68ca793089499..ee2b4450a64ff 100644
---
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -32,7 +32,6 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.function.Function; import java.util.function.Predicate; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; @@ -112,9 +111,7 @@ public SpatialRelatesFunction surrogate() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return SpatialEvaluatorFactory.makeSpatialEvaluator(this, evaluatorRules(), toEvaluator); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java index 14bded51aa55f..17bcc68004bff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import java.io.IOException; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.makeGeometryFromLiteral; @@ -177,9 +176,7 @@ public Object fold() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (right().foldable()) { return toEvaluator(toEvaluator, left(), makeGeometryFromLiteral(right()), leftDocValues); } else if (left().foldable()) { @@ -209,7 +206,7 @@ public EvalOperator.ExpressionEvaluator.Factory toEvaluator( } private EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator, + ToEvaluator toEvaluator, Expression field, Geometry geometry, boolean docValues @@ -222,7 +219,7 @@ private EvalOperator.ExpressionEvaluator.Factory toEvaluator( } private EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator, + ToEvaluator toEvaluator, Expression field, Point point, boolean docValues diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java index 18046135933b0..d1d85b03eb18a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.UNSPECIFIED; @@ -72,9 +71,7 @@ protected Expression.TypeResolution resolveType() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function 
toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new StXFromWKBEvaluator.Factory(toEvaluator.apply(field()), source()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java index bf97c3e2a3547..2056dcaed87a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.UNSPECIFIED; @@ -72,9 +71,7 @@ protected TypeResolution resolveType() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new StYFromWKBEvaluator.Factory(toEvaluator.apply(field()), source()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 23ee942bcf53a..46ecc9e026d3d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import java.util.stream.Stream; import static org.elasticsearch.common.unit.ByteSizeUnit.MB; @@ -106,8 +105,8 @@ public boolean foldable() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var values = children().stream().map(toEvaluator).toArray(ExpressionEvaluator.Factory[]::new); + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + var values = children().stream().map(toEvaluator::apply).toArray(ExpressionEvaluator.Factory[]::new); return new ConcatEvaluator.Factory(source(), context -> new BreakingBytesRefBuilder(context.breaker(), "concat"), values); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index 1d2b743fe5a7a..e97e65a3e60fc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -126,7 +125,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { 
return new EndsWithEvaluator.Factory(source(), toEvaluator.apply(str), toEvaluator.apply(suffix)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java index ece70da51ef19..8a4a5f4d841a5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -69,7 +68,7 @@ protected TypeResolution resolveType() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new LTrimEvaluator.Factory(source(), toEvaluator.apply(field())); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index b0e5b41f971e1..e7572caafd8f5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -95,7 +94,7 @@ static BytesRef process( } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new LeftEvaluator.Factory( source(), context -> new BytesRef(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index 241eab6d5b904..f4bb7f35cb466 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -87,7 +86,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new LengthEvaluator.Factory(source(), toEvaluator.apply(field())); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java index f6eff2fcbd6b3..528baa613cc02 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -28,7 +28,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -161,7 +160,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { ExpressionEvaluator.Factory strExpr = toEvaluator.apply(str); ExpressionEvaluator.Factory substrExpr = toEvaluator.apply(substr); if (start == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java index bb923ec924d31..b46c46c89deba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -79,9 +78,7 @@ protected TypeResolution resolveType() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return AutomataMatch.toEvaluator(source(), toEvaluator.apply(field()), pattern().createAutomaton()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java index 4c210607cfbe0..b79e1adf99a20 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -69,7 +68,7 @@ protected TypeResolution resolveType() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new RTrimEvaluator.Factory(source(), toEvaluator.apply(field())); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java index 3ff28e08f4ce1..2cc14399df2ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.common.unit.ByteSizeUnit.MB; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -143,7 +142,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { ExpressionEvaluator.Factory strExpr = toEvaluator.apply(str); if (number.foldable()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index 30c8793fe371a..4fa191244cb42 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; @@ -146,7 +145,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var strEval = toEvaluator.apply(str); var newStrEval = toEvaluator.apply(newStr); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java new file mode 100644 index 0000000000000..bf4e47d8d0de4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Reverse.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.string;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.lucene.BytesRefs;
+import org.elasticsearch.compute.ann.Evaluator;
+import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.function.Example;
+import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.Param;
+import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+
+import java.io.IOException;
+import java.text.BreakIterator;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+
+import static org.elasticsearch.common.util.ArrayUtils.reverseArray;
+import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT;
+import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString;
+
+/**
+ * Function that reverses a string.
+ */
+public class Reverse extends UnaryScalarFunction {
+    public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Reverse", Reverse::new);
+
+    @FunctionInfo(
+        returnType = { "keyword", "text" },
+        description = "Returns a new string representing the input string in reverse order.",
+        examples = {
+            @Example(file = "string", tag = "reverse"),
+            @Example(
+                file = "string",
+                tag = "reverseEmoji",
+                description = "`REVERSE` works with unicode, too! It keeps unicode grapheme clusters together during reversal."
+            ) }
+    )
+    public Reverse(
+        Source source,
+        @Param(
+            name = "str",
+            type = { "keyword", "text" },
+            description = "String expression. If `null`, the function returns `null`."
+        ) Expression field
+    ) {
+        super(source, field);
+    }
+
+    private Reverse(StreamInput in) throws IOException {
+        super(in);
+    }
+
+    @Override
+    public String getWriteableName() {
+        return ENTRY.name;
+    }
+
+    @Override
+    protected TypeResolution resolveType() {
+        if (childrenResolved() == false) {
+            return new TypeResolution("Unresolved children");
+        }
+
+        return isString(field, sourceText(), DEFAULT);
+    }
+
+    /**
+     * Reverses a unicode string, keeping grapheme clusters together.
+     * @param str the string to reverse
+     * @return a new string with the grapheme clusters of {@code str} in reverse order
+     */
+    public static String reverseStringWithUnicodeCharacters(String str) {
+        BreakIterator boundary = BreakIterator.getCharacterInstance(Locale.ROOT);
+        boundary.setText(str);
+
+        List<String> characters = new ArrayList<>();
+        int start = boundary.first();
+        for (int end = boundary.next(); end != BreakIterator.DONE; start = end, end = boundary.next()) {
+            characters.add(str.substring(start, end));
+        }
+
+        StringBuilder reversed = new StringBuilder(str.length());
+        for (int i = characters.size() - 1; i >= 0; i--) {
+            reversed.append(characters.get(i));
+        }
+
+        return reversed.toString();
+    }
+
+    private static boolean isOneByteUTF8(BytesRef ref) {
+        int end = ref.offset + ref.length;
+        for (int i = ref.offset; i < end; i++) {
+            if (ref.bytes[i] < 0) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Evaluator
+    static BytesRef process(BytesRef val) {
+        if (isOneByteUTF8(val)) {
+            // this is the fast path. we know we can just reverse the bytes.
+            BytesRef reversed = BytesRef.deepCopyOf(val);
+            reverseArray(reversed.bytes, reversed.offset, reversed.length);
+            return reversed;
+        }
+        return BytesRefs.toBytesRef(reverseStringWithUnicodeCharacters(val.utf8ToString()));
+    }
+
+    @Override
+    public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
+        var fieldEvaluator = toEvaluator.apply(field);
+        return new ReverseEvaluator.Factory(source(), fieldEvaluator);
+    }
+
+    @Override
+    public Expression replaceChildren(List<Expression> newChildren) {
+        assert newChildren.size() == 1;
+        return new Reverse(source(), newChildren.get(0));
+    }
+
+    @Override
+    protected NodeInfo<? extends Expression> info() {
+        return NodeInfo.create(this, Reverse::new, field);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java
index ab6d3bf6cef99..b069b984ea81e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java
@@ -29,7 +29,6 @@
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
-import java.util.function.Function;

 import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST;
 import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND;
@@ -99,7 +98,7 @@ static BytesRef process(
     }

     @Override
-    public ExpressionEvaluator.Factory toEvaluator(Function<Expression, ExpressionEvaluator.Factory> toEvaluator) {
+    public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
         return new RightEvaluator.Factory(
             source(),
             context -> new BytesRef(),
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Space.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Space.java
index e6225a008fceb..6481ce5764e1f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Space.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Space.java
@@ -29,7 +29,6 @@
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
-import java.util.function.Function;

 import static org.elasticsearch.common.unit.ByteSizeUnit.MB;
 import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT;
@@ -111,7 +110,7 @@ protected NodeInfo info() {
     }

     @Override
-    public ExpressionEvaluator.Factory toEvaluator(Function<Expression, ExpressionEvaluator.Factory> toEvaluator) {
+    public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
         if (field.foldable()) {
             Object folded = field.fold();
             if (folded instanceof Integer num) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java
index 79ff23ac6737a..b1f5da56d011b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java
@@ -28,7 +28,6 @@
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;

 import java.io.IOException;
-import java.util.function.Function;

 import static
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -158,7 +157,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var str = toEvaluator.apply(left()); if (right().foldable() == false) { return new SplitVariableEvaluator.Factory(source(), str, toEvaluator.apply(right()), context -> new BytesRef()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index fc40a73471194..2256ec2179adf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -123,7 +122,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return new StartsWithEvaluator.Factory(source(), toEvaluator.apply(str), toEvaluator.apply(prefix)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 7c2ecd0c60e49..73ea409676fbd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -180,7 +179,7 @@ protected NodeInfo info() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var strFactory = toEvaluator.apply(str); var startFactory = toEvaluator.apply(start); if (length == null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java index 62255a0a31ea6..c475469488d7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.List; import java.util.Locale; -import java.util.function.Function; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -96,7 +95,7 @@ static BytesRef process(BytesRef val, @Fixed Locale locale) { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); return new ToLowerEvaluator.Factory(source(), fieldEvaluator, configuration().locale()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java index e6eba0d01e4da..1b5084a7916ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.util.List; import java.util.Locale; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -96,7 +95,7 @@ static BytesRef process(BytesRef val, @Fixed Locale locale) { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); return new ToUpperEvaluator.Factory(source(), fieldEvaluator, configuration().locale()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java index 36dc3d97992ab..1fe7529caa2da 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -69,7 +68,7 @@ protected TypeResolution resolveType() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var field = toEvaluator.apply(field()); return new TrimEvaluator.Factory(source(), field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java index 15470bb56b29f..714c4ca04a862 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java @@ -23,7 +23,6 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; @@ -85,9 +84,7 @@ protected TypeResolution resolveType() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return AutomataMatch.toEvaluator( source(), toEvaluator.apply(field()), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index 5b7cc74faed86..d407dd8bf7de1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -20,7 +20,6 @@ import java.time.Period; import java.time.temporal.TemporalAmount; import java.util.Collection; -import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; @@ -159,7 +158,7 @@ public final Object fold() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType() == DATETIME) { // One of the arguments has to be a datetime and the other a temporal amount. Expression datetimeArgument; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index 400e70b641111..62201bcfa858d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; @@ -170,7 +169,7 @@ public static String formatIncompatibleTypesMessage(String symbol, DataType left } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var commonType = dataType(); var leftType = left().dataType(); if (leftType.isNumeric()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java index 67b770d14339e..fb32282005f02 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java @@ -25,7 +25,6 @@ import java.time.Duration; import java.time.Period; import java.util.List; -import java.util.function.Function; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; @@ -61,7 +60,7 @@ public String getWriteableName() { } @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { DataType type = dataType(); if (type.isNumeric()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java index b50d70e69819d..db771a6354883 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java @@ -27,7 +27,6 @@ import java.time.ZoneId; import java.util.List; import java.util.Map; -import java.util.function.Function; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; @@ -168,9 +167,7 @@ public BinaryComparisonOperation getFunctionType() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { // Our type is always boolean, so figure out the evaluator type from the inputs DataType commonType = commonType(left().dataType(), right().dataType()); EvalOperator.ExpressionEvaluator.Factory lhs; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java index 333f32e82c579..eda6aadccc86a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java @@ -33,7 +33,6 @@ import java.util.BitSet; import java.util.Collections; import java.util.List; -import java.util.function.Function; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -213,9 +212,7 @@ protected Expression canonicalize() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { var commonType = commonType(); EvalOperator.ExpressionEvaluator.Factory lhs; EvalOperator.ExpressionEvaluator.Factory[] factories; @@ -226,7 +223,7 @@ public EvalOperator.ExpressionEvaluator.Factory toEvaluator( .toArray(EvalOperator.ExpressionEvaluator.Factory[]::new); } else { lhs = toEvaluator.apply(value); - factories = list.stream().map(e -> toEvaluator.apply(e)).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new); + factories = list.stream().map(toEvaluator::apply).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new); } if (commonType == BOOLEAN) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
index ef4417a1c7a02..2b09a395c4a3d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
@@ -209,7 +209,7 @@ private Attribute attributeFromCache(int id) throws IOException {
     private void cacheAttribute(int id, Attribute attr) {
         assert id >= 0;
         if (id >= attributesCache.length) {
-            attributesCache = ArrayUtil.grow(attributesCache);
+            attributesCache = ArrayUtil.grow(attributesCache, id + 1);
         }
         attributesCache[id] = attr;
     }
@@ -252,7 +252,7 @@ private EsField esFieldFromCache(int id) throws IOException {
     private void cacheEsField(int id, EsField field) {
         assert id >= 0;
         if (id >= esFieldsCache.length) {
-            esFieldsCache = ArrayUtil.grow(esFieldsCache);
+            esFieldsCache = ArrayUtil.grow(esFieldsCache, id + 1);
         }
         esFieldsCache[id] = field;
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
index d1f2007af2757..3ec39d1b0ac4b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
@@ -171,17 +171,21 @@ public void execute(
                 null,
                 null
             );
+            String local = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY;
             try (
                 var computeListener = ComputeListener.create(
-                    RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY,
+                    local,
                     transportService,
                     rootTask,
                     execInfo,
                     configuration.getQueryStartTimeNanos(),
-                    listener.map(r -> new Result(physicalPlan.output(), collectedPages, r.getProfiles(), execInfo))
+                    listener.map(r -> {
+                        updateExecutionInfoAfterCoordinatorOnlyQuery(configuration.getQueryStartTimeNanos(), execInfo);
+                        return new Result(physicalPlan.output(), collectedPages, r.getProfiles(), execInfo);
+                    })
                 )
             ) {
-                runCompute(rootTask, computeContext, coordinatorPlan, computeListener.acquireCompute());
+                runCompute(rootTask, computeContext, coordinatorPlan, computeListener.acquireCompute(local));
                 return;
             }
         } else {
@@ -247,6 +251,27 @@ public void execute(
         }
     }

+    private static void updateExecutionInfoAfterCoordinatorOnlyQuery(long queryStartNanos, EsqlExecutionInfo execInfo) {
+        long tookTimeNanos = System.nanoTime() - queryStartNanos;
+        execInfo.overallTook(new TimeValue(tookTimeNanos, TimeUnit.NANOSECONDS));
+        if (execInfo.isCrossClusterSearch()) {
+            for (String clusterAlias : execInfo.clusterAliases()) {
+                // The local cluster 'took' time gets updated as part of the acquireCompute(local) call in the coordinator, so
+                // here we only need to update status for remote clusters since there are no remote ComputeListeners in this case.
+                // This happens in cross-cluster searches that use LIMIT 0, e.g., FROM logs*,remote*:logs* | LIMIT 0.
+ if (clusterAlias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) == false) { + execInfo.swapCluster(clusterAlias, (k, v) -> { + if (v.getStatus() == EsqlExecutionInfo.Cluster.Status.RUNNING) { + return new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).build(); + } else { + return v; + } + }); + } + } + } + } + private List getRemoteClusters( Map clusterToConcreteIndices, Map clusterToOriginalIndices diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 608e45bb2085b..96391c841856f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -72,6 +72,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Predicate; @@ -245,6 +246,7 @@ private void preAnalyze( if (indexResolution.isValid()) { updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.getUnavailableClusters()); + updateTookTimeForRemoteClusters(executionInfo); Set newClusters = enrichPolicyResolver.groupIndicesPerCluster( indexResolution.get().concreteIndices().toArray(String[]::new) ).keySet(); @@ -285,6 +287,7 @@ static void updateExecutionInfoWithClustersWithNoMatchingIndices(EsqlExecutionIn } Set clustersRequested = executionInfo.clusterAliases(); Set clustersWithNoMatchingIndices = Sets.difference(clustersRequested, clustersWithResolvedIndices); + clustersWithNoMatchingIndices.removeAll(indexResolution.getUnavailableClusters()); /* * These are clusters in the original request that are not present in the field-caps response. They were * specified with an index or indices that do not exist, so the search on that cluster is done. @@ -304,6 +307,28 @@ static void updateExecutionInfoWithClustersWithNoMatchingIndices(EsqlExecutionIn } } + private void updateTookTimeForRemoteClusters(EsqlExecutionInfo executionInfo) { + if (executionInfo.isCrossClusterSearch()) { + for (String clusterAlias : executionInfo.clusterAliases()) { + if (clusterAlias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) == false) { + executionInfo.swapCluster(clusterAlias, (k, v) -> { + if (v.getTook() == null && v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) { + // set took time in case we are finished with the remote cluster (e.g., FROM foo | LIMIT 0). 
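// For a coordinator-only plan there are no remote ComputeListeners, so the overall took time
// has to be computed on the coordinator, as updateExecutionInfoAfterCoordinatorOnlyQuery does
// above. The arithmetic in isolation (plain Java, no ESQL types):
class TookTimeSketch {
    public static void main(String[] args) throws InterruptedException {
        long queryStartNanos = System.nanoTime(); // captured once, when the query starts
        Thread.sleep(5);                          // stands in for running the query
        long tookTimeNanos = System.nanoTime() - queryStartNanos;
        // what new TimeValue(tookTimeNanos, TimeUnit.NANOSECONDS) ends up reporting
        System.out.println("took=" + java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(tookTimeNanos) + "ms");
    }
}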
+ // this will be overwritten later if ES|QL operations happen on the remote cluster (the typical scenario) + TimeValue took = new TimeValue( + System.nanoTime() - configuration.getQueryStartTimeNanos(), + TimeUnit.NANOSECONDS + ); + return new EsqlExecutionInfo.Cluster.Builder(v).setTook(took).build(); + } else { + return v; + } + }); + } + } + } + } + private void preAnalyzeIndices( LogicalPlan parsed, EsqlExecutionInfo executionInfo, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java index d25305a9ea190..593a444eceec2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import java.io.IOException; -import java.util.function.Function; /** * Expression that makes a deep copy of the block it receives. @@ -41,9 +40,7 @@ public String getWriteableName() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { EvalOperator.ExpressionEvaluator.Factory childEval = toEvaluator.apply(child()); return ctx -> new EvalOperator.ExpressionEvaluator() { private final EvalOperator.ExpressionEvaluator child = childEval.get(ctx); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java index 7f19419b21816..801bd8700d014 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Function; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.ConfigurationTestUtils.randomConfiguration; @@ -258,9 +257,7 @@ protected NodeInfo info() { } @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return null; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java index 7cc03be7d6273..311e3e3d89149 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java @@ -100,6 +100,21 @@ public static Iterable parameters() { ) ) ); + suppliers.add( + new TestCaseSupplier( + "(a, b)", + List.of(DataType.DATETIME, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(1727877348000L, DataType.DATETIME, "a"), + new TestCaseSupplier.TypedData(1727790948000L, DataType.DATETIME, "b") + ), + 
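// Both new EsqlExecutionInfo updates above use the same idiom: swap an immutable per-cluster
// value for a modified copy only when a condition holds (status still RUNNING, took not yet
// set), and otherwise return it unchanged. The same shape with a record and Map.compute:
class SwapClusterSketch {
    record Cluster(String alias, String status, Long tookMillis) {}

    public static void main(String[] args) {
        var clusters = new java.util.concurrent.ConcurrentHashMap<String, Cluster>();
        clusters.put("remote1", new Cluster("remote1", "RUNNING", null));
        long tookMillis = 12;
        // conditional copy-on-write: leave the entry untouched unless it is still RUNNING
        clusters.compute("remote1", (k, v) ->
            "RUNNING".equals(v.status()) ? new Cluster(v.alias(), "SUCCESSFUL", tookMillis) : v);
        System.out.println(clusters.get("remote1")); // Cluster[alias=remote1, status=SUCCESSFUL, tookMillis=12]
    }
}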
"GreatestLongEvaluator[values=[MvMax[field=Attribute[channel=0]], MvMax[field=Attribute[channel=1]]]]", + DataType.DATETIME, + equalTo(1727877348000L) + ) + ) + ); return parameterSuppliersFromTypedData(anyNullIsNull(false, suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java index aa475f05ebe69..69842fde90312 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java @@ -99,6 +99,21 @@ public static Iterable parameters() { ) ) ); + suppliers.add( + new TestCaseSupplier( + "(a, b)", + List.of(DataType.DATETIME, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(1727877348000L, DataType.DATETIME, "a"), + new TestCaseSupplier.TypedData(1727790948000L, DataType.DATETIME, "b") + ), + "LeastLongEvaluator[values=[MvMin[field=Attribute[channel=0]], MvMin[field=Attribute[channel=1]]]]", + DataType.DATETIME, + equalTo(1727790948000L) + ) + ) + ); return parameterSuppliersFromTypedData(anyNullIsNull(false, suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index f760694391ee4..c9b6de64e079d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.VaragsTestCaseBuilder; @@ -33,7 +34,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; @@ -174,7 +174,7 @@ public void testCoalesceIsLazy() { Layout.Builder builder = new Layout.Builder(); buildLayout(builder, exp); Layout layout = builder.build(); - Function map = child -> { + EvaluatorMapper.ToEvaluator toEvaluator = child -> { if (child == evil) { return dvrCtx -> new EvalOperator.ExpressionEvaluator() { @Override @@ -189,7 +189,7 @@ public void close() {} return EvalMapper.toEvaluator(child, layout); }; try ( - EvalOperator.ExpressionEvaluator eval = exp.toEvaluator(map).get(driverContext()); + EvalOperator.ExpressionEvaluator eval = exp.toEvaluator(toEvaluator).get(driverContext()); Block block = eval.eval(row(testCase.getDataValues())) ) { assertThat(toJavaObject(block, 0), testCase.getMatcher()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseSerializationTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseSerializationTests.java new file mode 100644 index 0000000000000..7b1ad8c9dffd0 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseSerializationTests.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests; + +public class ReverseSerializationTests extends AbstractUnaryScalarSerializationTests { + @Override + protected Reverse create(Source source, Expression child) { + return new Reverse(source, child); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java new file mode 100644 index 0000000000000..2873f18d53957 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
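// ReverseTests below round-trips every case through Reverse.reverseStringWithUnicodeCharacters.
// The point of a unicode-aware helper: a naive char-by-char reversal tears surrogate pairs
// apart. A code-point-based sketch of the idea (the real helper may handle more, e.g.
// combining marks):
class ReverseSketch {
    static String reverseByCodePoint(String s) {
        StringBuilder sb = new StringBuilder(s.length());
        for (int i = s.length(); i > 0; ) { // walk code points from the end
            int cp = s.codePointBefore(i);
            sb.appendCodePoint(cp);
            i -= Character.charCount(cp);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(reverseByCodePoint("ab\uD83D\uDE00c")); // "c\uD83D\uDE00ba" - the emoji survives
    }
}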
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class ReverseTests extends AbstractScalarFunctionTestCase { + public ReverseTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + + for (DataType stringType : new DataType[] { DataType.KEYWORD, DataType.TEXT }) { + for (var supplier : TestCaseSupplier.stringCases(stringType)) { + suppliers.add(makeSupplier(supplier)); + } + } + + return parameterSuppliersFromTypedData(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new Reverse(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(fieldSupplier.name(), List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + String expectedToString = "ReverseEvaluator[val=Attribute[channel=0]]"; + String value = BytesRefs.toString(fieldTypedData.data()); + String expectedValue = Reverse.reverseStringWithUnicodeCharacters(value); + + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + expectedToString, + fieldSupplier.type(), + equalTo(new BytesRef(expectedValue)) + ); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java index 8dcad2f354b26..326756ad0b5f4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionTests.java @@ -216,6 +216,7 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { randomMapping(), Map.of("logs-a", IndexMode.STANDARD) ); + // mark remote1 as unavailable IndexResolution indexResolution = IndexResolution.valid(esIndex, Set.of(remote1Alias)); EsqlSession.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); @@ -226,12 +227,10 @@ public void testUpdateExecutionInfoWithClustersWithNoMatchingIndices() { EsqlExecutionInfo.Cluster remote1Cluster = executionInfo.getCluster(remote1Alias); assertThat(remote1Cluster.getIndexExpression(), equalTo("*")); - assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SKIPPED)); - assertThat(remote1Cluster.getTook().millis(), equalTo(0L)); - assertThat(remote1Cluster.getTotalShards(), equalTo(0)); - assertThat(remote1Cluster.getSuccessfulShards(), equalTo(0)); - assertThat(remote1Cluster.getSkippedShards(), equalTo(0)); - assertThat(remote1Cluster.getFailedShards(), equalTo(0)); + // remote1 is left as RUNNING, 
since updateExecutionInfoWithUnavailableClusters, which is not under test here, is responsible for changing the status + assertThat(remote1Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + assertNull(remote1Cluster.getTook()); + assertNull(remote1Cluster.getTotalShards()); EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(remote2Alias); assertThat(remote2Cluster.getIndexExpression(), equalTo("mylogs1,mylogs2,logs*")); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CustomElandModelIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CustomElandModelIT.java index 65b7a138e7e1e..c05d08fa33692 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CustomElandModelIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CustomElandModelIT.java @@ -85,7 +85,7 @@ public void testSparse() throws IOException { var inferenceId = "sparse-inf"; putModel(inferenceId, inferenceConfig, TaskType.SPARSE_EMBEDDING); - var results = inferOnMockService(inferenceId, List.of("washing", "machine")); + var results = infer(inferenceId, List.of("washing", "machine")); deleteModel(inferenceId); assertNotNull(results.get("sparse_embedding")); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultElserIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultElserIT.java new file mode 100644 index 0000000000000..5d84aad4b7344 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultElserIT.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; + +public class DefaultElserIT extends InferenceBaseRestTest { + + private TestThreadPool threadPool; + + @Before + public void createThreadPool() { + threadPool = new TestThreadPool(DefaultElserIT.class.getSimpleName()); + } + + @After + public void tearDown() throws Exception { + threadPool.close(); + super.tearDown(); + } + + @SuppressWarnings("unchecked") + public void testInferCreatesDefaultElser() throws IOException { + assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); + var model = getModel(ElasticsearchInternalService.DEFAULT_ELSER_ID); + assertDefaultElserConfig(model); + + var inputs = List.of("Hello World", "Goodnight moon"); + var queryParams = Map.of("timeout", "120s"); + var results = infer(ElasticsearchInternalService.DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, inputs, queryParams); + var embeddings = (List>) results.get("sparse_embedding"); + assertThat(results.toString(), embeddings, hasSize(2)); + } + + @SuppressWarnings("unchecked") + private static void assertDefaultElserConfig(Map modelConfig) { + assertEquals(modelConfig.toString(), ElasticsearchInternalService.DEFAULT_ELSER_ID, modelConfig.get("inference_id")); + assertEquals(modelConfig.toString(), ElasticsearchInternalService.NAME, modelConfig.get("service")); + assertEquals(modelConfig.toString(), TaskType.SPARSE_EMBEDDING.toString(), modelConfig.get("task_type")); + + var serviceSettings = (Map) modelConfig.get("service_settings"); + assertThat(modelConfig.toString(), serviceSettings.get("model_id"), is(oneOf(".elser_model_2", ".elser_model_2_linux-x86_64"))); + assertEquals(modelConfig.toString(), 1, serviceSettings.get("num_threads")); + + var adaptiveAllocations = (Map) serviceSettings.get("adaptive_allocations"); + assertThat( + modelConfig.toString(), + adaptiveAllocations, + Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 1, "max_number_of_allocations", 8)) + ); + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 3fa6159661b7e..f82b6f155c0a0 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -270,7 +270,7 @@ protected Map deployE5TrainedModels() throws IOException { @SuppressWarnings("unchecked") protected Map getModel(String modelId) throws IOException { - var endpoint = Strings.format("_inference/%s", modelId); + var endpoint = Strings.format("_inference/%s?error_trace", modelId); return ((List>) getInternal(endpoint).get("endpoints")).get(0); } @@ -293,9 +293,9 @@ private Map getInternal(String endpoint) throws IOException { return 
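// DefaultElserIT above exercises the built-in endpoint through the ordinary _inference REST API,
// passing a generous timeout because the default model may still be deploying. The helper below
// only sketches the URL construction (POST _inference/{task_type}/{inference_id}?...); the id
// used in main is a placeholder, not the service's real default id.
class InferUrlSketch {
    static String inferPath(String taskType, String inferenceId, java.util.Map<String, String> params) {
        StringBuilder query = new StringBuilder();
        params.forEach((k, v) -> query.append(query.length() == 0 ? "?" : "&").append(k).append('=').append(v));
        return "_inference/" + taskType + "/" + inferenceId + query;
    }

    public static void main(String[] args) {
        // prints _inference/sparse_embedding/my-elser-endpoint?timeout=120s
        System.out.println(inferPath("sparse_embedding", "my-elser-endpoint", java.util.Map.of("timeout", "120s")));
    }
}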
entityAsMap(response); } - protected Map inferOnMockService(String modelId, List input) throws IOException { + protected Map infer(String modelId, List input) throws IOException { var endpoint = Strings.format("_inference/%s", modelId); - return inferOnMockServiceInternal(endpoint, input); + return inferInternal(endpoint, input, Map.of()); } protected Deque streamInferOnMockService(String modelId, TaskType taskType, List input) throws Exception { @@ -324,14 +324,23 @@ public void onFailure(Exception exception) { return responseConsumer.events(); } - protected Map inferOnMockService(String modelId, TaskType taskType, List input) throws IOException { + protected Map infer(String modelId, TaskType taskType, List input) throws IOException { var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - return inferOnMockServiceInternal(endpoint, input); + return inferInternal(endpoint, input, Map.of()); + } + + protected Map infer(String modelId, TaskType taskType, List input, Map queryParameters) + throws IOException { + var endpoint = Strings.format("_inference/%s/%s?error_trace", taskType, modelId); + return inferInternal(endpoint, input, queryParameters); } - private Map inferOnMockServiceInternal(String endpoint, List input) throws IOException { + private Map inferInternal(String endpoint, List input, Map queryParameters) throws IOException { var request = new Request("POST", endpoint); request.setJsonEntity(jsonBody(input)); + if (queryParameters.isEmpty() == false) { + request.addParameters(queryParameters); + } var response = client().performRequest(request); assertOkOrCreated(response); return entityAsMap(response); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 92affbc043669..5a84fd8985504 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -38,10 +38,12 @@ public void testGet() throws IOException { } var getAllModels = getAllModels(); - assertThat(getAllModels, hasSize(9)); + int numModels = DefaultElserFeatureFlag.isEnabled() ? 10 : 9; + assertThat(getAllModels, hasSize(numModels)); var getSparseModels = getModels("_all", TaskType.SPARSE_EMBEDDING); - assertThat(getSparseModels, hasSize(5)); + int numSparseModels = DefaultElserFeatureFlag.isEnabled() ? 
6 : 5; + assertThat(getSparseModels, hasSize(numSparseModels)); for (var sparseModel : getSparseModels) { assertEquals("sparse_embedding", sparseModel.get("task_type")); } @@ -99,7 +101,7 @@ public void testApisWithoutTaskType() throws IOException { assertEquals(modelId, singleModel.get("inference_id")); assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get("task_type")); - var inference = inferOnMockService(modelId, List.of(randomAlphaOfLength(10))); + var inference = infer(modelId, List.of(randomAlphaOfLength(10))); assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); deleteModel(modelId); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java index 5f6bad5687407..1077bfec8bbbd 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java @@ -28,15 +28,12 @@ public void testMockService() throws IOException { } List input = List.of(randomAlphaOfLength(10)); - var inference = inferOnMockService(inferenceEntityId, input); + var inference = infer(inferenceEntityId, input); assertNonEmptyInferenceResults(inference, 1, TaskType.TEXT_EMBEDDING); // Same input should return the same result - assertEquals(inference, inferOnMockService(inferenceEntityId, input)); + assertEquals(inference, infer(inferenceEntityId, input)); // Different input values should not - assertNotEquals( - inference, - inferOnMockService(inferenceEntityId, randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10)))) - ); + assertNotEquals(inference, infer(inferenceEntityId, randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10))))); } public void testMockServiceWithMultipleInputs() throws IOException { @@ -44,7 +41,7 @@ public void testMockServiceWithMultipleInputs() throws IOException { putModel(inferenceEntityId, mockDenseServiceModelConfig(), TaskType.TEXT_EMBEDDING); // The response is randomly generated, the input can be anything - var inference = inferOnMockService( + var inference = infer( inferenceEntityId, TaskType.TEXT_EMBEDDING, List.of(randomAlphaOfLength(5), randomAlphaOfLength(10), randomAlphaOfLength(15)) diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java index 24ba2708f5de4..9a17d8edc0768 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java @@ -30,15 +30,12 @@ public void testMockService() throws IOException { } List input = List.of(randomAlphaOfLength(10)); - var inference = inferOnMockService(inferenceEntityId, input); + var inference = infer(inferenceEntityId, input); assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); // Same input should return the same result - 
assertEquals(inference, inferOnMockService(inferenceEntityId, input)); + assertEquals(inference, infer(inferenceEntityId, input)); // Different input values should not - assertNotEquals( - inference, - inferOnMockService(inferenceEntityId, randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10)))) - ); + assertNotEquals(inference, infer(inferenceEntityId, randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10))))); } public void testMockServiceWithMultipleInputs() throws IOException { @@ -46,7 +43,7 @@ public void testMockServiceWithMultipleInputs() throws IOException { putModel(inferenceEntityId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); // The response is randomly generated, the input can be anything - var inference = inferOnMockService( + var inference = infer( inferenceEntityId, TaskType.SPARSE_EMBEDDING, List.of(randomAlphaOfLength(5), randomAlphaOfLength(10), randomAlphaOfLength(15)) @@ -84,7 +81,7 @@ public void testMockService_DoesNotReturnHiddenField_InModelResponses() throws I } // The response is randomly generated, the input can be anything - var inference = inferOnMockService(inferenceEntityId, List.of(randomAlphaOfLength(10))); + var inference = infer(inferenceEntityId, List.of(randomAlphaOfLength(10))); assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); } @@ -102,7 +99,7 @@ public void testMockService_DoesReturnHiddenField_InModelResponses() throws IOEx } // The response is randomly generated, the input can be anything - var inference = inferOnMockService(inferenceEntityId, List.of(randomAlphaOfLength(10))); + var inference = infer(inferenceEntityId, List.of(randomAlphaOfLength(10))); assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java index 01e8c30e3bf27..8d9c859f129cb 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java @@ -38,7 +38,7 @@ public void testPutE5Small_withPlatformAgnosticVariant() throws IOException { var models = getTrainedModel("_all"); assertThat(models.toString(), containsString("deployment_id=" + inferenceEntityId)); - Map results = inferOnMockService( + Map results = infer( inferenceEntityId, TaskType.TEXT_EMBEDDING, List.of("hello world", "this is the second document") @@ -57,7 +57,7 @@ public void testPutE5Small_withPlatformSpecificVariant() throws IOException { var models = getTrainedModel("_all"); assertThat(models.toString(), containsString("deployment_id=" + inferenceEntityId)); - Map results = inferOnMockService( + Map results = infer( inferenceEntityId, TaskType.TEXT_EMBEDDING, List.of("hello world", "this is the second document") diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java index 59d3faf6489a6..457ae525b7f4b 100644 --- 
a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; @@ -27,6 +28,7 @@ public class HuggingFaceServiceMixedIT extends BaseMixedTestCase { private static final String HF_EMBEDDINGS_ADDED = "8.12.0"; private static final String HF_ELSER_ADDED = "8.12.0"; + private static final String HF_EMBEDDINGS_CHUNKING_SETTINGS_ADDED = "8.16.0"; private static final String MINIMUM_SUPPORTED_VERSION = "8.15.0"; private static MockWebServer embeddingsServer; @@ -59,7 +61,24 @@ public void testHFEmbeddings() throws IOException { final String inferenceId = "mixed-cluster-embeddings"; embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); - put(inferenceId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); + + try { + put(inferenceId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); + } catch (Exception e) { + if (bwcVersion.before(Version.fromString(HF_EMBEDDINGS_CHUNKING_SETTINGS_ADDED))) { + // Chunking settings were added in 8.16.0. If the version is before that, an exception will be thrown if the index mapping + // was created based on a mapping from an old node + assertThat( + e.getMessage(), + containsString( + "One or more nodes in your cluster does not support chunking_settings. " + + "Please update all nodes in your cluster to the latest version to use chunking_settings."
+ ) + ); + return; + } + } + var configs = (List>) get(TaskType.TEXT_EMBEDDING, inferenceId).get("endpoints"); assertThat(configs, hasSize(1)); assertEquals("hugging_face", configs.get(0).get("service")); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index ea8b32f36f54c..8e68ca9dfa565 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -38,6 +39,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -110,7 +112,7 @@ public void testGetModel() throws Exception { assertThat(putModelHolder.get(), is(true)); // now get the model - AtomicReference modelHolder = new AtomicReference<>(); + AtomicReference modelHolder = new AtomicReference<>(); blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), is(nullValue())); assertThat(modelHolder.get(), not(nullValue())); @@ -168,7 +170,7 @@ public void testDeleteModel() throws Exception { // get should fail deleteResponseHolder.set(false); - AtomicReference modelHolder = new AtomicReference<>(); + AtomicReference modelHolder = new AtomicReference<>(); blockingCall(listener -> modelRegistry.getModelWithSecrets("model1", listener), modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), not(nullValue())); @@ -194,7 +196,7 @@ public void testGetModelsByTaskType() throws InterruptedException { } AtomicReference exceptionHolder = new AtomicReference<>(); - AtomicReference> modelHolder = new AtomicReference<>(); + AtomicReference> modelHolder = new AtomicReference<>(); blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get(), hasSize(3)); var sparseIds = sparseAndTextEmbeddingModels.stream() @@ -235,8 +237,9 @@ public void testGetAllModels() throws InterruptedException { assertNull(exceptionHolder.get()); } - AtomicReference> modelHolder = new AtomicReference<>(); + AtomicReference> modelHolder = new AtomicReference<>(); blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); assertThat(modelHolder.get(), hasSize(modelCount)); var getAllModels = modelHolder.get(); @@ -264,15 +267,213 @@ public void testGetModelWithSecrets() throws InterruptedException { assertThat(putModelHolder.get(), is(true)); assertNull(exceptionHolder.get()); - AtomicReference modelHolder = new AtomicReference<>(); + AtomicReference modelHolder = new AtomicReference<>(); blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); 
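// The import and AtomicReference changes above reflect UnparsedModel's promotion from an inner
// record of ModelRegistry to the shared org.elasticsearch.inference package. The record stays
// "semi parsed": id, task type and service are known, while settings and secrets remain raw maps
// until a service parses them. A stand-in with String in place of the TaskType enum:
class UnparsedModelSketch {
    record UnparsedModel(String inferenceEntityId, String taskType, String service,
        java.util.Map<String, Object> settings, java.util.Map<String, Object> secrets) {}

    public static void main(String[] args) {
        var model = new UnparsedModel("default-sparse", "sparse_embedding", "my_service",
            java.util.Map.of("a", "b"), java.util.Map.of("secret", "abc"));
        System.out.println(model.inferenceEntityId() + " -> " + model.taskType());
    }
}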
assertThat(modelHolder.get().secrets().keySet(), hasSize(1)); var secretSettings = (Map) modelHolder.get().secrets().get("secret_settings"); assertThat(secretSettings.get("secret"), equalTo(secret)); + assertReturnModelIsModifiable(modelHolder.get()); // get model without secrets blockingCall(listener -> modelRegistry.getModel(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get().secrets().keySet(), empty()); + assertReturnModelIsModifiable(modelHolder.get()); + } + + public void testGetAllModels_WithDefaults() throws Exception { + var service = "foo"; + var secret = "abc"; + int configuredModelCount = 10; + int defaultModelCount = 2; + int totalModelCount = 12; + + var defaultConfigs = new HashMap(); + for (int i = 0; i < defaultModelCount; i++) { + var id = "default-" + i; + defaultConfigs.put(id, createUnparsedConfig(id, randomFrom(TaskType.values()), service, secret)); + } + defaultConfigs.values().forEach(modelRegistry::addDefaultConfiguration); + + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + var createdModels = new HashMap(); + for (int i = 0; i < configuredModelCount; i++) { + var id = randomAlphaOfLength(5) + i; + var model = createModel(id, randomFrom(TaskType.values()), service); + createdModels.put(id, model); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + assertNull(exceptionHolder.get()); + } + + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); + assertThat(modelHolder.get(), hasSize(totalModelCount)); + var getAllModels = modelHolder.get(); + assertReturnModelIsModifiable(modelHolder.get().get(0)); + + // sort in the same order as the returned models + var ids = new ArrayList<>(defaultConfigs.keySet().stream().toList()); + ids.addAll(createdModels.keySet().stream().toList()); + ids.sort(String::compareTo); + for (int i = 0; i < totalModelCount; i++) { + var id = ids.get(i); + assertEquals(id, getAllModels.get(i).inferenceEntityId()); + if (id.startsWith("default")) { + assertEquals(defaultConfigs.get(id).taskType(), getAllModels.get(i).taskType()); + assertEquals(defaultConfigs.get(id).service(), getAllModels.get(i).service()); + } else { + assertEquals(createdModels.get(id).getTaskType(), getAllModels.get(i).taskType()); + assertEquals(createdModels.get(id).getConfigurations().getService(), getAllModels.get(i).service()); + } + } + } + + public void testGetAllModels_OnlyDefaults() throws Exception { + var service = "foo"; + var secret = "abc"; + int defaultModelCount = 2; + + var defaultConfigs = new HashMap(); + for (int i = 0; i < defaultModelCount; i++) { + var id = "default-" + i; + defaultConfigs.put(id, createUnparsedConfig(id, randomFrom(TaskType.values()), service, secret)); + } + defaultConfigs.values().forEach(modelRegistry::addDefaultConfiguration); + + AtomicReference exceptionHolder = new AtomicReference<>(); + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + assertNull(exceptionHolder.get()); + assertThat(modelHolder.get(), hasSize(2)); + var getAllModels = modelHolder.get(); + assertReturnModelIsModifiable(modelHolder.get().get(0)); + + // sort in the same order as the returned models + var ids = new 
ArrayList<>(defaultConfigs.keySet().stream().toList()); + ids.sort(String::compareTo); + for (int i = 0; i < defaultModelCount; i++) { + var id = ids.get(i); + assertEquals(id, getAllModels.get(i).inferenceEntityId()); + assertEquals(defaultConfigs.get(id).taskType(), getAllModels.get(i).taskType()); + assertEquals(defaultConfigs.get(id).service(), getAllModels.get(i).service()); + } + } + + public void testGet_WithDefaults() throws InterruptedException { + var service = "foo"; + var secret = "abc"; + + var defaultSparse = createUnparsedConfig("default-sparse", TaskType.SPARSE_EMBEDDING, service, secret); + var defaultText = createUnparsedConfig("default-text", TaskType.TEXT_EMBEDDING, service, secret); + + modelRegistry.addDefaultConfiguration(defaultSparse); + modelRegistry.addDefaultConfiguration(defaultText); + + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + var configured1 = createModel(randomAlphaOfLength(5) + 1, randomFrom(TaskType.values()), service); + var configured2 = createModel(randomAlphaOfLength(5) + 1, randomFrom(TaskType.values()), service); + blockingCall(listener -> modelRegistry.storeModel(configured1, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + blockingCall(listener -> modelRegistry.storeModel(configured2, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + assertNull(exceptionHolder.get()); + + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModel("default-sparse", listener), modelHolder, exceptionHolder); + assertEquals("default-sparse", modelHolder.get().inferenceEntityId()); + assertEquals(TaskType.SPARSE_EMBEDDING, modelHolder.get().taskType()); + assertReturnModelIsModifiable(modelHolder.get()); + + blockingCall(listener -> modelRegistry.getModel("default-text", listener), modelHolder, exceptionHolder); + assertEquals("default-text", modelHolder.get().inferenceEntityId()); + assertEquals(TaskType.TEXT_EMBEDDING, modelHolder.get().taskType()); + + blockingCall(listener -> modelRegistry.getModel(configured1.getInferenceEntityId(), listener), modelHolder, exceptionHolder); + assertEquals(configured1.getInferenceEntityId(), modelHolder.get().inferenceEntityId()); + assertEquals(configured1.getTaskType(), modelHolder.get().taskType()); + } + + public void testGetByTaskType_WithDefaults() throws Exception { + var service = "foo"; + var secret = "abc"; + + var defaultSparse = createUnparsedConfig("default-sparse", TaskType.SPARSE_EMBEDDING, service, secret); + var defaultText = createUnparsedConfig("default-text", TaskType.TEXT_EMBEDDING, service, secret); + var defaultChat = createUnparsedConfig("default-chat", TaskType.COMPLETION, service, secret); + + modelRegistry.addDefaultConfiguration(defaultSparse); + modelRegistry.addDefaultConfiguration(defaultText); + modelRegistry.addDefaultConfiguration(defaultChat); + + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + var configuredSparse = createModel("configured-sparse", TaskType.SPARSE_EMBEDDING, service); + var configuredText = createModel("configured-text", TaskType.TEXT_EMBEDDING, service); + var configuredRerank = createModel("configured-rerank", TaskType.RERANK, service); + blockingCall(listener -> modelRegistry.storeModel(configuredSparse, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), 
is(true)); + blockingCall(listener -> modelRegistry.storeModel(configuredText, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + blockingCall(listener -> modelRegistry.storeModel(configuredRerank, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + assertNull(exceptionHolder.get()); + + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder); + if (exceptionHolder.get() != null) { + throw exceptionHolder.get(); + } + assertNull(exceptionHolder.get()); + assertThat(modelHolder.get(), hasSize(2)); + assertEquals("configured-sparse", modelHolder.get().get(0).inferenceEntityId()); + assertEquals("default-sparse", modelHolder.get().get(1).inferenceEntityId()); + + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.TEXT_EMBEDDING, listener), modelHolder, exceptionHolder); + assertThat(modelHolder.get(), hasSize(2)); + assertEquals("configured-text", modelHolder.get().get(0).inferenceEntityId()); + assertEquals("default-text", modelHolder.get().get(1).inferenceEntityId()); + assertReturnModelIsModifiable(modelHolder.get().get(0)); + + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.RERANK, listener), modelHolder, exceptionHolder); + assertThat(modelHolder.get(), hasSize(1)); + assertEquals("configured-rerank", modelHolder.get().get(0).inferenceEntityId()); + + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.COMPLETION, listener), modelHolder, exceptionHolder); + assertThat(modelHolder.get(), hasSize(1)); + assertEquals("default-chat", modelHolder.get().get(0).inferenceEntityId()); + assertReturnModelIsModifiable(modelHolder.get().get(0)); + } + + @SuppressWarnings("unchecked") + private void assertReturnModelIsModifiable(UnparsedModel unparsedModel) { + var settings = unparsedModel.settings(); + if (settings != null) { + var serviceSettings = (Map) settings.get("service_settings"); + if (serviceSettings != null && serviceSettings.size() > 0) { + var itr = serviceSettings.entrySet().iterator(); + itr.next(); + itr.remove(); + } + + var taskSettings = (Map) settings.get("task_settings"); + if (taskSettings != null && taskSettings.size() > 0) { + var itr = taskSettings.entrySet().iterator(); + itr.next(); + itr.remove(); + } + + if (unparsedModel.secrets() != null && unparsedModel.secrets().size() > 0) { + var itr = unparsedModel.secrets().entrySet().iterator(); + itr.next(); + itr.remove(); + } + } } private Model buildElserModelConfig(String inferenceEntityId, TaskType taskType) { @@ -327,6 +528,10 @@ public static Model createModelWithSecrets(String inferenceEntityId, TaskType ta ); } + public static UnparsedModel createUnparsedConfig(String inferenceEntityId, TaskType taskType, String service, String secret) { + return new UnparsedModel(inferenceEntityId, taskType, service, Map.of("a", "b"), Map.of("secret", secret)); + } + private static class TestModelOfAnyKind extends ModelConfigurations { record TestModelServiceSettings() implements ServiceSettings { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index d56b9fe21cd50..f3cefa04c2911 100644 --- 
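// assertReturnModelIsModifiable above proves the registry hands out copies of its in-memory
// defaults: it removes one entry from each nested map, which only works on a mutable copy
// (the registry copies the nested maps; one level is enough for this sketch). Returning the
// shared map itself would fail exactly as sketched here:
class DeepCopySketch {
    public static void main(String[] args) {
        java.util.Map<String, Object> shared = java.util.Map.of("model_id", ".elser_model_2", "num_threads", 1);
        var copy = new java.util.HashMap<>(shared); // what a copying registry returns
        var itr = copy.entrySet().iterator();
        itr.next();
        itr.remove(); // fine on the copy, mirrors the test's mutation
        try {
            var sharedItr = shared.entrySet().iterator();
            sharedItr.next();
            sharedItr.remove(); // the shared immutable view refuses
        } catch (UnsupportedOperationException e) {
            System.out.println("mutating the registry's own map would fail: " + e);
        }
    }
}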
a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.ClusterSettings; @@ -34,7 +33,6 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; @@ -234,11 +232,7 @@ private static class RandomString implements ChunkedToXContent { @Override public Iterator toXContentChunked(ToXContent.Params params) { var randomString = randomUnicodeOfLengthBetween(2, 20); - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field("delta", randomString), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContent.builder(params).object(b -> b.field("delta", randomString)); } } @@ -271,7 +265,7 @@ public void writeTo(StreamOutput out) { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.field("result", randomUnicodeOfLengthBetween(2, 20)); + return ChunkedToXContent.builder(params).field("result", randomUnicodeOfLengthBetween(2, 20)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java new file mode 100644 index 0000000000000..2a764dabd62ae --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.util.FeatureFlag; + +public class DefaultElserFeatureFlag { + + private DefaultElserFeatureFlag() {} + + private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("inference_default_elser"); + + public static boolean isEnabled() { + return FEATURE_FLAG.isEnabled(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 0ab395f4bfa39..dbb9130ab91e1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -210,6 +210,9 @@ public Collection createComponents(PluginServices services) { // reference correctly var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); registry.init(services.client()); + for (var service : registry.getServices().values()) { + service.defaultConfigs().forEach(modelRegistry::addDefaultConfiguration); + } inferenceServiceRegistry.set(registry); var actionFilter = new ShardBulkInferenceActionFilter(registry, modelRegistry); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java index 3c893f8870627..829a6b6c67ff9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -91,7 +92,7 @@ private void doExecuteForked( ClusterState state, ActionListener masterListener ) { - SubscribableListener.newForked(modelConfigListener -> { + SubscribableListener.newForked(modelConfigListener -> { // Get the model from the registry modelRegistry.getModel(request.getInferenceEndpointId(), modelConfigListener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index a1f33afa05b5c..5ee1e40869dbc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -112,7 +113,7 @@ private void getModelsByTaskType(TaskType taskType, ActionListener unparsedModels) { + 
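// DefaultElserFeatureFlag above is the usual thin wrapper around a process-wide toggle; tests
// such as InferenceCrudIT then branch on it for expected endpoint counts. A stand-in sketch,
// assuming flags arrive as -D system properties (the real FeatureFlag's property naming and
// snapshot-build defaults may differ):
class FeatureFlagSketch {
    static final class Flag {
        private final boolean enabled;

        Flag(String name) {
            this.enabled = Boolean.getBoolean("es." + name + "_feature_flag_enabled");
        }

        boolean isEnabled() {
            return enabled;
        }
    }

    static final Flag DEFAULT_ELSER = new Flag("inference_default_elser");

    public static void main(String[] args) {
        int expectedEndpoints = DEFAULT_ELSER.isEnabled() ? 10 : 9; // as in InferenceCrudIT#testGet
        System.out.println("expecting " + expectedEndpoints + " endpoints");
    }
}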
private GetInferenceModelAction.Response parseModels(List unparsedModels) { var parsedModels = new ArrayList(); for (var unparsedModel : unparsedModels) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index d2a73b7df77c1..e046e2aad463b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceService; @@ -19,6 +18,7 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -43,7 +43,6 @@ public class TransportInferenceAction extends HandledTransportAction listener) { - ActionListener getModelListener = listener.delegateFailureAndWrap((delegate, unparsedModel) -> { + ActionListener getModelListener = listener.delegateFailureAndWrap((delegate, unparsedModel) -> { var service = serviceRegistry.getService(unparsedModel.service()); if (service.isEmpty()) { - delegate.onFailure( - new ElasticsearchStatusException( - "Unknown service [{}] for model [{}]. ", - RestStatus.INTERNAL_SERVER_ERROR, - unparsedModel.service(), - unparsedModel.inferenceEntityId() - ) - ); + listener.onFailure(unknownServiceException(unparsedModel.service(), request.getInferenceEntityId())); return; } if (request.getTaskType().isAnyOrSame(unparsedModel.taskType()) == false) { // not the wildcard task type and not the model task type - delegate.onFailure( - new ElasticsearchStatusException( - "Incompatible task_type, the requested type [{}] does not match the model type [{}]", - RestStatus.BAD_REQUEST, - request.getTaskType(), - unparsedModel.taskType() - ) - ); + listener.onFailure(incompatibleTaskTypeException(request.getTaskType(), unparsedModel.taskType())); return; } @@ -98,7 +83,6 @@ protected void doExecute(Task task, InferenceAction.Request request, ActionListe unparsedModel.settings(), unparsedModel.secrets() ); - inferenceStats.incrementRequestCount(model); inferOnService(model, request, service.get(), delegate); }); @@ -112,6 +96,7 @@ private void inferOnService( ActionListener listener ) { if (request.isStreaming() == false || service.canStream(request.getTaskType())) { + inferenceStats.incrementRequestCount(model); service.infer( model, request.getQuery(), @@ -160,5 +145,19 @@ private ActionListener createListener( }); } return listener.delegateFailureAndWrap((l, inferenceResults) -> l.onResponse(new InferenceAction.Response(inferenceResults))); - }; + } + + private static ElasticsearchStatusException unknownServiceException(String service, String inferenceId) { + return new ElasticsearchStatusException("Unknown service [{}] for model [{}]. 
", RestStatus.BAD_REQUEST, service, inferenceId); + } + + private static ElasticsearchStatusException incompatibleTaskTypeException(TaskType requested, TaskType expected) { + return new ElasticsearchStatusException( + "Incompatible task_type, the requested type [{}] does not match the model type [{}]", + RestStatus.BAD_REQUEST, + requested, + expected + ); + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index ade0748ef10bf..a4eb94c2674d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -35,6 +35,7 @@ import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; @@ -211,9 +212,9 @@ private void executeShardBulkInferenceAsync( final Releasable onFinish ) { if (inferenceProvider == null) { - ActionListener modelLoadingListener = new ActionListener<>() { + ActionListener modelLoadingListener = new ActionListener<>() { @Override - public void onResponse(ModelRegistry.UnparsedModel unparsedModel) { + public void onResponse(UnparsedModel unparsedModel) { var service = inferenceServiceRegistry.getService(unparsedModel.service()); if (service.isEmpty() == false) { var provider = new InferenceProvider( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 00d44b8e7a0e2..a33a49cc52d94 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -50,6 +50,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; public class SemanticQueryBuilder extends AbstractQueryBuilder { + // **** THE semantic_text.inner_hits CLUSTER FEATURE IS DEFUNCT, NEVER USE IT **** public static final NodeFeature SEMANTIC_TEXT_INNER_HITS = new NodeFeature("semantic_text.inner_hits"); public static final String NAME = "semantic"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index a6e4fcae7169f..d756c0ef26f14 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -32,6 +32,7 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ 
-48,6 +49,8 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Function; @@ -58,32 +61,19 @@ public class ModelRegistry { public record ModelConfigMap(Map config, Map secrets) {} - /** - * Semi parsed model where inference entity id, task type and service - * are known but the settings are not parsed. - */ - public record UnparsedModel( - String inferenceEntityId, - TaskType taskType, - String service, - Map settings, - Map secrets - ) { - - public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) { - if (modelConfigMap.config() == null) { - throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); - } - String inferenceEntityId = ServiceUtils.removeStringOrThrowIfNull( - modelConfigMap.config(), - ModelConfigurations.INDEX_ONLY_ID_FIELD_NAME - ); - String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); - String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); - TaskType taskType = TaskType.fromString(taskTypeStr); - - return new UnparsedModel(inferenceEntityId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); + public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) { + if (modelConfigMap.config() == null) { + throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); } + String inferenceEntityId = ServiceUtils.removeStringOrThrowIfNull( + modelConfigMap.config(), + ModelConfigurations.INDEX_ONLY_ID_FIELD_NAME + ); + String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); + String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); + TaskType taskType = TaskType.fromString(taskTypeStr); + + return new UnparsedModel(inferenceEntityId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); } private static final String TASK_TYPE_FIELD = "task_type"; @@ -91,9 +81,27 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) private static final Logger logger = LogManager.getLogger(ModelRegistry.class); private final OriginSettingClient client; + private Map defaultConfigs; public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); + this.defaultConfigs = new HashMap<>(); + } + + public void addDefaultConfiguration(UnparsedModel serviceDefaultConfig) { + if (defaultConfigs.containsKey(serviceDefaultConfig.inferenceEntityId())) { + throw new IllegalStateException( + "Cannot add default endpoint to the inference endpoint registry with duplicate inference id [" + + serviceDefaultConfig.inferenceEntityId() + + "] declared by service [" + + serviceDefaultConfig.service() + + "]. The inference Id is already used by [" + + defaultConfigs.get(serviceDefaultConfig.inferenceEntityId()).service() + + "] service."
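+ // Registering two defaults with the same id indicates a bug in service wiring, so fail fast with an unchecked exception rather than a request-level error.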
+ ); + } + + defaultConfigs.put(serviceDefaultConfig.inferenceEntityId(), serviceDefaultConfig); } /** @@ -102,6 +110,11 @@ public ModelRegistry(Client client) { * @param listener Model listener */ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { + if (defaultConfigs.containsKey(inferenceEntityId)) { + listener.onResponse(deepCopyDefaultConfig(defaultConfigs.get(inferenceEntityId))); + return; + } + ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations and secrets if (searchResponse.getHits().getHits().length == 0) { @@ -109,7 +122,7 @@ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { + if (defaultConfigs.containsKey(inferenceEntityId)) { + listener.onResponse(deepCopyDefaultConfig(defaultConfigs.get(inferenceEntityId))); + return; + } + ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations and secrets if (searchResponse.getHits().getHits().length == 0) { @@ -135,7 +153,7 @@ public void getModel(String inferenceEntityId, ActionListener lis return; } - var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); assert modelConfigs.size() == 1; delegate.onResponse(modelConfigs.get(0)); }); @@ -162,14 +180,29 @@ private ResourceNotFoundException inferenceNotFoundException(String inferenceEnt */ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { + var defaultConfigsForTaskType = defaultConfigs.values() + .stream() + .filter(m -> m.taskType() == taskType) + .map(ModelRegistry::deepCopyDefaultConfig) + .toList(); + // Not an error if no models of this task_type - if (searchResponse.getHits().getHits().length == 0) { + if (searchResponse.getHits().getHits().length == 0 && defaultConfigsForTaskType.isEmpty()) { delegate.onResponse(List.of()); return; } - var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); - delegate.onResponse(modelConfigs); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); + + if (defaultConfigsForTaskType.isEmpty() == false) { + var allConfigs = new ArrayList(); + allConfigs.addAll(modelConfigs); + allConfigs.addAll(defaultConfigsForTaskType); + allConfigs.sort(Comparator.comparing(UnparsedModel::inferenceEntityId)); + delegate.onResponse(allConfigs); + } else { + delegate.onResponse(modelConfigs); + } }); QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(TASK_TYPE_FIELD, taskType.toString())); @@ -191,14 +224,19 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { - // Not an error if no models of this task_type - if (searchResponse.getHits().getHits().length == 0) { + var defaults = defaultConfigs.values().stream().map(ModelRegistry::deepCopyDefaultConfig).toList(); + + if (searchResponse.getHits().getHits().length == 0 && defaults.isEmpty()) { delegate.onResponse(List.of()); return; } - var modelConfigs = 
parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); - delegate.onResponse(modelConfigs); + var foundConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); + var allConfigs = new ArrayList(); + allConfigs.addAll(foundConfigs); + allConfigs.addAll(defaults); + allConfigs.sort(Comparator.comparing(UnparsedModel::inferenceEntityId)); + delegate.onResponse(allConfigs); }); // In theory the index should only contain model config documents @@ -216,7 +254,7 @@ public void getAllModels(ActionListener> listener) { client.search(modelSearch, searchListener); } - private List parseHitsAsModels(SearchHits hits) { + private ArrayList parseHitsAsModels(SearchHits hits) { var modelConfigs = new ArrayList(); for (var hit : hits) { modelConfigs.add(new ModelConfigMap(hit.getSourceAsMap(), Map.of())); @@ -393,4 +431,57 @@ private static IndexRequest createIndexRequest(String docId, String indexName, T private QueryBuilder documentIdQuery(String inferenceEntityId) { return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId))); } + + static UnparsedModel deepCopyDefaultConfig(UnparsedModel other) { + // Because the default config uses immutable maps + return new UnparsedModel( + other.inferenceEntityId(), + other.taskType(), + other.service(), + copySettingsMap(other.settings()), + copySecretsMap(other.secrets()) + ); + } + + @SuppressWarnings("unchecked") + static Map copySettingsMap(Map other) { + var result = new HashMap(); + + var serviceSettings = (Map) other.get(ModelConfigurations.SERVICE_SETTINGS); + if (serviceSettings != null) { + var copiedServiceSettings = copyMap1LevelDeep(serviceSettings); + result.put(ModelConfigurations.SERVICE_SETTINGS, copiedServiceSettings); + } + + var taskSettings = (Map) other.get(ModelConfigurations.TASK_SETTINGS); + if (taskSettings != null) { + var copiedTaskSettings = copyMap1LevelDeep(taskSettings); + result.put(ModelConfigurations.TASK_SETTINGS, copiedTaskSettings); + } + + var chunkSettings = (Map) other.get(ModelConfigurations.CHUNKING_SETTINGS); + if (chunkSettings != null) { + var copiedChunkSettings = copyMap1LevelDeep(chunkSettings); + result.put(ModelConfigurations.CHUNKING_SETTINGS, copiedChunkSettings); + } + + return result; + } + + static Map copySecretsMap(Map other) { + return copyMap1LevelDeep(other); + } + + @SuppressWarnings("unchecked") + static Map copyMap1LevelDeep(Map other) { + var result = new HashMap(); + for (var entry : other.entrySet()) { + if (entry.getValue() instanceof Map) { + result.put(entry.getKey(), new HashMap<>((Map) entry.getValue())); + } else { + result.put(entry.getKey(), entry.getValue()); + } + } + return result; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index 23e806e01300a..0dd41db2f016c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; 
+import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.SubscribableListener; @@ -31,6 +32,7 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; +import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import java.io.IOException; @@ -80,7 +82,6 @@ public BaseElasticsearchInternalService( @Override public void start(Model model, ActionListener finalListener) { if (model instanceof ElasticsearchInternalModel esModel) { - if (supportedTaskTypes().contains(model.getTaskType()) == false) { finalListener.onFailure( new IllegalStateException(TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name())) @@ -149,7 +150,7 @@ public void putModel(Model model, ActionListener listener) { } } - private void putBuiltInModel(String modelId, ActionListener listener) { + protected void putBuiltInModel(String modelId, ActionListener listener) { var input = new TrainedModelInput(List.of("text_field")); // by convention text_field is used var config = TrainedModelConfig.builder().setInput(input).setModelId(modelId).validate(true).build(); PutTrainedModelAction.Request putRequest = new PutTrainedModelAction.Request(config, false, true); @@ -258,4 +259,27 @@ public static InferModelAction.Request buildInferenceRequest( request.setChunked(chunk); return request; } + + protected abstract boolean isDefaultId(String inferenceId); + + protected void maybeStartDeployment( + ElasticsearchInternalModel model, + Exception e, + InferModelAction.Request request, + ActionListener listener + ) { + if (DefaultElserFeatureFlag.isEnabled() == false) { + listener.onFailure(e); + return; + } + + if (isDefaultId(model.getInferenceEntityId()) && ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { + this.start( + model, + listener.delegateFailureAndWrap((l, started) -> { client.execute(InferModelAction.INSTANCE, request, listener); }) + ); + } else { + listener.onFailure(e); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index e274c641e30be..9b4c0e50bdebe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -26,6 +26,7 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; @@ -73,6 +74,8 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 ); + public static final String DEFAULT_ELSER_ID = ".elser-2"; + private static final Logger 
logger = LogManager.getLogger(ElasticsearchInternalService.class); private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(ElasticsearchInternalService.class); @@ -100,6 +103,17 @@ public void parseRequestConfig( Map config, ActionListener modelListener ) { + if (inferenceEntityId.equals(DEFAULT_ELSER_ID)) { + modelListener.onFailure( + new ElasticsearchStatusException( + "[{}] is a reserved inference Id. Cannot create a new inference endpoint with a reserved Id", + RestStatus.BAD_REQUEST, + inferenceEntityId + ) + ); + return; + } + try { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMap(config, ModelConfigurations.TASK_SETTINGS); @@ -459,20 +473,24 @@ public void infer( TimeValue timeout, ActionListener listener ) { - var taskType = model.getConfigurations().getTaskType(); - if (TaskType.TEXT_EMBEDDING.equals(taskType)) { - inferTextEmbedding(model, input, inputType, timeout, listener); - } else if (TaskType.RERANK.equals(taskType)) { - inferRerank(model, query, input, inputType, timeout, taskSettings, listener); - } else if (TaskType.SPARSE_EMBEDDING.equals(taskType)) { - inferSparseEmbedding(model, input, inputType, timeout, listener); + if (model instanceof ElasticsearchInternalModel esModel) { + var taskType = model.getConfigurations().getTaskType(); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + inferTextEmbedding(esModel, input, inputType, timeout, listener); + } else if (TaskType.RERANK.equals(taskType)) { + inferRerank(esModel, query, input, inputType, timeout, taskSettings, listener); + } else if (TaskType.SPARSE_EMBEDDING.equals(taskType)) { + inferSparseEmbedding(esModel, input, inputType, timeout, listener); + } else { + throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), RestStatus.BAD_REQUEST); + } } else { - throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), RestStatus.BAD_REQUEST); + listener.onFailure(notElasticsearchModelException(model)); } } public void inferTextEmbedding( - Model model, + ElasticsearchInternalModel model, List inputs, InputType inputType, TimeValue timeout, @@ -487,17 +505,19 @@ public void inferTextEmbedding( false ); - client.execute( - InferModelAction.INSTANCE, - request, - listener.delegateFailureAndWrap( - (l, inferenceResult) -> l.onResponse(InferenceTextEmbeddingFloatResults.of(inferenceResult.getInferenceResults())) - ) + ActionListener mlResultsListener = listener.delegateFailureAndWrap( + (l, inferenceResult) -> l.onResponse(InferenceTextEmbeddingFloatResults.of(inferenceResult.getInferenceResults())) + ); + + var maybeDeployListener = mlResultsListener.delegateResponse( + (l, exception) -> maybeStartDeployment(model, exception, request, mlResultsListener) ); + + client.execute(InferModelAction.INSTANCE, request, maybeDeployListener); } public void inferSparseEmbedding( - Model model, + ElasticsearchInternalModel model, List inputs, InputType inputType, TimeValue timeout, @@ -512,17 +532,19 @@ public void inferSparseEmbedding( false ); - client.execute( - InferModelAction.INSTANCE, - request, - listener.delegateFailureAndWrap( - (l, inferenceResult) -> l.onResponse(SparseEmbeddingResults.of(inferenceResult.getInferenceResults())) - ) + ActionListener mlResultsListener = listener.delegateFailureAndWrap( + (l, inferenceResult) -> l.onResponse(SparseEmbeddingResults.of(inferenceResult.getInferenceResults())) ); + + var maybeDeployListener = 
mlResultsListener.delegateResponse( + (l, exception) -> maybeStartDeployment(model, exception, request, mlResultsListener) + ); + + client.execute(InferModelAction.INSTANCE, request, maybeDeployListener); } public void inferRerank( - Model model, + ElasticsearchInternalModel model, String query, List inputs, InputType inputType, @@ -586,26 +608,33 @@ public void chunkedInfer( return; } - var configUpdate = chunkingOptions != null - ? new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) - : new TokenizationConfigUpdate(null, null); + if (model instanceof ElasticsearchInternalModel esModel) { - var request = buildInferenceRequest( - model.getConfigurations().getInferenceEntityId(), - configUpdate, - input, - inputType, - timeout, - true - ); + var configUpdate = chunkingOptions != null + ? new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) + : new TokenizationConfigUpdate(null, null); - client.execute( - InferModelAction.INSTANCE, - request, - listener.delegateFailureAndWrap( + var request = buildInferenceRequest( + model.getConfigurations().getInferenceEntityId(), + configUpdate, + input, + inputType, + timeout, + true + ); + + ActionListener mlResultsListener = listener.delegateFailureAndWrap( (l, inferenceResult) -> l.onResponse(translateToChunkedResults(inferenceResult.getInferenceResults())) - ) - ); + ); + + var maybeDeployListener = mlResultsListener.delegateResponse( + (l, exception) -> maybeStartDeployment(esModel, exception, request, mlResultsListener) + ); + + client.execute(InferModelAction.INSTANCE, request, maybeDeployListener); + } else { + listener.onFailure(notElasticsearchModelException(model)); + } } private static List translateToChunkedResults(List inferenceResults) { @@ -671,4 +700,42 @@ private RankedDocsResults textSimilarityResultsToRankedDocs( Collections.sort(rankings); return new RankedDocsResults(rankings); } + + @Override + public List defaultConfigs() { + // TODO Chunking settings + Map elserSettings = Map.of( + ModelConfigurations.SERVICE_SETTINGS, + Map.of( + ElasticsearchInternalServiceSettings.MODEL_ID, + ElserModels.ELSER_V2_MODEL, // TODO pick model depending on platform + ElasticsearchInternalServiceSettings.NUM_THREADS, + 1, + ElasticsearchInternalServiceSettings.ADAPTIVE_ALLOCATIONS, + Map.of( + "enabled", + Boolean.TRUE, + "min_number_of_allocations", + 1, + "max_number_of_allocations", + 8 // no max? 
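+ // Adaptive allocations let ML scale the deployment between 1 and 8 allocations based on load, so no fixed num_allocations is set here.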
+ ) + ) + ); + + return List.of( + new UnparsedModel( + DEFAULT_ELSER_ID, + TaskType.SPARSE_EMBEDDING, + NAME, + elserSettings, + Map.of() // no secrets + ) + ); + } + + @Override + protected boolean isDefaultId(String inferenceId) { + return DEFAULT_ELSER_ID.equals(inferenceId); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index d78ea7933e836..770e6e3cb9cf4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; @@ -266,12 +267,11 @@ private static ShardBulkInferenceActionFilter createFilter(ThreadPool threadPool ModelRegistry modelRegistry = mock(ModelRegistry.class); Answer unparsedModelAnswer = invocationOnMock -> { String id = (String) invocationOnMock.getArguments()[0]; - ActionListener listener = (ActionListener) invocationOnMock - .getArguments()[1]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; var model = modelMap.get(id); if (model != null) { listener.onResponse( - new ModelRegistry.UnparsedModel( + new UnparsedModel( model.getInferenceEntityId(), model.getTaskType(), model.getServiceSettings().model(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index fbd8ccd621559..75c370fd4d3fb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchResponseUtils; @@ -38,9 +39,12 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -68,7 +72,7 @@ public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned() var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); ResourceNotFoundException exception = 
expectThrows(ResourceNotFoundException.class, () -> listener.actionGet(TIMEOUT)); @@ -82,7 +86,7 @@ public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIn var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> listener.actionGet(TIMEOUT)); @@ -99,7 +103,7 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); @@ -116,7 +120,7 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); @@ -150,7 +154,7 @@ public void testGetModelWithSecrets() { var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); var modelConfig = listener.actionGet(TIMEOUT); @@ -179,7 +183,7 @@ public void testGetModelNoSecrets() { var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModel("1", listener); registry.getModel("1", listener); @@ -288,6 +292,80 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { ); } + @SuppressWarnings("unchecked") + public void testDeepCopyDefaultConfig() { + { + var toCopy = new UnparsedModel("tocopy", randomFrom(TaskType.values()), "service-a", Map.of(), Map.of()); + var copied = ModelRegistry.deepCopyDefaultConfig(toCopy); + assertThat(copied, not(sameInstance(toCopy))); + assertThat(copied.taskType(), is(toCopy.taskType())); + assertThat(copied.service(), is(toCopy.service())); + assertThat(copied.secrets(), not(sameInstance(toCopy.secrets()))); + assertThat(copied.secrets(), is(toCopy.secrets())); + // Test copied is a modifiable map + copied.secrets().put("foo", "bar"); + + assertThat(copied.settings(), not(sameInstance(toCopy.settings()))); + assertThat(copied.settings(), is(toCopy.settings())); + // Test copied is a modifiable map + copied.settings().put("foo", "bar"); + } + + { + Map secretsMap = Map.of("secret", "value"); + Map chunking = Map.of("strategy", "word"); + Map task = Map.of("user", "name"); + Map service = Map.of("num_threads", 1, "adaptive_allocations", Map.of("enabled", true)); + Map settings = Map.of("chunking_settings", chunking, "service_settings", service, "task_settings", task); + + var toCopy = new UnparsedModel("tocopy", randomFrom(TaskType.values()), "service-a", settings, secretsMap); + var copied = ModelRegistry.deepCopyDefaultConfig(toCopy); + assertThat(copied, not(sameInstance(toCopy))); + + assertThat(copied.secrets(), not(sameInstance(toCopy.secrets()))); + assertThat(copied.secrets(), is(toCopy.secrets())); + // Test copied is a modifiable map + copied.secrets().remove("secret"); + + assertThat(copied.settings(), not(sameInstance(toCopy.settings()))); + assertThat(copied.settings(), 
is(toCopy.settings())); + // Test copied is a modifiable map + var chunkOut = (Map) copied.settings().get("chunking_settings"); + assertThat(chunkOut, is(chunking)); + chunkOut.remove("strategy"); + + var taskOut = (Map) copied.settings().get("task_settings"); + assertThat(taskOut, is(task)); + taskOut.remove("user"); + + var serviceOut = (Map) copied.settings().get("service_settings"); + assertThat(serviceOut, is(service)); + var adaptiveOut = (Map) serviceOut.remove("adaptive_allocations"); + assertThat(adaptiveOut, is(Map.of("enabled", true))); + adaptiveOut.remove("enabled"); + } + } + + public void testDuplicateDefaultIds() { + var client = mockBulkClient(); + var registry = new ModelRegistry(client); + + var id = "my-inference"; + + registry.addDefaultConfiguration(new UnparsedModel(id, randomFrom(TaskType.values()), "service-a", Map.of(), Map.of())); + var ise = expectThrows( + IllegalStateException.class, + () -> registry.addDefaultConfiguration(new UnparsedModel(id, randomFrom(TaskType.values()), "service-b", Map.of(), Map.of())) + ); + assertThat( + ise.getMessage(), + containsString( + "Cannot add default endpoint to the inference endpoint registry with duplicate inference id [my-inference] declared by " + "service [service-b]. The inference Id is already used by [service-a] service." + ) + ); + } + private Client mockBulkClient() { var client = mockClient(); when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client)); diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/ecs-tsdb@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/ecs-tsdb@mappings.yaml new file mode 100644 index 0000000000000..1c9d32a4289b9 --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/ecs-tsdb@mappings.yaml @@ -0,0 +1,19 @@ +version: ${xpack.oteldata.template.version} +_meta: + description: | + Default mappings that can be changed by users for + the OpenTelemetry metrics index template installed by x-pack + managed: true +template: + mappings: + dynamic_templates: + - ecs_ip: + mapping: + type: ip + path_match: [ "ip", "*.ip", "*_ip" ] + match_mapping_type: string + - all_strings_to_keywords: + mapping: + ignore_above: 1024 + type: keyword + match_mapping_type: string diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/logs-otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/logs-otel@mappings.yaml index f350997de9e01..107901adb834f 100644 --- a/x-pack/plugin/otel-data/src/main/resources/component-templates/logs-otel@mappings.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/logs-otel@mappings.yaml @@ -8,7 +8,7 @@ template: index: mode: logsdb sort: - field: [ "resource.attributes.host.name" ] + field: [ "resource.attributes.host.name", "@timestamp" ] mappings: properties: attributes: diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/traces-otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/traces-otel@mappings.yaml index a4c62efeed7a4..2b0d1ec536fa6 100644 --- a/x-pack/plugin/otel-data/src/main/resources/component-templates/traces-otel@mappings.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/traces-otel@mappings.yaml @@ -8,7 +8,7 @@ template: index: mode: logsdb sort: - field: [ "resource.attributes.host.name" ] + field: [ "resource.attributes.host.name", "@timestamp" ] mappings: _source: mode: synthetic @@ -44,7 +44,7 @@ template: 
dropped_events_count: type: long links: - store_array_source: true + synthetic_source_keep: arrays properties: trace_id: type: keyword diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-otel@template.yaml index c2a318f809b7d..3b4c3127bb71c 100644 --- a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-otel@template.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-otel@template.yaml @@ -14,6 +14,7 @@ composed_of: - semconv-resource-to-ecs@mappings - metrics@custom - metrics-otel@custom + - ecs-tsdb@mappings ignore_missing_component_templates: - metrics@custom - metrics-otel@custom diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.10m@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.10m@template.yaml new file mode 100644 index 0000000000000..f5033135120bc --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.10m@template.yaml @@ -0,0 +1,40 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_destination.10m.otel-*"] +priority: 130 +data_stream: + hidden: true +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-10m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-10m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 10m + name: + type: constant_keyword + value: service_destination diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.1m@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.1m@template.yaml new file mode 100644 index 0000000000000..9168062f30bfb --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.1m@template.yaml @@ -0,0 +1,39 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_destination.1m.otel-*"] +priority: 130 +data_stream: {} +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-1m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-1m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 1m + name: + type: constant_keyword + value: service_destination diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.60m@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.60m@template.yaml new file 
mode 100644 index 0000000000000..47c2d7d014322 --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_destination.60m@template.yaml @@ -0,0 +1,40 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_destination.60m.otel-*"] +priority: 130 +data_stream: + hidden: true +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-60m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-60m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 60m + name: + type: constant_keyword + value: service_destination diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.10m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.10m.otel@template.yaml new file mode 100644 index 0000000000000..c9438e8c27402 --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.10m.otel@template.yaml @@ -0,0 +1,40 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_summary.10m.otel-*"] +priority: 130 +data_stream: + hidden: true +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-10m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-10m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 10m + name: + type: constant_keyword + value: service_summary diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.1m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.1m.otel@template.yaml new file mode 100644 index 0000000000000..b29caa3fe34a7 --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.1m.otel@template.yaml @@ -0,0 +1,39 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_summary.1m.otel-*"] +priority: 130 +data_stream: {} +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-1m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-1m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + 
value: 1m + name: + type: constant_keyword + value: service_summary diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.60m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.60m.otel@template.yaml new file mode 100644 index 0000000000000..4cab3e41a1dfa --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_summary.60m.otel@template.yaml @@ -0,0 +1,40 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_summary.60m.otel-*"] +priority: 130 +data_stream: + hidden: true +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-60m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-60m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 60m + name: + type: constant_keyword + value: service_summary diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.10m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.10m.otel@template.yaml new file mode 100644 index 0000000000000..037f3546205d6 --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.10m.otel@template.yaml @@ -0,0 +1,40 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_transaction.10m.otel-*"] +priority: 130 +data_stream: + hidden: true +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-10m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-10m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 10m + name: + type: constant_keyword + value: service_transaction diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.1m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.1m.otel@template.yaml new file mode 100644 index 0000000000000..303ac2c406fd0 --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.1m.otel@template.yaml @@ -0,0 +1,39 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_transaction.1m.otel-*"] +priority: 130 +data_stream: {} +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-1m.otel@custom + - 
ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-1m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 1m + name: + type: constant_keyword + value: service_transaction diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.60m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.60m.otel@template.yaml new file mode 100644 index 0000000000000..ea42079ced4dd --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-service_transaction.60m.otel@template.yaml @@ -0,0 +1,40 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-service_transaction.60m.otel-*"] +priority: 130 +data_stream: + hidden: true +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-60m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-60m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 60m + name: + type: constant_keyword + value: service_transaction diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.10m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.10m.otel@template.yaml new file mode 100644 index 0000000000000..81e70cc3361fc --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.10m.otel@template.yaml @@ -0,0 +1,40 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-transaction.10m.otel-*"] +priority: 130 +data_stream: + hidden: true +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-10m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-10m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 10m + name: + type: constant_keyword + value: transaction diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.1m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.1m.otel@template.yaml new file mode 100644 index 0000000000000..c54b90bf8b683 --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.1m.otel@template.yaml @@ -0,0 +1,39 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-transaction.1m.otel-*"] +priority: 130 +data_stream: {} +allow_auto_create: true 
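+# Note: the 1m data stream stays visible (data_stream: {}), unlike the hidden 10m and 60m rollups of the same metric.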
+_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-1m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-1m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 1m + name: + type: constant_keyword + value: transaction diff --git a/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.60m.otel@template.yaml b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.60m.otel@template.yaml new file mode 100644 index 0000000000000..8afe8b87951c0 --- /dev/null +++ b/x-pack/plugin/otel-data/src/main/resources/index-templates/metrics-transaction.60m.otel@template.yaml @@ -0,0 +1,40 @@ +--- +version: ${xpack.oteldata.template.version} +index_patterns: ["metrics-transaction.60m.otel-*"] +priority: 130 +data_stream: + hidden: true +allow_auto_create: true +_meta: + description: aggregated APM metrics template installed by x-pack + managed: true +composed_of: + - metrics@tsdb-settings + - otel@mappings + - metrics-otel@mappings + - semconv-resource-to-ecs@mappings + - metrics@custom + - metrics-otel@custom + - metrics-60m.otel@custom + - ecs-tsdb@mappings +ignore_missing_component_templates: + - metrics@custom + - metrics-otel@custom + - metrics-60m.otel@custom +template: + settings: + index: + mode: time_series + mappings: + properties: + data_stream.type: + type: constant_keyword + value: metrics + metricset: + properties: + interval: + type: constant_keyword + value: 60m + name: + type: constant_keyword + value: transaction diff --git a/x-pack/plugin/otel-data/src/main/resources/resources.yaml b/x-pack/plugin/otel-data/src/main/resources/resources.yaml index ba219b09388fb..e32037901a49c 100644 --- a/x-pack/plugin/otel-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin otel-data. This must be increased whenever an existing template is # changed, in order for it to be updated on Elasticsearch upgrade. 
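+# version 4 adds the ecs-tsdb@mappings component template and the aggregated APM metrics index templates (transaction, service_transaction, service_summary, service_destination at 1m/10m/60m).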
-version: 3 +version: 4 component-templates: - otel@mappings @@ -9,7 +9,20 @@ component-templates: - semconv-resource-to-ecs@mappings - metrics-otel@mappings - traces-otel@mappings + - ecs-tsdb@mappings index-templates: - logs-otel@template - metrics-otel@template - traces-otel@template + - metrics-transaction.60m.otel@template + - metrics-transaction.10m.otel@template + - metrics-transaction.1m.otel@template + - metrics-service_transaction.60m.otel@template + - metrics-service_transaction.10m.otel@template + - metrics-service_transaction.1m.otel@template + - metrics-service_summary.60m.otel@template + - metrics-service_summary.10m.otel@template + - metrics-service_summary.1m.otel@template + - metrics-service_destination.60m@template + - metrics-service_destination.10m@template + - metrics-service_destination.1m@template diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml index 657453bf4ae9f..fc162d0647d08 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml @@ -70,3 +70,23 @@ setup: - match: { hits.hits.0.fields.error\.exception\.type: ["MyException"] } - match: { hits.hits.0.fields.error\.exception\.message: ["foo"] } - match: { hits.hits.0.fields.error\.stack_trace: ["Exception in thread \"main\" java.lang.RuntimeException: Test exception\n at com.example.GenerateTrace.methodB(GenerateTrace.java:13)\n at com.example.GenerateTrace.methodA(GenerateTrace.java:9)\n at com.example.GenerateTrace.main(GenerateTrace.java:5)"] } +--- +"resource.attributes.host.name @timestamp should be used as sort fields": + - do: + bulk: + index: logs-generic.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:49:33.467654000Z","data_stream":{"dataset":"generic.otel","namespace":"default"}, "body_text":"error1"}' + - is_false: errors + - do: + indices.get_data_stream: + name: logs-generic.otel-default + - set: { data_streams.0.indices.0.index_name: datastream-backing-index } + - do: + indices.get_settings: + index: $datastream-backing-index + - is_true: $datastream-backing-index + - match: { .$datastream-backing-index.settings.index.sort.field.0: "resource.attributes.host.name" } + - match: { .$datastream-backing-index.settings.index.sort.field.1: "@timestamp" } diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_traces_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_traces_tests.yml index abdb8d49d774c..d5b87c9b45116 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_traces_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_traces_tests.yml @@ -76,19 +76,47 @@ setup: - match: { hits.hits.0._source.links.1.trace_id: "4aaa9f33312b3dbb8b2c2c62bb7abe1a1" } - match: { hits.hits.0._source.links.1.span_id: "b3b7d1f1f1b4e1e1" } --- -"Default data_stream.type must be traces": +Conflicting attribute types: - do: bulk: index: traces-generic.otel-default refresh: true body: - create: {} - - 
'{"@timestamp":"2024-02-18T14:48:33.467654000Z","data_stream":{"dataset":"generic.otel","type":"traces","namespace":"default"},"resource":{"attributes":{"service.name":"OtelSample","telemetry.sdk.language":"dotnet","telemetry.sdk.name":"opentelemetry"}},"name":"foo","trace_id":"7bba9f33312b3dbb8b2c2c62bb7abe2d","span_id":"086e83747d0e381e","kind":"SERVER","status":{"code":"2xx"}}' + - "@timestamp": 2024-10-04T00:00:00 + attributes: + http.status_code: 200 + - create: {} + - "@timestamp": 2024-10-04T01:00:00 + attributes: + http.status_code: "foo" - is_false: errors - do: search: index: traces-generic.otel-default body: - fields: ["data_stream.type"] - - length: { hits.hits: 1 } - - match: { hits.hits.0.fields.data_stream\.type: ["traces"] } + fields: ["*"] + "sort" : [ "@timestamp" ] + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields.attributes\.http\.status_code: [200] } + - match: { hits.hits.1._ignored: ["attributes.http.status_code"] } +--- +"resource.attributes.host.name @timestamp should be used as sort fields": + - do: + bulk: + index: traces-generic.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:49:33.467654000Z","data_stream":{"dataset":"generic.otel","namespace":"default"}, "span_id":"1"}' + - is_false: errors + - do: + indices.get_data_stream: + name: traces-generic.otel-default + - set: { data_streams.0.indices.0.index_name: datastream-backing-index } + - do: + indices.get_settings: + index: $datastream-backing-index + - is_true: $datastream-backing-index + - match: { .$datastream-backing-index.settings.index.sort.field.0: "resource.attributes.host.name" } + - match: { .$datastream-backing-index.settings.index.sort.field.1: "@timestamp" } diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/30_aggregated_metrics_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/30_aggregated_metrics_tests.yml new file mode 100644 index 0000000000000..c26a53d841f59 --- /dev/null +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/30_aggregated_metrics_tests.yml @@ -0,0 +1,428 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + - do: + cluster.put_component_template: + name: metrics-otel@custom + body: + template: + settings: + index: + routing_path: [unit, attributes.*, resource.attributes.*] + mode: time_series + time_series: + start_time: 2024-07-01T13:03:08.138Z +--- +"metrics-service_destination.10m must be hidden": + - do: + bulk: + index: metrics-service_destination.10m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_destination"},"resource":{"attributes":{ "metricset.interval": "10m" } } }' + - is_false: errors + - do: + search: + index: metrics-service_destination.10m.otel-default + body: + fields: ["metricset.name", "metricset.interval"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.metricset\.name: ["service_destination"] } + - match: { hits.hits.0.fields.metricset\.interval: ["10m"] } + - do: + indices.get_data_stream: + name: metrics-service_destination.10m.otel-default + - match: { data_streams.0.hidden: true } +--- +"metrics-service_destination.60m must be hidden": + - do: + bulk: + index: metrics-service_destination.60m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" 
,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_destination" },"resource":{"attributes":{ "metricset.interval": "60m" } } }' + - is_false: errors + - do: + search: + index: metrics-service_destination.60m.otel-default + body: + fields: ["metricset.name", "metricset.interval"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.metricset\.name: ["service_destination"] } + - match: { hits.hits.0.fields.metricset\.interval: ["60m"] } + - do: + indices.get_data_stream: + name: metrics-service_destination.60m.otel-default + - match: { data_streams.0.hidden: true } +--- +"metrics-service_summary.10m must be hidden": + - do: + bulk: + index: metrics-service_summary.10m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_summary"},"resource":{"attributes":{ "metricset.interval": "10m" } } }' + - is_false: errors + - do: + search: + index: metrics-service_summary.10m.otel-default + body: + fields: ["metricset.name", "metricset.interval"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.metricset\.name: ["service_summary"] } + - match: { hits.hits.0.fields.metricset\.interval: ["10m"] } + - do: + indices.get_data_stream: + name: metrics-service_summary.10m.otel-default + - match: { data_streams.0.hidden: true } +--- +"metrics-service_summary.60m must be hidden": + - do: + bulk: + index: metrics-service_summary.60m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_summary" },"resource":{"attributes":{ "metricset.interval": "60m" } } }' + - is_false: errors + - do: + search: + index: metrics-service_summary.60m.otel-default + body: + fields: ["metricset.name", "metricset.interval"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.metricset\.name: ["service_summary"] } + - match: { hits.hits.0.fields.metricset\.interval: ["60m"] } + - do: + indices.get_data_stream: + name: metrics-service_summary.60m.otel-default + - match: { data_streams.0.hidden: true } +--- +"metrics-service_transaction.10m must be hidden": + - do: + bulk: + index: metrics-service_transaction.10m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_transaction" },"resource":{"attributes":{ "metricset.interval": "10m" } } }' + - is_false: errors + - do: + search: + index: metrics-service_transaction.10m.otel-default + body: + fields: ["metricset.name", "metricset.interval"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.metricset\.name: ["service_transaction"] } + - match: { hits.hits.0.fields.metricset\.interval: ["10m"] } + - do: + indices.get_data_stream: + name: metrics-service_transaction.10m.otel-default + - match: { data_streams.0.hidden: true } +--- +"metrics-service_transaction.60m must be hidden": + - do: + bulk: + index: metrics-service_transaction.60m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_transaction"},"resource":{"attributes":{ "metricset.interval": "60m" } } }' + - is_false: errors + - do: + search: + index: 
metrics-service_transaction.60m.otel-default + body: + fields: ["metricset.name", "metricset.interval"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.metricset\.name: ["service_transaction"] } + - match: { hits.hits.0.fields.metricset\.interval: ["60m"] } + - do: + indices.get_data_stream: + name: metrics-service_transaction.60m.otel-default + - match: { data_streams.0.hidden: true } +--- +"metrics-transaction.10m must be hidden": + - do: + bulk: + index: metrics-transaction.10m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "transaction"},"resource":{"attributes":{ "metricset.interval": "10m" } } }' + - is_false: errors + - do: + search: + index: metrics-transaction.10m.otel-default + body: + fields: ["metricset.name", "metricset.interval"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.metricset\.name: ["transaction"] } + - match: { hits.hits.0.fields.metricset\.interval: ["10m"] } + - do: + indices.get_data_stream: + name: metrics-transaction.10m.otel-default + - match: { data_streams.0.hidden: true } +--- +"metrics-transaction.60m must be hidden": + - do: + bulk: + index: metrics-transaction.60m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "transaction"},"resource":{"attributes":{ "metricset.interval": "60m" } } }' + - is_false: errors + - do: + search: + index: metrics-transaction.60m.otel-default + body: + fields: ["metricset.name", "metricset.interval"] + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields.metricset\.name: ["transaction"] } + - match: { hits.hits.0.fields.metricset\.interval: ["60m"] } + - do: + indices.get_data_stream: + name: metrics-transaction.60m.otel-default + - match: { data_streams.0.hidden: true } +--- +"Terms aggregation on metricset.interval from metrics-transaction must by default only contain 1m": + - do: + bulk: + index: metrics-transaction.60m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "transaction"},"resource":{"attributes":{ "metricset.interval": "60m" } } }' + - do: + bulk: + index: metrics-transaction.10m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "transaction"},"resource":{"attributes":{ "metricset.interval": "10m" } } }' + - do: + bulk: + index: metrics-transaction.1m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "transaction"},"resource":{"attributes":{ "metricset.interval": "1m" } } }' + - is_false: errors + - do: + search: + index: metrics-*.otel-default + body: > + { + + "size": 0, + "aggs": { + "intervals": { + "terms": { + "field": "metricset.interval" + } + } + } + } + - length: { aggregations.intervals.buckets: 1 } + - match: { aggregations.intervals.buckets.0.key: "1m" } + - match: { aggregations.intervals.buckets.0.doc_count: 1 } + # When including hidden indices, the 10m and 60m aggregations also show up + - do: + search: + index: metrics-*.otel-default + expand_wildcards: open,hidden + 
body: > + { + "size": 0, + "aggs": { + "intervals": { + "terms": { + "field": "metricset.interval" + } + } + } + } + - length: { aggregations.intervals.buckets: 3 } +--- +"Terms aggregation on metricset.interval from metrics-service_transaction must by default only contain 1m": + - do: + bulk: + index: metrics-service_transaction.60m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_transaction"},"resource":{"attributes":{ "metricset.interval": "60m" } } }' + - do: + bulk: + index: metrics-service_transaction.10m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_transaction"},"resource":{"attributes":{ "metricset.interval": "10m" } } }' + - do: + bulk: + index: metrics-service_transaction.1m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_transaction"},"resource":{"attributes":{ "metricset.interval": "1m" } } }' + - is_false: errors + - do: + search: + index: metrics-*.otel-default + body: > + { + + "size": 0, + "aggs": { + "intervals": { + "terms": { + "field": "metricset.interval" + } + } + } + } + - length: { aggregations.intervals.buckets: 1 } + - match: { aggregations.intervals.buckets.0.key: "1m" } + - match: { aggregations.intervals.buckets.0.doc_count: 1 } + # When including hidden indices, the 10m and 60m aggregations also show up + - do: + search: + index: metrics-*.otel-default + expand_wildcards: open,hidden + body: > + { + "size": 0, + "aggs": { + "intervals": { + "terms": { + "field": "metricset.interval" + } + } + } + } + - length: { aggregations.intervals.buckets: 3 } +--- +"Terms aggregation on metricset.interval from metrics-service_summary must by default only contain 1m": + - do: + bulk: + index: metrics-service_summary.60m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_summary"},"resource":{"attributes":{ "metricset.interval": "60m" } } }' + - do: + bulk: + index: metrics-service_summary.10m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_summary"},"resource":{"attributes":{ "metricset.interval": "10m" } } }' + - do: + bulk: + index: metrics-service_summary.1m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_summary"},"resource":{"attributes":{ "metricset.interval": "1m" } } }' + - is_false: errors + - do: + search: + index: metrics-*.otel-default + body: > + { + + "size": 0, + "aggs": { + "intervals": { + "terms": { + "field": "metricset.interval" + } + } + } + } + - length: { aggregations.intervals.buckets: 1 } + - match: { aggregations.intervals.buckets.0.key: "1m" } + - match: { aggregations.intervals.buckets.0.doc_count: 1 } + # When including hidden indices, the 10m and 60m aggregations also show up + - do: + search: + index: metrics-*.otel-default + expand_wildcards: open,hidden + 
body: > + { + "size": 0, + "aggs": { + "intervals": { + "terms": { + "field": "metricset.interval" + } + } + } + } + - length: { aggregations.intervals.buckets: 3 } +--- +"Terms aggregation on metricset.interval from metrics-service_destination must by default only contain 1m": + - do: + bulk: + index: metrics-service_destination.60m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_destination"},"resource":{"attributes":{ "metricset.interval": "60m" } } }' + - do: + bulk: + index: metrics-service_destination.10m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_destination"},"resource":{"attributes":{ "metricset.interval": "10m" } } }' + - do: + bulk: + index: metrics-service_destination.1m.otel-default + refresh: true + body: + - create: {} + - '{"@timestamp":"2024-07-18T14:48:33.467654000Z" ,"attributes":{"processor.event":"metric","transaction.root":false, "metricset.name" : "service_destination"},"resource":{"attributes":{ "metricset.interval": "1m" } } }' + - is_false: errors + - do: + search: + index: metrics-*.otel-default + body: > + { + + "size": 0, + "aggs": { + "intervals": { + "terms": { + "field": "metricset.interval" + } + } + } + } + - length: { aggregations.intervals.buckets: 1 } + - match: { aggregations.intervals.buckets.0.key: "1m" } + - match: { aggregations.intervals.buckets.0.doc_count: 1 } + # When including hidden indices, the 10m and 60m aggregations also show up + - do: + search: + index: metrics-*.otel-default + expand_wildcards: open,hidden + body: > + { + "size": 0, + "aggs": { + "intervals": { + "terms": { + "field": "metricset.interval" + } + } + } + } + - length: { aggregations.intervals.buckets: 3 } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java index e9757d3806b69..19747ec6e49f3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java @@ -9,19 +9,19 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; -import java.util.Collections; import java.util.Iterator; import java.util.Map; import java.util.Objects; -import java.util.function.BiFunction; +import java.util.function.Consumer; public class GetStackTracesResponse extends ActionResponse implements ChunkedToXContentObject { @Nullable @@ -91,34 +91,33 @@ public long getTotalSamples() { @Override public Iterator<? extends ToXContent> 
toXContentChunked(ToXContent.Params params) { - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - optional("stack_traces", stackTraces, ChunkedToXContentHelper::xContentValuesMap), - optional("stack_frames", stackFrames, ChunkedToXContentHelper::xContentValuesMap), - optional("executables", executables, ChunkedToXContentHelper::map), + return ChunkedToXContent.builder(params).object(ob -> { + ob.execute(optional("stack_traces", stackTraces, ChunkedToXContentBuilder::xContentObjectFields)); + ob.execute(optional("stack_frames", stackFrames, ChunkedToXContentBuilder::xContentObjectFields)); + ob.execute(optional("executables", executables, ChunkedToXContentBuilder::object)); // render only count for backwards-compatibility - optional( - "stack_trace_events", - stackTraceEvents, - (n, v) -> ChunkedToXContentHelper.map(n, v, entry -> (b, p) -> b.field(entry.getKey(), entry.getValue().count)) - ), - Iterators.single((b, p) -> b.field("total_frames", totalFrames)), - Iterators.single((b, p) -> b.field("sampling_rate", samplingRate)), + ob.execute( + optional( + "stack_trace_events", + stackTraceEvents, + (steb, n, v) -> steb.object(n, v.entrySet().iterator(), e -> (b, p) -> b.field(e.getKey(), e.getValue().count)) + ) + ); + ob.field("total_frames", totalFrames); + ob.field("sampling_rate", samplingRate); // the following fields are intentionally not written to the XContent representation (only needed on the transport layer): - // // * start // * end // * totalSamples - ChunkedToXContentHelper.endObject() - ); + }); } - private static <T> Iterator<? extends ToXContent> optional( + private static <T> Consumer<ChunkedToXContentBuilder> optional( String name, Map<String, T> values, - BiFunction<String, Map<String, T>, Iterator<? extends ToXContent>> supplier + TriConsumer<ChunkedToXContentBuilder, String, Map<String, T>> function ) { - return (values != null) ? supplier.apply(name, values) : Collections.emptyIterator(); + return values != null ? 
b -> function.apply(b, name, values) : b -> {}; } @Override diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java index 8b924af48c631..2e7bc44811bf6 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java @@ -33,6 +33,7 @@ import java.util.Collection; import java.util.List; +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -97,7 +98,7 @@ protected void setupIndex() { } } """; - createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 5).build()); + createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); indexDoc(INDEX, "doc_1", DOC_FIELD, "doc_1", TOPIC_FIELD, "technology", TEXT_FIELD, "term"); indexDoc( @@ -301,7 +302,7 @@ public void testRRFWithCollapse() { }); } - public void testRankDocsRetrieverWithCollapseAndAggs() { + public void testRRFRetrieverWithCollapseAndAggs() { final int rankWindowSize = 100; final int rankConstant = 10; SearchSourceBuilder source = new SearchSourceBuilder(); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java index 3a4ace9b6754a..512874e5009f3 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java @@ -21,6 +21,7 @@ import java.util.Arrays; +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.hamcrest.Matchers.equalTo; @@ -67,7 +68,7 @@ protected void setupIndex() { } } """; - createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 5).build()); + createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); indexDoc(INDEX, "doc_1", DOC_FIELD, "doc_1", TOPIC_FIELD, "technology", TEXT_FIELD, "term", LAST_30D_FIELD, 100); indexDoc( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 38dd7116acce4..19c18bf855b4e 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -34,6 +34,7 @@ import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -58,12 +59,11 @@ import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; import static org.elasticsearch.xcontent.XContentType.JSON; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; +import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX; import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -270,21 +270,28 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); } - // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. index-based role mappings) - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - assertThat(response.mappings(), emptyArray()); - - // role mappings (with the same names) can also be stored in the "native" store - var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); - putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); + // the role mappings are retrievable by the role mapping action for BWC + assertGetResponseHasMappings(true, "everyone_kibana", "everyone_fleet"); + + // role mappings (with the same names) can be stored in the "native" store + { + PutRoleMappingResponse response = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")) + .actionGet(); + assertTrue(response.isCreated()); + response = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); + assertTrue(response.isCreated()); + } + { + // deleting role mappings that exist in the native store and in cluster-state should result in success + var response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).actionGet(); + assertTrue(response.isFound()); + response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_fleet")).actionGet(); + assertTrue(response.isFound()); + } + } - public void testRoleMappingsApplied() throws Exception { + public void testClusterStateRoleMappingsAddedThenDeleted() 
throws Exception { ensureGreen(); var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); @@ -293,6 +300,12 @@ public void testRoleMappingsApplied() throws Exception { assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); + { + // Deleting non-existent native role mappings returns not found even if they exist in the config file + var response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).get(); + assertFalse(response.isFound()); + } + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); @@ -307,48 +320,95 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - // native role mappings are not affected by the removal of the cluster-state based ones + // cluster-state role mapping was removed and is not returned in the API anymore { var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), - containsInAnyOrder("everyone_kibana", "everyone_fleet") - ); + assertFalse(response.hasMappings()); } - // and roles are resolved based on the native role mappings + // no role mappings means no roles are resolved for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { PlainActionFuture<Set<String>> resolveRolesFuture = new PlainActionFuture<>(); userRoleMapper.resolveRoles( new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), resolveRolesFuture ); - assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); + assertThat(resolveRolesFuture.get(), empty()); } + } - { - var request = new DeleteRoleMappingRequest(); - request.setName("everyone_kibana"); - var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - request = new DeleteRoleMappingRequest(); - request.setName("everyone_fleet"); - response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); + public void testGetRoleMappings() throws Exception { + ensureGreen(); + + final List<String> nativeMappings = List.of("everyone_kibana", "_everyone_kibana", "zzz_mapping", "123_mapping"); + for (var mapping : nativeMappings) { + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest(mapping)).actionGet(); } - // no roles are resolved now, because both native and cluster-state based stores have been cleared - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture<Set<String>> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); + var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, 
TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var request = new GetRoleMappingsRequest(); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder( + "everyone_kibana", + "everyone_kibana" + RESERVED_ROLE_MAPPING_SUFFIX, + "_everyone_kibana", + "everyone_fleet" + RESERVED_ROLE_MAPPING_SUFFIX, + "zzz_mapping", + "123_mapping" + ) + ); + + int readOnlyCount = 0; + // assert that cluster-state role mappings come last + for (ExpressionRoleMapping mapping : response.mappings()) { + readOnlyCount = mapping.getName().endsWith(RESERVED_ROLE_MAPPING_SUFFIX) ? readOnlyCount + 1 : readOnlyCount; } + // Two sourced from cluster-state + assertEquals(readOnlyCount, 2); + + // it's possible to delete an overlapping native role mapping + assertTrue(client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).actionGet().isFound()); + + // Fetch a specific file-based role mapping + request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana" + RESERVED_ROLE_MAPPING_SUFFIX); + response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder("everyone_kibana" + RESERVED_ROLE_MAPPING_SUFFIX) + ); + + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + final ClusterStateResponse clusterStateResponse = clusterAdmin().state( + new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(savedClusterState.v2().get()) + ).get(); + + assertNull( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) + ); + + // Make sure remaining native mappings can still be fetched + request = new GetRoleMappingsRequest(); + response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder("_everyone_kibana", "zzz_mapping", "123_mapping") + ); } public static Tuple<CountDownLatch, AtomicLong> setupClusterStateListenerForError( @@ -433,11 +493,8 @@ public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - // no native role mappings exist - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); + // even if the index is closed, cluster-state role mappings are still returned + assertGetResponseHasMappings(true, "everyone_kibana", "everyone_fleet"); // cluster state settings are also applied var clusterStateResponse = clusterAdmin().state( @@ -476,6 +533,12 @@ public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { } } + private DeleteRoleMappingRequest deleteRequest(String name) { + var request = new DeleteRoleMappingRequest(); + request.setName(name); + return request; + } + private PutRoleMappingRequest 
sampleRestRequest(String name) throws Exception { var json = """ { @@ -494,4 +557,17 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); } } + + private static void assertGetResponseHasMappings(boolean readOnly, String... mappings) throws InterruptedException, ExecutionException { + var request = new GetRoleMappingsRequest(); + request.setNames(mappings); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder( + Arrays.stream(mappings).map(mapping -> mapping + (readOnly ? RESERVED_ROLE_MAPPING_SUFFIX : "")).toArray(String[]::new) + ) + ); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 79a00fa1293bd..f4d9360d1ed84 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -897,7 +897,8 @@ Collection<Object> createComponents( reservedRealm ); components.add(nativeUsersStore); - components.add(new PluginComponentBinding<>(NativeRoleMappingStore.class, nativeRoleMappingStore)); + components.add(clusterStateRoleMapper); + components.add(nativeRoleMappingStore); components.add(new PluginComponentBinding<>(UserRoleMapper.class, userRoleMapper)); components.add(reservedRealm); components.add(realms); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java index e8d248233415c..569cdc1a79fd9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -17,6 +18,7 @@ import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; @@ -25,16 +27,20 @@ public class TransportDeleteRoleAction extends TransportAction<DeleteRoleRequest, DeleteRoleResponse> { + if (clusterStateRoleMapper.hasMapping(request.name())) { + // Allow deleting a mapping with the same name in the native role mapping store as in the file_settings namespace, but + // add a warning header to signal to the caller that this could be a problem. + HeaderWarning.addWarning( + "A read-only role mapping with the same name [" + + request.name() + + "] has previously been defined in a configuration file. 
" + + "The read only role mapping will still be active." + ); + } + return new DeleteRoleResponse(found); + })); } catch (Exception e) { logger.error((Supplier) () -> "failed to delete role [" + request.name() + "]", e); listener.onFailure(e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index 74129facae70a..467cc1c8a9027 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -16,17 +17,20 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; public class TransportDeleteRoleMappingAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportDeleteRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore roleMappingStore + NativeRoleMappingStore roleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super( DeleteRoleMappingAction.NAME, @@ -36,10 +40,22 @@ public TransportDeleteRoleMappingAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.roleMappingStore = roleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) { + if (clusterStateRoleMapper.hasMapping(request.getName())) { + // Since it's allowed to add a mapping with the same name in the native role mapping store as the file_settings namespace, + // a warning header is added to signal to the caller that this could be a problem. + HeaderWarning.addWarning( + "A read only role mapping with the same name [" + + request.getName() + + "] has been previously been defined in a configuration file. The role mapping [" + + request.getName() + + "] defined in the configuration file is read only, will not be deleted, and will remain active." 
+ ); + } roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java index ac0d3177cca09..db0ee01af70e4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java @@ -17,21 +17,30 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import java.util.Arrays; +import java.util.Comparator; import java.util.HashSet; +import java.util.List; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX; public class TransportGetRoleMappingsAction extends HandledTransportAction<GetRoleMappingsRequest, GetRoleMappingsResponse> { private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportGetRoleMappingsAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore nativeRoleMappingStore + NativeRoleMappingStore nativeRoleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super( GetRoleMappingsAction.NAME, @@ -41,6 +50,7 @@ public TransportGetRoleMappingsAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.roleMappingStore = nativeRoleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override @@ -51,9 +61,32 @@ protected void doExecute(Task task, final GetRoleMappingsRequest request, final } else { names = new HashSet<>(Arrays.asList(request.getNames())); } - this.roleMappingStore.getRoleMappings(names, ActionListener.wrap(mappings -> { - ExpressionRoleMapping[] array = mappings.toArray(new ExpressionRoleMapping[mappings.size()]); - listener.onResponse(new GetRoleMappingsResponse(array)); + roleMappingStore.getRoleMappings(names, ActionListener.wrap(mappings -> { + List<ExpressionRoleMapping> combinedRoleMappings = Stream.concat( + mappings.stream(), + clusterStateRoleMapper.getMappings(names == null ? null : names.stream().map(name -> { + // If a read-only role mapping is fetched by name including the suffix, remove the suffix + return name.endsWith(RESERVED_ROLE_MAPPING_SUFFIX) + ? 
name.substring(0, name.length() - RESERVED_ROLE_MAPPING_SUFFIX.length()) : name; }).collect(Collectors.toSet())) .stream() .map(this::cloneAndMarkAsReadOnly) .sorted(Comparator.comparing(ExpressionRoleMapping::getName)) ).toList(); listener.onResponse(new GetRoleMappingsResponse(combinedRoleMappings)); }, listener::onFailure)); } + + private ExpressionRoleMapping cloneAndMarkAsReadOnly(ExpressionRoleMapping mapping) { + // Mark role mappings from cluster state as "read only" by adding a suffix to their name + return new ExpressionRoleMapping( + mapping.getName() + RESERVED_ROLE_MAPPING_SUFFIX, + mapping.getExpression(), + mapping.getRoles(), + mapping.getRoleTemplates(), + mapping.getMetadata(), + mapping.isEnabled() + ); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 82a3b4f000064..76f520bed517e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -16,27 +17,52 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX; + public class TransportPutRoleMappingAction extends HandledTransportAction<PutRoleMappingRequest, PutRoleMappingResponse> { private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportPutRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore roleMappingStore + NativeRoleMappingStore roleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super(PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.roleMappingStore = roleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener<PutRoleMappingResponse> listener) { + validateMappingName(request.getName()); + if (clusterStateRoleMapper.hasMapping(request.getName())) { + // Allow defining a mapping with the same name in the native role mapping store as in the file_settings namespace, but add a + // warning header to signal to the caller that this could be a problem. + HeaderWarning.addWarning( + "A read-only role mapping with the same name [" + + request.getName() + + "] has previously been defined in a configuration file. 
" + + "Both role mappings will be used to determine role assignments." + ); + } roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) ); } + + private static void validateMappingName(String mappingName) { + if (mappingName.endsWith(RESERVED_ROLE_MAPPING_SUFFIX)) { + throw new IllegalArgumentException( + "Invalid mapping name [" + mappingName + "]. [" + RESERVED_ROLE_MAPPING_SUFFIX + "] is not an allowed suffix" + ); + } + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java index b345178e205c3..8b3f8ec09675a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java @@ -228,7 +228,8 @@ public static byte[] readFileContents(final String jwkSetConfigKeyPkc, final Str throws SettingsException { try { final Path path = JwtUtil.resolvePath(environment, jwkSetPathPkc); - return Files.readAllBytes(path); + byte[] bytes = AccessController.doPrivileged((PrivilegedExceptionAction) () -> Files.readAllBytes(path)); + return bytes; } catch (Exception e) { throw new SettingsException( "Failed to read contents for setting [" + jwkSetConfigKeyPkc + "] value [" + jwkSetPathPkc + "].", diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java index 6601d27d5a431..d5ef90f7f1664 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java @@ -29,6 +29,8 @@ import java.nio.file.Files; import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.Collections; import java.util.List; import java.util.Map; @@ -101,19 +103,26 @@ public KerberosRealm(final RealmConfig config, final UserRoleMapper userRoleMapp this.threadPool = threadPool; this.keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); - if (Files.exists(keytabPath) == false) { + validateKeytab(this.keytabPath); + + this.enableKerberosDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); + this.removeRealmName = config.getSetting(KerberosRealmSettings.SETTING_REMOVE_REALM_NAME); + this.delegatedRealms = null; + } + + private static void validateKeytab(Path keytabPath) { + boolean fileExists = AccessController.doPrivileged((PrivilegedAction) () -> Files.exists(keytabPath)); + if (fileExists == false) { throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] does not exist"); } - if (Files.isDirectory(keytabPath)) { + boolean pathIsDir = AccessController.doPrivileged((PrivilegedAction) () -> Files.isDirectory(keytabPath)); + if (pathIsDir) { throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] is a directory"); } - if (Files.isReadable(keytabPath) == false) { + boolean isReadable = AccessController.doPrivileged((PrivilegedAction) () -> Files.isReadable(keytabPath)); + if (isReadable == false) { throw new 
IllegalArgumentException("configured service key tab file [" + keytabPath + "] must have read permission"); } - - this.enableKerberosDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); - this.removeRealmName = config.getSetting(KerberosRealmSettings.SETTING_REMOVE_REALM_NAME); - this.delegatedRealms = null; } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java index c2e0caf7234cb..aa1946f445670 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java @@ -93,6 +93,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.PrivilegedFileWatcher; import org.elasticsearch.xpack.security.authc.jwt.JwtUtil; import java.io.IOException; @@ -366,8 +367,14 @@ private void validateAccessToken(AccessToken accessToken, JWT idToken) { private JWKSet readJwkSetFromFile(String jwkSetPath) throws IOException, ParseException { final Path path = realmConfig.env().configFile().resolve(jwkSetPath); // avoid using JWKSet.loadFile() as it does not close FileInputStream internally - String jwkSet = Files.readString(path, StandardCharsets.UTF_8); - return JWKSet.parse(jwkSet); + try { + String jwkSet = AccessController.doPrivileged( + (PrivilegedExceptionAction) () -> Files.readString(path, StandardCharsets.UTF_8) + ); + return JWKSet.parse(jwkSet); + } catch (PrivilegedActionException ex) { + throw (IOException) ex.getException(); + } } /** @@ -808,7 +815,7 @@ IDTokenValidator createIdTokenValidator(boolean addFileWatcherIfRequired) { private void setMetadataFileWatcher(String jwkSetPath) throws IOException { final Path path = realmConfig.env().configFile().resolve(jwkSetPath); - FileWatcher watcher = new FileWatcher(path); + FileWatcher watcher = new PrivilegedFileWatcher(path); watcher.addListener(new FileListener(LOGGER, () -> this.idTokenValidator.set(createIdTokenValidator(false)))); watcherService.add(watcher, ResourceWatcherService.Frequency.MEDIUM); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java index 704875efa18f6..9adfd15e23207 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java @@ -49,6 +49,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.ssl.CertParsingUtils; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.PrivilegedFileWatcher; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.support.DelegatedAuthorizationSupport; @@ -774,7 +775,11 @@ private static final class SamlFilesystemMetadataResolver extends FilesystemMeta @Override protected byte[] fetchMetadata() throws ResolverException { assert 
assertNotTransportThread("fetching SAML metadata from a file"); - return super.fetchMetadata(); + try { + return AccessController.doPrivileged((PrivilegedExceptionAction) () -> super.fetchMetadata()); + } catch (PrivilegedActionException e) { + throw (ResolverException) e.getException(); + } } } @@ -806,7 +811,7 @@ private static Tuple(resolver, () -> resolveEntityDescriptor(resolver, entityId, path.toString(), true)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java index 9a6e9e75c4685..baea5970b4637 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java @@ -14,13 +14,16 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.script.ScriptService; import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; +import java.util.Arrays; import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.SecurityExtension.SecurityComponents; @@ -28,8 +31,7 @@ * A role mapper the reads the role mapping rules (i.e. {@link ExpressionRoleMapping}s) from the cluster state * (i.e. {@link RoleMappingMetadata}). This is not enabled by default. 
*/ -public final class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener { - +public class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener { /** * This setting is never registered by the xpack security plugin - in order to enable the * cluster-state based role mapper another plugin must register it as a boolean setting @@ -45,6 +47,7 @@ public final class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCa * */ public static final String CLUSTER_STATE_ROLE_MAPPINGS_ENABLED = "xpack.security.authc.cluster_state_role_mappings.enabled"; + public static final String RESERVED_ROLE_MAPPING_SUFFIX = "-read-only-operator-config"; private static final Logger logger = LogManager.getLogger(ClusterStateRoleMapper.class); private final ScriptService scriptService; @@ -54,8 +57,8 @@ public final class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCa public ClusterStateRoleMapper(Settings settings, ScriptService scriptService, ClusterService clusterService) { this.scriptService = scriptService; this.clusterService = clusterService; - // this role mapper is disabled by default and only code in other plugins can enable it - this.enabled = settings.getAsBoolean(CLUSTER_STATE_ROLE_MAPPINGS_ENABLED, false); + // this role mapper is enabled by default and only code in other plugins can disable it + this.enabled = settings.getAsBoolean(CLUSTER_STATE_ROLE_MAPPINGS_ENABLED, true); if (this.enabled) { clusterService.addListener(this); } @@ -81,12 +84,30 @@ public void clusterChanged(ClusterChangedEvent event) { } } + public boolean hasMapping(String name) { + return getMappings().stream().map(ExpressionRoleMapping::getName).anyMatch(name::equals); + } + + public Set<ExpressionRoleMapping> getMappings(@Nullable Set<String> names) { + if (enabled == false) { + return Set.of(); + } + final Set<ExpressionRoleMapping> mappings = getMappings(); + if (names == null || names.isEmpty()) { + return mappings; + } + return mappings.stream().filter(it -> names.contains(it.getName())).collect(Collectors.toSet()); + } + private Set<ExpressionRoleMapping> getMappings() { if (enabled == false) { return Set.of(); } else { final Set<ExpressionRoleMapping> mappings = RoleMappingMetadata.getFromClusterState(clusterService.state()).getRoleMappings(); - logger.trace("Retrieved [{}] mapping(s) from cluster state", mappings.size()); + logger.trace( + "Retrieved mapping(s) {} from cluster state", + Arrays.toString(mappings.stream().map(ExpressionRoleMapping::getName).toArray(String[]::new)) + ); return mappings; } } diff --git a/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy index b3d5e80e09dcd..d814dfbb1c117 100644 --- a/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/security/src/main/plugin-metadata/plugin-security.policy @@ -6,6 +6,10 @@ grant { permission org.elasticsearch.SecuredConfigFileAccessPermission "x-pack/users"; // other security files specified by settings permission org.elasticsearch.SecuredConfigFileSettingAccessPermission "xpack.security.authc.realms.ldap.*.files.role_mapping"; + permission org.elasticsearch.SecuredConfigFileSettingAccessPermission "xpack.security.authc.realms.pki.*.files.role_mapping"; + permission org.elasticsearch.SecuredConfigFileSettingAccessPermission "xpack.security.authc.realms.jwt.*.pkc_jwkset_path"; + permission org.elasticsearch.SecuredConfigFileSettingAccessPermission "xpack.security.authc.realms.saml.*.idp.metadata.path"; + permission 
org.elasticsearch.SecuredConfigFileSettingAccessPermission "xpack.security.authc.realms.kerberos.*.keytab.path"; // needed for SAML permission java.util.PropertyPermission "org.apache.xml.security.ignoreLineBreaks", "read,write"; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 6d7817db8ec05..ce5aaacdb92b9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -403,7 +403,7 @@ public static class UnregisteredSecuritySettingsPlugin extends Plugin { ); public static final Setting<Boolean> CLUSTER_STATE_ROLE_MAPPINGS_ENABLED = Setting.boolSetting( "xpack.security.authc.cluster_state_role_mappings.enabled", - false, + true, Setting.Property.NodeScope ); public static final Setting<Boolean> NATIVE_ROLES_ENABLED = Setting.boolSetting( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index 84e4dc402c767..d647088017dc1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.junit.BeforeClass; @@ -66,7 +67,8 @@ public void testReservedRole() { mock(ActionFilters.class), rolesStore, transportService, - new ReservedRoleNameChecker.Default() + new ReservedRoleNameChecker.Default(), + mock(ClusterStateRoleMapper.class) ); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -115,7 +117,8 @@ private void testValidRole(String roleName) { mock(ActionFilters.class), rolesStore, transportService, - new ReservedRoleNameChecker.Default() + new ReservedRoleNameChecker.Default(), + mock(ClusterStateRoleMapper.class) ); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -168,7 +171,8 @@ public void testException() { mock(ActionFilters.class), rolesStore, transportService, - new ReservedRoleNameChecker.Default() + new ReservedRoleNameChecker.Default(), + mock(ClusterStateRoleMapper.class) ); DeleteRoleRequest request = new DeleteRoleRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index 6e8698f095d32..799e0c334172c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -19,6 +19,7 @@ import 
org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.hamcrest.Matchers; import org.junit.Before; @@ -34,13 +35,16 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anySet; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TransportGetRoleMappingsActionTests extends ESTestCase { private NativeRoleMappingStore store; + private ClusterStateRoleMapper clusterStateRoleMapper; private TransportGetRoleMappingsAction action; private AtomicReference<Set<String>> namesRef; private List<ExpressionRoleMapping> result; @@ -49,6 +53,8 @@ public class TransportGetRoleMappingsActionTests extends ESTestCase { @Before public void setupMocks() { store = mock(NativeRoleMappingStore.class); + clusterStateRoleMapper = mock(ClusterStateRoleMapper.class); + when(clusterStateRoleMapper.getMappings(anySet())).thenReturn(Set.of()); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -58,7 +64,7 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportGetRoleMappingsAction(mock(ActionFilters.class), transportService, store); + action = new TransportGetRoleMappingsAction(mock(ActionFilters.class), transportService, store, clusterStateRoleMapper); namesRef = new AtomicReference<>(null); result = Collections.emptyList(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 6f789a10a3a6c..0bb3e7dd4ac3e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -19,26 +19,32 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.junit.Before; import java.util.Arrays; import java.util.Collections; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; import static 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java
index 6f789a10a3a6c..0bb3e7dd4ac3e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java
@@ -19,26 +19,32 @@
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression;
+import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper;
 import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore;
 import org.junit.Before;
 
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 
+import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX;
 import static org.hamcrest.Matchers.aMapWithSize;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.iterableWithSize;
 import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anySet;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 public class TransportPutRoleMappingActionTests extends ESTestCase {
 
     private NativeRoleMappingStore store;
+    private ClusterStateRoleMapper clusterStateRoleMapper;
     private TransportPutRoleMappingAction action;
     private AtomicReference<PutRoleMappingRequest> requestRef;
 
@@ -46,6 +52,9 @@ public class TransportPutRoleMappingActionTests extends ESTestCase {
     @Before
     public void setupMocks() {
         store = mock(NativeRoleMappingStore.class);
+        clusterStateRoleMapper = mock(ClusterStateRoleMapper.class);
+        when(clusterStateRoleMapper.getMappings(anySet())).thenReturn(Set.of());
+        when(clusterStateRoleMapper.hasMapping(any())).thenReturn(false);
         TransportService transportService = new TransportService(
             Settings.EMPTY,
             mock(Transport.class),
@@ -55,7 +64,7 @@ public void setupMocks() {
             null,
             Collections.emptySet()
         );
-        action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store);
+        action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store, clusterStateRoleMapper);
 
         requestRef = new AtomicReference<>(null);
 
@@ -85,6 +94,25 @@ public void testPutValidMapping() throws Exception {
         assertThat(mapping.getMetadata().get("dumb"), equalTo(true));
     }
 
+    public void testPutMappingWithInvalidName() {
+        final FieldExpression expression = new FieldExpression("username", Collections.singletonList(new FieldExpression.FieldValue("*")));
+        IllegalArgumentException illegalArgumentException = expectThrows(
+            IllegalArgumentException.class,
+            () -> put("anarchy" + RESERVED_ROLE_MAPPING_SUFFIX, expression, "superuser", Collections.singletonMap("dumb", true))
+        );
+
+        assertThat(
+            illegalArgumentException.getMessage(),
+            equalTo(
+                "Invalid mapping name [anarchy"
+                    + RESERVED_ROLE_MAPPING_SUFFIX
+                    + "]. ["
+                    + RESERVED_ROLE_MAPPING_SUFFIX
+                    + "] is not an allowed suffix"
+            )
+        );
+    }
+
     private PutRoleMappingResponse put(String name, FieldExpression expression, String role, Map<String, Object> metadata) throws Exception {
         final PutRoleMappingRequest request = new PutRoleMappingRequest();
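NOTE: testPutMappingWithInvalidName pins down the new name validation: a role-mapping name ending in RESERVED_ROLE_MAPPING_SUFFIX is rejected, presumably so REST-created mappings cannot collide with reserved cluster-state ones. A sketch of the check the asserted message implies (the real validation lives in production code outside this hunk):

    // Hypothetical validation consistent with the asserted error message.
    static void validateMappingName(String name) {
        if (name.endsWith(RESERVED_ROLE_MAPPING_SUFFIX)) {
            throw new IllegalArgumentException(
                "Invalid mapping name [" + name + "]. [" + RESERVED_ROLE_MAPPING_SUFFIX + "] is not an allowed suffix"
            );
        }
    }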
[" + + RESERVED_ROLE_MAPPING_SUFFIX + + "] is not an allowed suffix" + ) + ); + } + private PutRoleMappingResponse put(String name, FieldExpression expression, String role, Map metadata) throws Exception { final PutRoleMappingRequest request = new PutRoleMappingRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java index 7a9dd65f84c67..515b5ef741a00 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java @@ -56,12 +56,12 @@ public void setup() { () -> 1L ); clusterService = mock(ClusterService.class); - enabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", true).build(); + disabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", false).build(); if (randomBoolean()) { - disabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", false).build(); + enabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", true).build(); } else { - // the cluster state role mapper is disabled by default - disabledSettings = Settings.EMPTY; + // the cluster state role mapper is enabled by default + enabledSettings = Settings.EMPTY; } } @@ -95,6 +95,9 @@ public void testRoleResolving() throws Exception { verify(mapping1).isEnabled(); verify(mapping2).isEnabled(); verify(mapping3).isEnabled(); + verify(mapping1).getName(); + verify(mapping2).getName(); + verify(mapping3).getName(); verify(mapping2).getExpression(); verify(mapping3).getExpression(); verify(mapping3).getRoleNames(same(scriptService), same(expressionModel)); diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 8125c8d9d52ad..fa6a908891400 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -14,18 +14,15 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.upgrades.ParameterizedFullClusterRestartTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; -import org.junit.Before; import org.junit.ClassRule; import java.io.IOException; @@ -88,13 +85,6 @@ protected Settings 
diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
index 8125c8d9d52ad..fa6a908891400 100644
--- a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
+++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
@@ -14,18 +14,15 @@
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.cluster.FeatureFlag;
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
 import org.elasticsearch.test.cluster.util.resource.Resource;
 import org.elasticsearch.test.rest.ESRestTestCase;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus;
 import org.elasticsearch.upgrades.ParameterizedFullClusterRestartTestCase;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.json.JsonXContent;
-import org.junit.Before;
 import org.junit.ClassRule;
 
 import java.io.IOException;
@@ -88,13 +85,6 @@ protected Settings restClientSettings() {
             .build();
     }
 
-    @Before
-    public void checkClusterVersion() {
-        @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // always true
-        var originalClusterSupportsShutdown = oldClusterHasFeature(RestTestLegacyFeatures.SHUTDOWN_SUPPORTED);
-        assumeTrue("no shutdown in versions before 7.15", originalClusterSupportsShutdown);
-    }
-
     @SuppressWarnings("unchecked")
     public void testNodeShutdown() throws Exception {
         if (isRunningAgainstOldCluster()) {
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml
index 9607b64385721..939f153b8b0ea 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml
@@ -3,7 +3,7 @@ setup:
   - requires:
       cluster_features: ["gte_v8.11.0"]
       reason: "ESQL is available in 8.11+"
-      test_runner_features: allowed_warnings_regex
+      test_runner_features: [allowed_warnings_regex, capabilities]
 
   - do:
       indices.create:
@@ -385,8 +385,31 @@ setup:
   - length: { values: 2 }
   - match: { values.0: [ [ "foo", "bar" ] ] }
   - match: { values.1: [ "baz" ] }
 
+---
+"reverse text":
+  - requires:
+      capabilities:
+        - method: POST
+          path: /_query
+          parameters: [method, path, parameters, capabilities]
+          capabilities: [fn_reverse]
+      reason: "reverse not yet added"
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+      esql.query:
+        body:
+          query: 'FROM test | SORT name | EVAL job_reversed = REVERSE(job), tag_reversed = REVERSE(tag) | KEEP job_reversed, tag_reversed'
+
+  - match: { columns.0.name: "job_reversed" }
+  - match: { columns.0.type: "text" }
+  - match: { columns.1.name: "tag_reversed" }
+  - match: { columns.1.type: "text" }
+  - length: { values: 2 }
+  - match: { values.0: [ "rotceriD TI", "rab oof" ] }
+  - match: { values.1: [ "tsilaicepS lloryaP", "zab" ] }
 ---
 "stats text with raw":
   - do:
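NOTE: the new "reverse text" case asserts plain whole-string reversal of each value, which is why "foo bar" comes back as "rab oof" and "IT Director" as "rotceriD TI". An illustrative Java equivalent of the expectation (not the ES|QL implementation):

    // REVERSE reverses the entire string value in these test rows.
    assert new StringBuilder("foo bar").reverse().toString().equals("rab oof");
    assert new StringBuilder("IT Director").reverse().toString().equals("rotceriD TI");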
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml
index 6aec721b35418..11be68cc764e2 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml
@@ -44,15 +44,18 @@
   - do:
       inference.get:
         inference_id: "*"
-  - length: { endpoints: 0}
+  - length: { endpoints: 1}
+  - match: { endpoints.0.inference_id: ".elser-2" }
 
   - do:
       inference.get:
         inference_id: _all
-  - length: { endpoints: 0}
+  - length: { endpoints: 1}
+  - match: { endpoints.0.inference_id: ".elser-2" }
 
   - do:
       inference.get:
         inference_id: ""
-  - length: { endpoints: 0}
+  - length: { endpoints: 1}
+  - match: { endpoints.0.inference_id: ".elser-2" }
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/40_document_level_security_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/40_document_level_security_synthetic_source.yml
index 769b9d848ba35..52abe0a3d83d7 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/40_document_level_security_synthetic_source.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/40_document_level_security_synthetic_source.yml
@@ -186,7 +186,7 @@ Filter on object with stored source:
               type: keyword
             obj:
-              store_array_source: true
+              synthetic_source_keep: arrays
               properties:
                 secret:
                   type: keyword
diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
index 303f799e0d9cd..c57e5653d1279 100644
--- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
+++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
@@ -361,29 +361,10 @@ public void testApiKeySuperuser() throws IOException {
                     )
                 )
             );
-            if (clusterHasFeature(RestTestLegacyFeatures.SECURITY_ROLE_DESCRIPTORS_OPTIONAL)) {
-                createApiKeyRequest.setJsonEntity("""
-                    {
-                        "name": "super_legacy_key"
-                    }""");
-            } else {
-                createApiKeyRequest.setJsonEntity("""
-                    {
-                        "name": "super_legacy_key",
-                        "role_descriptors": {
-                            "super": {
-                                "cluster": [ "all" ],
-                                "indices": [
-                                    {
-                                        "names": [ "*" ],
-                                        "privileges": [ "all" ],
-                                        "allow_restricted_indices": true
-                                    }
-                                ]
-                            }
-                        }
-                    }""");
-            }
+            createApiKeyRequest.setJsonEntity("""
+                {
+                    "name": "super_legacy_key"
+                }""");
             final Map<String, Object> createApiKeyResponse = entityAsMap(client().performRequest(createApiKeyRequest));
             final byte[] keyBytes = (createApiKeyResponse.get("id") + ":" + createApiKeyResponse.get("api_key")).getBytes(
                 StandardCharsets.UTF_8
@@ -393,20 +374,6 @@ public void testApiKeySuperuser() throws IOException {
             final Request saveApiKeyRequest = new Request("PUT", "/api_keys/_doc/super_legacy_key");
             saveApiKeyRequest.setJsonEntity("{\"auth_header\":\"" + apiKeyAuthHeader + "\"}");
             assertOK(client().performRequest(saveApiKeyRequest));
-
-            if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_ENFORCED) == false) {
-                final Request indexRequest = new Request("POST", ".security/_doc");
-                indexRequest.setJsonEntity("""
-                    {
-                        "doc_type": "foo"
-                    }""");
-                if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_DEPRECATED)) {
-                    indexRequest.setOptions(systemIndexWarningHandlerOptions(".security-7").addHeader("Authorization", apiKeyAuthHeader));
-                } else {
-                    indexRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", apiKeyAuthHeader));
-                }
-                assertOK(client().performRequest(indexRequest));
-            }
         } else {
             final Request getRequest = new Request("GET", "/api_keys/_doc/super_legacy_key");
             final Map<String, Object> getResponseMap = responseAsMap(client().performRequest(getRequest));
@@ -472,15 +439,7 @@ public void testRollupAfterRestart() throws Exception {
 
         // create the rollup job
         final Request createRollupJobRequest = new Request("PUT", "/_rollup/job/rollup-job-test");
-
-        String intervalType;
-        if (clusterHasFeature(RestTestLegacyFeatures.SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM)) {
-            intervalType = "fixed_interval";
-        } else {
-            intervalType = "interval";
-        }
-
-        createRollupJobRequest.setJsonEntity(Strings.format("""
+        createRollupJobRequest.setJsonEntity("""
             {
                 "index_pattern": "rollup-*",
                 "rollup_index": "results-rollup",
@@ -489,7 +448,7 @@ public void testRollupAfterRestart() throws Exception {
                 "groups": {
                     "date_histogram": {
                         "field": "timestamp",
-                        "%s": "5m"
+                        "fixed_interval": "5m"
                     }
                 },
                 "metrics": [
@@ -498,7 +457,7 @@ public void testRollupAfterRestart() throws Exception {
                         "metrics": [ "min", "max", "sum" ]
                     }
                 ]
-            }""", intervalType));
+            }""");
 
         Map<String, Object> createRollupJobResponse = entityAsMap(client().performRequest(createRollupJobRequest));
        assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE));
@@ -550,11 +509,7 @@ public void testTransformLegacyTemplateCleanup() throws Exception {
         assertThat(createIndexResponse.get("acknowledged"), equalTo(Boolean.TRUE));
 
         // create a transform
-        String endpoint = clusterHasFeature(RestTestLegacyFeatures.TRANSFORM_NEW_API_ENDPOINT)
-            ? "_transform/transform-full-cluster-restart-test"
-            : "_data_frame/transforms/transform-full-cluster-restart-test";
-        final Request createTransformRequest = new Request("PUT", endpoint);
-
+        final Request createTransformRequest = new Request("PUT", "_transform/transform-full-cluster-restart-test");
         createTransformRequest.setJsonEntity("""
             {
                 "source": {
diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java
index 3674f811ebb0a..c825de31a7f6e 100644
--- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java
+++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java
@@ -28,8 +28,6 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasItem;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
 
 public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractXpackFullClusterRestartTestCase {
 
@@ -62,13 +60,8 @@ public void testMlConfigIndexMappingsAfterMigration() throws Exception {
         if (isRunningAgainstOldCluster()) {
             // trigger .ml-config index creation
             createAnomalyDetectorJob(OLD_CLUSTER_JOB_ID);
-            if (clusterHasFeature(RestTestLegacyFeatures.ML_ANALYTICS_MAPPINGS)) {
-                // .ml-config has mappings for analytics as the feature was introduced in 7.3.0
-                assertThat(getDataFrameAnalysisMappings().keySet(), hasItem("outlier_detection"));
-            } else {
-                // .ml-config does not yet have correct mappings, it will need an update after cluster is upgraded
-                assertThat(getDataFrameAnalysisMappings(), is(nullValue()));
-            }
+            // .ml-config has mappings for analytics as the feature was introduced in 7.3.0
+            assertThat(getDataFrameAnalysisMappings().keySet(), hasItem("outlier_detection"));
         } else {
             // trigger .ml-config index mappings update
             createAnomalyDetectorJob(NEW_CLUSTER_JOB_ID);
diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java
index 16345a19fc950..7dc0a2f48bbc9 100644
--- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java
+++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java
@@ -38,7 +38,6 @@
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
 
 public class MlHiddenIndicesFullClusterRestartIT extends AbstractXpackFullClusterRestartTestCase {
 
@@ -78,36 +77,6 @@ public void testMlIndicesBecomeHidden() throws Exception {
             // trigger ML indices creation
             createAnomalyDetectorJob(JOB_ID);
             openAnomalyDetectorJob(JOB_ID);
-
-            if (clusterHasFeature(RestTestLegacyFeatures.ML_INDICES_HIDDEN) == false) {
-                Map<String, Object> indexSettingsMap = contentAsMap(getMlIndicesSettings());
-                Map<String, Object> aliasesMap = contentAsMap(getMlAliases());
-
-                assertThat("Index settings map was: " + indexSettingsMap, indexSettingsMap, is(aMapWithSize(greaterThanOrEqualTo(4))));
-                for (Map.Entry<String, Object> e : indexSettingsMap.entrySet()) {
-                    String indexName = e.getKey();
-                    @SuppressWarnings("unchecked")
-                    Map<String, Object> settings = (Map<String, Object>) e.getValue();
-                    assertThat(settings, is(notNullValue()));
-                    assertThat(
-                        "Index " + indexName + " expected not to be hidden but was, settings = " + settings,
-                        XContentMapValues.extractValue(settings, "settings", "index", "hidden"),
-                        is(nullValue())
-                    );
-                }
-
-                for (Tuple<List<String>, String> indexAndAlias : EXPECTED_INDEX_ALIAS_PAIRS) {
-                    List<String> indices = indexAndAlias.v1();
-                    String alias = indexAndAlias.v2();
-                    for (String index : indices) {
-                        assertThat(
-                            indexAndAlias + " expected not be hidden but was, aliasesMap = " + aliasesMap,
-                            XContentMapValues.extractValue(aliasesMap, index, "aliases", alias, "is_hidden"),
-                            is(nullValue())
-                        );
-                    }
-                }
-            }
         } else {
             // The 5 operations in MlInitializationService.makeMlInternalIndicesHidden() run sequentially, so might
             // not all be finished when this test runs. The desired state should exist within a few seconds of startup,
diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java
index 767f27d4e4f93..fee6910fcf6c0 100644
--- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java
+++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java
@@ -76,12 +76,7 @@ public void testMappingsAreUpdated() throws Exception {
                 """);
             client().performRequest(putWatchRequest);
 
-            if (clusterHasFeature(RestTestLegacyFeatures.WATCHES_VERSION_IN_META)) {
-                assertMappingVersion(".watches", getOldClusterVersion());
-            } else {
-                // watches indices from before 7.10 do not have mapping versions in _meta
-                assertNoMappingVersion(".watches");
-            }
+            assertMappingVersion(".watches", getOldClusterVersion());
         } else {
             assertMappingVersion(".watches", Build.current().version());
         }
@@ -101,9 +96,7 @@ private void assertNoMappingVersion(String index) throws Exception {
         assertBusy(() -> {
             Request mappingRequest = new Request("GET", index + "/_mappings");
             assert isRunningAgainstOldCluster();
-            if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_DEPRECATED)) {
-                mappingRequest.setOptions(getWarningHandlerOptions(index));
-            }
+            mappingRequest.setOptions(getWarningHandlerOptions(index));
             Response response = client().performRequest(mappingRequest);
             String responseBody = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
             assertThat(responseBody, not(containsString("\"version\":\"")));
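NOTE: assertNoMappingVersion previously installed the deprecation-warning handler only when the old cluster had SYSTEM_INDICES_REST_ACCESS_DEPRECATED; every BWC origin still supported here deprecates direct system-index access, so the options are now set unconditionally. The resulting request setup, using the helper already present in this test class:

    // All supported old clusters emit the system-index access deprecation warning.
    Request mappingRequest = new Request("GET", ".watches/_mappings");
    mappingRequest.setOptions(getWarningHandlerOptions(".watches"));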
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java
index af67ab5751e96..4324aed5fee18 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.upgrades;
 
 import org.elasticsearch.Build;
-import org.elasticsearch.Version;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.common.io.Streams;
@@ -15,7 +14,6 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.core.Booleans;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.xpack.test.SecuritySettingsSourceField;
 import org.junit.Before;
@@ -49,16 +47,6 @@ protected static boolean isOriginalClusterCurrent() {
         return UPGRADE_FROM_VERSION.equals(Build.current().version());
     }
 
-    @Deprecated(forRemoval = true)
-    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
-    // Tests should be reworked to rely on features from the current cluster (old, mixed or upgraded).
-    // Version test against the original cluster will be removed
-    protected static boolean isOriginalClusterVersionAtLeast(Version supportedVersion) {
-        // Always assume non-semantic versions are OK: this method will be removed in V9, we are testing the pre-upgrade cluster version,
-        // and non-semantic versions are always V8+
-        return parseLegacyVersion(UPGRADE_FROM_VERSION).map(x -> x.onOrAfter(supportedVersion)).orElse(true);
-    }
-
     @Override
     protected boolean resetFeatureStates() {
         return false;
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java
index 4de2c610e5c48..8c051d03d5f04 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java
@@ -8,13 +8,11 @@
 package org.elasticsearch.upgrades;
 
 import org.apache.http.util.EntityUtils;
-import org.elasticsearch.Version;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.core.RestApiVersion;
 import org.elasticsearch.core.Strings;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.XContentType;
 import org.junit.After;
 import org.junit.Before;
@@ -101,11 +99,6 @@ public void removeLogging() throws IOException {
     }
 
     public void testTrainedModelDeployment() throws Exception {
-        @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // upgrade will always be from v8, condition can be removed
-        var originalClusterAtLeastV8 = isOriginalClusterVersionAtLeast(Version.V_8_0_0);
-        // These tests assume the original cluster is v8 - testing for features on the _current_ cluster will break for NEW
-        assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8);
-
         final String modelId = "upgrade-deployment-test";
 
         switch (CLUSTER_TYPE) {
@@ -140,11 +133,6 @@ public void testTrainedModelDeployment() throws Exception {
     }
 
     public void testTrainedModelDeploymentStopOnMixedCluster() throws Exception {
-        @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // upgrade will always be from v8, condition can be removed
-        var originalClusterAtLeastV8 = isOriginalClusterVersionAtLeast(Version.V_8_0_0);
-        // These tests assume the original cluster is v8 - testing for features on the _current_ cluster will break for NEW
-        assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8);
-
         final String modelId = "upgrade-deployment-test-stop-mixed-cluster";
 
         switch (CLUSTER_TYPE) {
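NOTE: with isOriginalClusterVersionAtLeast deleted, upgrade tests can no longer gate on parsed versions of the original cluster; every supported upgrade now starts from v8+, and any remaining gates should use cluster features instead. A hedged sketch of the feature-based idiom that replaces the deleted helper, following the oldClusterHasFeature usage removed from the shutdown test earlier in this diff (the feature id here is hypothetical):

    // Gate on a cluster feature rather than a parsed version string.
    assumeTrue(
        "requires NLP model deployments in the old cluster",
        oldClusterHasFeature("ml.nlp_model_deployments")   // hypothetical feature id
    );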
"upgrade-deployment-test-stop-mixed-cluster"; switch (CLUSTER_TYPE) { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java index 7cefaa2edb388..74165eeb07b8a 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java @@ -7,14 +7,12 @@ package org.elasticsearch.upgrades; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Strings; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.rest.RestTestLegacyFeatures; @@ -71,10 +69,6 @@ public class MlAssignmentPlannerUpgradeIT extends AbstractUpgradeTestCase { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101926") public void testMlAssignmentPlannerUpgrade() throws Exception { - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // upgrade will always be from v8, condition can be removed - var originalClusterAtLeastV8 = isOriginalClusterVersionAtLeast(Version.V_8_0_0); - // These tests assume the original cluster is v8 - testing for features on the _current_ cluster will break for NEW - assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8); assumeFalse("This test deploys multiple models which cannot be accommodated on a single processor", IS_SINGLE_PROCESSOR_TEST); logger.info("Starting testMlAssignmentPlannerUpgrade, model size {}", RAW_MODEL_SIZE);