diff --git a/docs/changelog/114951.yaml b/docs/changelog/114951.yaml new file mode 100644 index 0000000000000..4d40a063e2b02 --- /dev/null +++ b/docs/changelog/114951.yaml @@ -0,0 +1,5 @@ +pr: 114951 +summary: Expose cluster-state role mappings in APIs +area: Authentication +type: bug +issues: [] diff --git a/docs/changelog/115041.yaml b/docs/changelog/115041.yaml new file mode 100644 index 0000000000000..f4c047c1569ec --- /dev/null +++ b/docs/changelog/115041.yaml @@ -0,0 +1,6 @@ +pr: 115041 +summary: Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` + to 100_000 +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/115102.yaml b/docs/changelog/115102.yaml new file mode 100644 index 0000000000000..f679bb6c223a6 --- /dev/null +++ b/docs/changelog/115102.yaml @@ -0,0 +1,6 @@ +pr: 115102 +summary: Watch Next Run Interval Resets On Shard Move or Node Restart +area: Watcher +type: bug +issues: + - 111433 diff --git a/docs/changelog/115308.yaml b/docs/changelog/115308.yaml new file mode 100644 index 0000000000000..163f0232a3e58 --- /dev/null +++ b/docs/changelog/115308.yaml @@ -0,0 +1,6 @@ +pr: 115308 +summary: "ESQL: Disable pushdown of WHERE past STATS" +area: ES|QL +type: bug +issues: + - 115281 diff --git a/docs/changelog/115317.yaml b/docs/changelog/115317.yaml new file mode 100644 index 0000000000000..153f7a52f0674 --- /dev/null +++ b/docs/changelog/115317.yaml @@ -0,0 +1,5 @@ +pr: 115317 +summary: Revert "Add `ResolvedExpression` wrapper" +area: Indices APIs +type: bug +issues: [] diff --git a/docs/changelog/115359.yaml b/docs/changelog/115359.yaml new file mode 100644 index 0000000000000..65b3086dfc8d0 --- /dev/null +++ b/docs/changelog/115359.yaml @@ -0,0 +1,6 @@ +pr: 115359 +summary: Adding support for simulate ingest mapping addition for indices with mappings + that do not come from templates +area: Ingest Node +type: enhancement +issues: [] diff --git 
a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json index bafbcf2bc2038..07ffe84444f02 100644 --- a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json +++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json @@ -5,5 +5,6 @@ "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).", "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", "signatures" : [ ], - "preview" : true + "preview" : true, + "snapshot_only" : false } diff --git a/docs/reference/watcher/how-watcher-works.asciidoc b/docs/reference/watcher/how-watcher-works.asciidoc index ed6e49b72e9ce..e34d4f799d99b 100644 --- a/docs/reference/watcher/how-watcher-works.asciidoc +++ b/docs/reference/watcher/how-watcher-works.asciidoc @@ -146,15 +146,18 @@ add, the more distributed the watches can be executed. If you add or remove replicas, all watches need to be reloaded. If a shard is relocated, the primary and all replicas of this particular shard will reload. -Because the watches are executed on the node, where the watch shards are, you can create -dedicated watcher nodes by using shard allocation filtering. +Because the watches are executed on the node where the watch shards are, you +can create dedicated watcher nodes by using shard allocation filtering. To do +this, configure nodes with a dedicated `node.attr.role: watcher` property. -You could configure nodes with a dedicated `node.attr.role: watcher` property and -then configure the `.watches` index like this: +As the `.watches` index is a system index, you can't use the normal `.watches/_settings` +endpoint to modify its routing allocation. 
Instead, you can use the following dedicated +endpoint to adjust the allocation of the `.watches` shards to the nodes with the +`watcher` role attribute: [source,console] ------------------------ -PUT .watches/_settings +PUT _watcher/settings { "index.routing.allocation.include.role": "watcher" } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index eacf2e5a2ee57..e4150005ed1ae 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -43,13 +43,14 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteTransportException; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import java.util.stream.Collectors; import static org.elasticsearch.ingest.geoip.GeoIpDownloader.DATABASES_INDEX; import static org.elasticsearch.ingest.geoip.GeoIpDownloader.GEOIP_DOWNLOADER; @@ -238,14 +239,11 @@ public void clusterChanged(ClusterChangedEvent event) { } static boolean hasAtLeastOneGeoipProcessor(ClusterState clusterState) { - if (pipelineConfigurationsWithGeoIpProcessor(clusterState, true).isEmpty() == false) { + if (pipelinesWithGeoIpProcessor(clusterState, true).isEmpty() == false) { return true; } - Set checkReferencedPipelines = pipelineConfigurationsWithGeoIpProcessor(clusterState, false).stream() - .map(PipelineConfiguration::getId) - .collect(Collectors.toSet()); - + final Set checkReferencedPipelines = pipelinesWithGeoIpProcessor(clusterState, false); if (checkReferencedPipelines.isEmpty()) { return false; } @@ 
-258,22 +256,24 @@ static boolean hasAtLeastOneGeoipProcessor(ClusterState clusterState) { } /** - * Retrieve list of pipelines that have at least one geoip processor. + * Retrieve the set of pipeline ids that have at least one geoip processor. * @param clusterState Cluster state. * @param downloadDatabaseOnPipelineCreation Filter the list to include only pipeline with the download_database_on_pipeline_creation * matching the param. - * @return A list of {@link PipelineConfiguration} matching criteria. + * @return A set of pipeline ids matching criteria. */ @SuppressWarnings("unchecked") - private static List pipelineConfigurationsWithGeoIpProcessor( - ClusterState clusterState, - boolean downloadDatabaseOnPipelineCreation - ) { - List pipelineDefinitions = IngestService.getPipelines(clusterState); - return pipelineDefinitions.stream().filter(pipelineConfig -> { - List> processors = (List>) pipelineConfig.getConfigAsMap().get(Pipeline.PROCESSORS_KEY); - return hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation); - }).toList(); + private static Set pipelinesWithGeoIpProcessor(ClusterState clusterState, boolean downloadDatabaseOnPipelineCreation) { + List configurations = IngestService.getPipelines(clusterState); + Set ids = new HashSet<>(); + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + for (PipelineConfiguration configuration : configurations) { + List> processors = (List>) configuration.getConfigAsMap().get(Pipeline.PROCESSORS_KEY); + if (hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation)) { + ids.add(configuration.getId()); + } + } + return Collections.unmodifiableSet(ids); } /** @@ -283,7 +283,15 @@ private static List pipelineConfigurationsWithGeoIpProces * @return true if a geoip processor is found in the processor list. 
*/ private static boolean hasAtLeastOneGeoipProcessor(List> processors, boolean downloadDatabaseOnPipelineCreation) { - return processors != null && processors.stream().anyMatch(p -> hasAtLeastOneGeoipProcessor(p, downloadDatabaseOnPipelineCreation)); + if (processors != null) { + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + for (Map processor : processors) { + if (hasAtLeastOneGeoipProcessor(processor, downloadDatabaseOnPipelineCreation)) { + return true; + } + } + } + return false; } /** @@ -317,7 +325,7 @@ private static boolean hasAtLeastOneGeoipProcessor(Map processor } /** - * Check if a processor config is has an on_failure clause containing at least a geoip processor. + * Check if a processor config has an on_failure clause containing at least a geoip processor. * @param processor Processor config. * @param downloadDatabaseOnPipelineCreation Should the download_database_on_pipeline_creation of the geoip processor be true or false. * @return true if a geoip processor is found in the processor list. 
@@ -327,16 +335,17 @@ private static boolean isProcessorWithOnFailureGeoIpProcessor( Map processor, boolean downloadDatabaseOnPipelineCreation ) { - return processor != null - && processor.values() - .stream() - .anyMatch( - value -> value instanceof Map - && hasAtLeastOneGeoipProcessor( - ((Map>>) value).get("on_failure"), - downloadDatabaseOnPipelineCreation - ) - ); + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + for (Object value : processor.values()) { + if (value instanceof Map + && hasAtLeastOneGeoipProcessor( + ((Map>>) value).get("on_failure"), + downloadDatabaseOnPipelineCreation + )) { + return true; + } + } + return false; } /** diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index cd252fcff2376..5904169308fab 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -364,8 +364,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name())); // MatchOnlyText never has norms, so we have to use the field names field BlockSourceReader.LeafIteratorLookup lookup = BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, lookup, sourceMode); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, lookup); } @Override diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java 
b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index 1f647cb977cf5..b845545133e19 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -319,8 +319,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.DoublesBlockLoader(valueFetcher, lookup, sourceMode); + return new BlockSourceReader.DoublesBlockLoader(valueFetcher, lookup); } @Override diff --git a/muted-tests.yml b/muted-tests.yml index 1cb8baa96a942..f59ca0c213279 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -23,12 +23,6 @@ tests: - class: org.elasticsearch.index.store.FsDirectoryFactoryTests method: testPreload issue: https://github.com/elastic/elasticsearch/issues/110211 -- class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT - method: testMinVersionAsNewVersion - issue: https://github.com/elastic/elasticsearch/issues/95384 -- class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT - method: testCcsMinimizeRoundtripsIsFalse - issue: https://github.com/elastic/elasticsearch/issues/101974 - class: "org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests" issue: "https://github.com/elastic/elasticsearch/issues/110408" method: "testCreateAndRestorePartialSearchableSnapshot" @@ -99,9 +93,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/112424 - class: org.elasticsearch.ingest.geoip.IngestGeoIpClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/111497 -- class: 
org.elasticsearch.smoketest.SmokeTestIngestWithAllDepsClientYamlTestSuiteIT - method: test {yaml=ingest/80_ingest_simulate/Test ingest simulate with reroute and mapping validation from templates} - issue: https://github.com/elastic/elasticsearch/issues/112575 - class: org.elasticsearch.xpack.security.authc.kerberos.SimpleKdcLdapServerTests method: testClientServiceMutualAuthentication issue: https://github.com/elastic/elasticsearch/issues/112529 @@ -146,18 +137,12 @@ tests: - class: org.elasticsearch.action.admin.cluster.node.stats.NodeStatsTests method: testChunking issue: https://github.com/elastic/elasticsearch/issues/113139 -- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests - method: testResponse - issue: https://github.com/elastic/elasticsearch/issues/113148 - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test30StartStop issue: https://github.com/elastic/elasticsearch/issues/113160 - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test33JavaChanged issue: https://github.com/elastic/elasticsearch/issues/113177 -- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests - method: testErrorMidStream - issue: https://github.com/elastic/elasticsearch/issues/113179 - class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT method: test {categorize.Categorize SYNC} issue: https://github.com/elastic/elasticsearch/issues/113054 @@ -170,9 +155,6 @@ tests: - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test80JavaOptsInEnvVar issue: https://github.com/elastic/elasticsearch/issues/113219 -- class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests - method: "testFold {TestCase= #2}" - issue: https://github.com/elastic/elasticsearch/issues/113225 - class: org.elasticsearch.packaging.test.WindowsServiceTests method: test81JavaOptsInJvmOptions issue: https://github.com/elastic/elasticsearch/issues/113313 @@ -212,8 +194,6 @@ 
tests: - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT method: test {categorize.Categorize SYNC} issue: https://github.com/elastic/elasticsearch/issues/113722 -- class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanosTests - issue: https://github.com/elastic/elasticsearch/issues/113661 - class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT method: testNonGzippedDatabase issue: https://github.com/elastic/elasticsearch/issues/113821 @@ -244,9 +224,6 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceCrudIT method: testGet issue: https://github.com/elastic/elasticsearch/issues/114135 -- class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests - method: "testFold {TestCase= #7}" - issue: https://github.com/elastic/elasticsearch/issues/114175 - class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT method: testStepInfoPreservedOnAutoRetry issue: https://github.com/elastic/elasticsearch/issues/114220 @@ -265,9 +242,6 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceRestIT method: test {p0=inference/40_semantic_text_query/Query a field that uses the default ELSER 2 endpoint} issue: https://github.com/elastic/elasticsearch/issues/114376 -- class: org.elasticsearch.search.retriever.RankDocsRetrieverBuilderTests - method: testRewrite - issue: https://github.com/elastic/elasticsearch/issues/114467 - class: org.elasticsearch.packaging.test.DockerTests method: test022InstallPluginsFromLocalArchive issue: https://github.com/elastic/elasticsearch/issues/111063 @@ -276,15 +250,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultElserIT method: testInferCreatesDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114503 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=synonyms/60_synonym_rule_get/Synonym set not found} - issue: https://github.com/elastic/elasticsearch/issues/114432 -- class: 
org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=synonyms/60_synonym_rule_get/Get a synonym rule} - issue: https://github.com/elastic/elasticsearch/issues/114443 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=synonyms/60_synonym_rule_get/Synonym rule not found} - issue: https://github.com/elastic/elasticsearch/issues/114444 - class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT method: testGetModel issue: https://github.com/elastic/elasticsearch/issues/114657 @@ -308,24 +273,30 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 -- class: org.elasticsearch.upgrades.MultiVersionRepositoryAccessIT - method: testCreateAndRestoreSnapshot - issue: https://github.com/elastic/elasticsearch/issues/114998 -- class: org.elasticsearch.index.mapper.TextFieldMapperTests - method: testBlockLoaderFromRowStrideReaderWithSyntheticSource - issue: https://github.com/elastic/elasticsearch/issues/115066 -- class: org.elasticsearch.index.mapper.TextFieldMapperTests - method: testBlockLoaderFromColumnReaderWithSyntheticSource - issue: https://github.com/elastic/elasticsearch/issues/115073 -- class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests - method: testBlockLoaderFromColumnReaderWithSyntheticSource - issue: https://github.com/elastic/elasticsearch/issues/115074 -- class: org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapperTests - method: testBlockLoaderFromRowStrideReaderWithSyntheticSource - issue: https://github.com/elastic/elasticsearch/issues/115076 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 +- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests + 
method: testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval + issue: https://github.com/elastic/elasticsearch/issues/115339 +- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests + method: testWatchWithLastCheckedTimeExecutesBeforeInitialInterval + issue: https://github.com/elastic/elasticsearch/issues/115354 +- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests + method: testAddWithLastCheckedTimeExecutesBeforeInitialInterval + issue: https://github.com/elastic/elasticsearch/issues/115356 +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testInferDeploysDefaultE5 + issue: https://github.com/elastic/elasticsearch/issues/115361 +- class: org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleEngineTests + method: testWatchWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval + issue: https://github.com/elastic/elasticsearch/issues/115368 +- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests + method: testProcessFileChanges + issue: https://github.com/elastic/elasticsearch/issues/115280 +- class: org.elasticsearch.smoketest.SmokeTestIngestWithAllDepsClientYamlTestSuiteIT + method: test {yaml=ingest/80_ingest_simulate/Test mapping addition works with legacy templates} + issue: https://github.com/elastic/elasticsearch/issues/115412 # Examples: # diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 73f291da15ead..92a704f793dc2 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import 
org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -1203,15 +1202,8 @@ public void testClosedIndices() throws Exception { closeIndex(index); } - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_INDEXING) // This check can be removed (always assume true) - var originalClusterSupportsReplicationOfClosedIndices = oldClusterHasFeature(RestTestLegacyFeatures.REPLICATION_OF_CLOSED_INDICES); - - if (originalClusterSupportsReplicationOfClosedIndices) { - ensureGreenLongWait(index); - assertClosedIndex(index, true); - } else { - assertClosedIndex(index, false); - } + ensureGreenLongWait(index); + assertClosedIndex(index, true); if (isRunningAgainstOldCluster() == false) { openIndex(index); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java index 3275f3e0e136f..b3d4dfc68d399 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FileSettingsRoleMappingUpgradeIT.java @@ -25,9 +25,12 @@ import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.function.Supplier; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -106,6 +109,10 @@ public void testRoleMappingsAppliedOnUpgrade() throws IOException { ); assertThat(roleMappings, is(not(nullValue()))); assertThat(roleMappings.size(), equalTo(1)); + 
assertThat(roleMappings, is(instanceOf(Map.class))); + @SuppressWarnings("unchecked") + Map roleMapping = (Map) roleMappings; + assertThat(roleMapping.keySet(), contains("everyone_kibana-read-only-operator-mapping")); } } } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index d4aa2f1ad4467..4d1a62c6f179e 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1229,7 +1229,7 @@ setup: - requires: cluster_features: ["simulate.mapping.addition"] - reason: "ingest simulate mapping addition added in 8.16" + reason: "ingest simulate mapping addition added in 8.17" - do: headers: @@ -1465,7 +1465,7 @@ setup: - requires: cluster_features: ["simulate.mapping.addition"] - reason: "ingest simulate mapping addition added in 8.16" + reason: "ingest simulate mapping addition added in 8.17" - do: indices.put_template: @@ -1571,3 +1571,143 @@ setup: - match: { docs.0.doc._source.foo: 3 } - match: { docs.0.doc._source.bar: "not a boolean" } - not_exists: docs.0.doc.error + +--- +"Test mapping addition works with indices without templates": + # In this test, we make sure that when we have an index that has mapping but was not built with a template, that the additional_mapping + # is merged in with that template. 
+ + - skip: + features: + - headers + - allowed_warnings + + - requires: + cluster_features: ["simulate.support.non.template.mapping"] + reason: "ingest simulate support for indices with mappings that didn't come from templates added in 8.17" + + # First, make sure that validation fails before we create the index (since we are only defining to bar field but trying to index a value + # for foo. + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "some text value" + } + } + ], + "mapping_addition": { + "dynamic": "strict", + "properties": { + "bar": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "some text value" } + - match: { docs.0.doc.error.type: "strict_dynamic_mapping_exception" } + + - do: + indices.create: + index: foo-1 + body: + mappings: + dynamic: strict + properties: + foo: + type: integer + - match: { acknowledged: true } + + # Now make sure that the mapping for the newly-created index is getting picked up. Validation fails because it only defined a mapping + # for foo, not for bar. 
+ - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "some text value" + } + } + ] + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "some text value" } + - match: { docs.0.doc.error.type: "strict_dynamic_mapping_exception" } + + # Now we make sure that the index's mapping gets merged with the mapping_addition: + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: foo-1 + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "some text value" + } + } + ], + "mapping_addition": { + "dynamic": "strict", + "properties": { + "bar": { + "type": "keyword" + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "foo-1" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "some text value" } + - not_exists: docs.0.doc.error + + # This last call to simulate is just making sure that if there are no templates, no index mappings, no substitutions, and no mapping + # addition, then validation does not fail + - do: + headers: + Content-Type: application/json + simulate.ingest: + index: nonexistent + body: > + { + "docs": [ + { + "_id": "asdf", + "_source": { + "foo": 3, + "bar": "some text value" + } + } + ] + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "nonexistent" } + - match: { docs.0.doc._source.foo: 3 } + - match: { docs.0.doc._source.bar: "some text value" } + - not_exists: docs.0.doc.error diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml index bcd58f3f7bd64..675b98133ce11 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml @@ -15,6 +15,10 @@ setup: - match: { result: "created" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + - do: synonyms.get_synonym: id: test-update-synonyms @@ -58,6 +62,10 @@ setup: - match: { result: "created" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + - do: synonyms.get_synonym: id: test-empty-synonyms diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml index d3d0a3bb4df70..4e77e10495109 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml @@ -11,6 +11,11 @@ setup: synonyms_set: synonyms: "foo => bar, baz" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true + - do: indices.create: index: test_index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml index 3494f33466ce4..5e6d4ec2341ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml @@ -14,6 +14,10 @@ setup: - synonyms: "test => check" id: "test-id-3" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
+ - do: + cluster.health: + wait_for_no_initializing_shards: true --- "Get synonyms set": @@ -31,7 +35,6 @@ setup: id: "test-id-2" - synonyms: "test => check" id: "test-id-3" - --- "Get synonyms set - not found": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml index 351ff4e186d8a..23c907f6a1137 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml @@ -12,6 +12,10 @@ setup: - synonyms: "bye => goodbye" id: "test-id-2" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true --- "Delete synonyms set": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml index 723c41e163eb8..7c145dafd81cd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml @@ -9,6 +9,12 @@ setup: synonyms_set: - synonyms: "hello, hi" - synonyms: "goodbye, bye" + + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
+ - do: + cluster.health: + wait_for_no_initializing_shards: true + - do: synonyms.put_synonym: id: test-synonyms-1 @@ -23,21 +29,8 @@ setup: body: synonyms_set: - synonyms: "pc, computer" - # set logging to debug for issue: https://github.com/elastic/elasticsearch/issues/102261 - - do: - cluster.put_settings: - body: - persistent: - logger.org.elasticsearch.synonyms: DEBUG --- -teardown: - - do: - cluster.put_settings: - body: - persistent: - logger.org.elasticsearch.synonyms: null ---- "List synonyms set": - do: synonyms.get_synonyms_sets: { } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml index f3711bb0774ca..d8611000fe465 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml @@ -14,7 +14,10 @@ setup: - synonyms: "test => check" id: "test-id-3" - + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true --- "Update a synonyms rule": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml index 2a7c8aff89d8e..0c962b51e08cb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml @@ -13,11 +13,12 @@ setup: id: "test-id-2" - synonyms: "test => check" id: "test-id-3" + + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
- do: cluster.health: - index: .synonyms - timeout: 1m - wait_for_status: green + wait_for_no_initializing_shards: true + --- "Get a synonym rule": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml index a4853b0b6d414..41ab293158a35 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml @@ -14,6 +14,11 @@ setup: - synonyms: "test => check" id: "test-id-3" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + wait_for_no_initializing_shards: true + --- "Delete synonym rule": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml index 89ad933370e1c..3aba0f0b4b78b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml @@ -2,7 +2,6 @@ setup: - requires: cluster_features: ["gte_v8.10.0"] reason: Loading synonyms from index is introduced in 8.10.0 - # Create a new synonyms set - do: synonyms.put_synonym: @@ -14,6 +13,11 @@ setup: - synonyms: "bye => goodbye" id: "synonym-rule-2" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
+ - do: + cluster.health: + wait_for_no_initializing_shards: true + # Create an index with synonym_filter that uses that synonyms set - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml index dc94b36222402..1ceb5b43b8129 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml @@ -3,7 +3,6 @@ - requires: cluster_features: ["gte_v8.10.0"] reason: Reloading analyzers for specific synonym set is introduced in 8.10.0 - # Create synonyms_set1 - do: synonyms.put_synonym: @@ -26,6 +25,11 @@ - synonyms: "bye => goodbye" id: "synonym-rule-2" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
+ - do: + cluster.health: + wait_for_no_initializing_shards: true + # Create my_index1 with synonym_filter that uses synonyms_set1 - do: indices.create: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java b/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java index f813932ebe924..ecd5c5af8649f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java @@ -123,7 +123,7 @@ public Settings onNodeStopped(String nodeName) { public void testFailsToStartIfDowngraded() { final IllegalStateException illegalStateException = expectThrowsOnRestart( - dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooNewVersion(), dataPaths) + dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooNewBuildVersion(), dataPaths) ); assertThat( illegalStateException.getMessage(), @@ -133,7 +133,7 @@ public void testFailsToStartIfDowngraded() { public void testFailsToStartIfUpgradedTooFar() { final IllegalStateException illegalStateException = expectThrowsOnRestart( - dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooOldVersion(), dataPaths) + dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooOldBuildVersion(), dataPaths) ); assertThat( illegalStateException.getMessage(), diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index b855f2cee7613..9ffef1f178f44 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -17,7 +17,6 @@ import 
org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; @@ -85,7 +84,7 @@ protected void masterOperation( String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices()); Map indicesAndFilters = new HashMap<>(); - Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (String index : concreteIndices) { final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases); final String[] aliases = indexNameExpressionResolver.indexAliases( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java index f5c100b7884bb..5c5c71bc002b3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -566,8 +565,8 @@ static void resolveIndices( if (names.length == 1 && (Metadata.ALL.equals(names[0]) || Regex.isMatchAllPattern(names[0]))) { names = new String[] { "**" }; } - Set resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); - for (ResolvedExpression s : resolvedIndexAbstractions) { + Set resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); + for (String s : resolvedIndexAbstractions) { enrichIndexAbstraction(clusterState, s, indices, aliases, dataStreams); } indices.sort(Comparator.comparing(ResolvedIndexAbstraction::getName)); @@ -598,12 +597,12 @@ private static void mergeResults( private static void enrichIndexAbstraction( ClusterState clusterState, - ResolvedExpression indexAbstraction, + String indexAbstraction, List indices, List aliases, List dataStreams ) { - IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction.resource()); + IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction); if (ia != null) { switch (ia.getType()) { case CONCRETE_INDEX -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 5e3799cd14518..94d9b87467ea8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -48,6 +48,7 @@ import java.time.Instant; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -270,6 +271,7 @@ public static Template resolveTemplate( // 
First apply settings sourced from index settings providers final var now = Instant.now(); Settings.Builder additionalSettings = Settings.builder(); + Set overrulingSettings = new HashSet<>(); for (var provider : indexSettingProviders) { Settings result = provider.getAdditionalIndexSettings( indexName, @@ -283,8 +285,21 @@ public static Template resolveTemplate( MetadataCreateIndexService.validateAdditionalSettings(provider, result, additionalSettings); dummySettings.put(result); additionalSettings.put(result); + if (provider.overrulesTemplateAndRequestSettings()) { + overrulingSettings.addAll(result.keySet()); + } } - // Then apply settings resolved from templates: + + if (overrulingSettings.isEmpty() == false) { + // Filter any conflicting settings from overruling providers, to avoid overwriting their values from templates. + final Settings.Builder filtered = Settings.builder().put(templateSettings); + for (String setting : overrulingSettings) { + filtered.remove(setting); + } + templateSettings = filtered.build(); + } + + // Apply settings resolved from templates. 
dummySettings.put(templateSettings); final IndexMetadata indexMetadata = IndexMetadata.builder(indexName) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index e01f364712676..4e9830fe0d14e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ -134,7 +133,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener @Override protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) { final ClusterState clusterState = clusterService.state(); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); final AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, shard.getIndexName(), indicesAndAliases); return new ShardValidateQueryRequest(shard.shardId(), aliasFilter, request); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java index 22cf8a2260d87..62a9b88cb6a57 100644 --- 
a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java @@ -19,6 +19,7 @@ import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_ADDITION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION_TEMPLATES; +import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING; public class BulkFeatures implements FeatureSpecification { public Set getFeatures() { @@ -27,7 +28,8 @@ public Set getFeatures() { SIMULATE_MAPPING_VALIDATION_TEMPLATES, SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS, SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS, - SIMULATE_MAPPING_ADDITION + SIMULATE_MAPPING_ADDITION, + SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING ); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index 0888b70f5399c..1353fa78595ef 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -84,6 +84,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction { ); public static final NodeFeature SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS = new NodeFeature("simulate.index.template.substitutions"); public static final NodeFeature SIMULATE_MAPPING_ADDITION = new NodeFeature("simulate.mapping.addition"); + public static final NodeFeature SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING = new NodeFeature("simulate.support.non.template.mapping"); private final IndicesService indicesService; private final NamedXContentRegistry xContentRegistry; private final Set indexSettingProviders; @@ -258,6 +259,10 @@ private Exception 
validateMappings( String matchingTemplate = findV2Template(simulatedState.metadata(), request.index(), false); if (matchingTemplate != null) { + /* + * The index matches a v2 template (including possibly one or more of the substitutions passed in). So we use this + * template, and then possibly apply the mapping addition if it is not null, and validate. + */ final Template template = TransportSimulateIndexTemplateAction.resolveTemplate( matchingTemplate, request.index(), @@ -273,13 +278,36 @@ private Exception validateMappings( validateUpdatedMappings(mappings, mergedMappings, request, sourceToParse); } else { List matchingTemplates = findV1Templates(simulatedState.metadata(), request.index(), false); - final Map mappingsMap = MetadataCreateIndexService.parseV1Mappings( - "{}", - matchingTemplates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), - xContentRegistry - ); - final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition); - validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + if (matchingTemplates.isEmpty() == false) { + /* + * The index matches v1 mappings. These are not compatible with component_template_substitutions or + * index_template_substitutions, but we can apply a mapping_addition. + */ + final Map mappingsMap = MetadataCreateIndexService.parseV1Mappings( + "{}", + matchingTemplates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), + xContentRegistry + ); + final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition); + validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + } else if (indexAbstraction != null && mappingAddition.isEmpty() == false) { + /* + * The index matched no templates of any kind, including the substitutions. But it might have a mapping. So we + * merge in the mapping addition if it exists, and validate. 
+ */ + MappingMetadata mappingFromIndex = clusterService.state().metadata().index(indexAbstraction.getName()).mapping(); + CompressedXContent currentIndexCompressedXContent = mappingFromIndex == null ? null : mappingFromIndex.source(); + CompressedXContent combinedMappings = mergeMappings(currentIndexCompressedXContent, mappingAddition); + validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + } else { + /* + * The index matched no templates and had no mapping of its own. If there were component template substitutions + * or index template substitutions, they didn't match anything. So just apply the mapping addition if it exists, + * and validate. + */ + final CompressedXContent combinedMappings = mergeMappings(null, mappingAddition); + validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + } } } } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 84c6df7b8a66f..9c82d032014f2 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; @@ -110,7 +109,7 @@ protected boolean resolveIndex(ExplainRequest request) { @Override protected void resolveRequest(ClusterState state, InternalRequest request) { - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, 
request.request().index()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index()); final AliasFilter aliasFilter = searchService.buildAliasFilter(state, request.concreteIndex(), indicesAndAliases); request.request().filteringAlias(aliasFilter); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index b5864f64a7824..1645a378446a4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -37,7 +37,6 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; @@ -111,7 +110,6 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; -import java.util.stream.Collectors; import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; @@ -205,7 +203,7 @@ public TransportSearchAction( private Map buildPerIndexOriginalIndices( ClusterState clusterState, - Set indicesAndAliases, + Set indicesAndAliases, String[] indices, IndicesOptions indicesOptions ) { @@ -213,9 +211,6 @@ private Map buildPerIndexOriginalIndices( var blocks = clusterState.blocks(); // optimization: mostly we do not have any blocks so there's no point in the expensive per-index checking boolean hasBlocks = blocks.global().isEmpty() == false || blocks.indices().isEmpty() == 
false; - // Get a distinct set of index abstraction names present from the resolved expressions to help with the reverse resolution from - // concrete index to the expression that produced it. - Set indicesAndAliasesResources = indicesAndAliases.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); for (String index : indices) { if (hasBlocks) { blocks.indexBlockedRaiseException(ClusterBlockLevel.READ, index); @@ -232,8 +227,8 @@ private Map buildPerIndexOriginalIndices( String[] finalIndices = Strings.EMPTY_ARRAY; if (aliases == null || aliases.length == 0 - || indicesAndAliasesResources.contains(index) - || hasDataStreamRef(clusterState, indicesAndAliasesResources, index)) { + || indicesAndAliases.contains(index) + || hasDataStreamRef(clusterState, indicesAndAliases, index)) { finalIndices = new String[] { index }; } if (aliases != null) { @@ -252,11 +247,7 @@ private static boolean hasDataStreamRef(ClusterState clusterState, Set i return indicesAndAliases.contains(ret.getParentDataStream().getName()); } - Map buildIndexAliasFilters( - ClusterState clusterState, - Set indicesAndAliases, - Index[] concreteIndices - ) { + Map buildIndexAliasFilters(ClusterState clusterState, Set indicesAndAliases, Index[] concreteIndices) { final Map aliasFilterMap = new HashMap<>(); for (Index index : concreteIndices) { clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); @@ -1246,10 +1237,7 @@ private void executeSearch( } else { final Index[] indices = resolvedIndices.getConcreteLocalIndices(); concreteLocalIndices = Arrays.stream(indices).map(Index::getName).toArray(String[]::new); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions( - clusterState, - searchRequest.indices() - ); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); aliasFilter = buildIndexAliasFilters(clusterState, indicesAndAliases, indices); 
aliasFilter.putAll(remoteAliasMap); localShardIterators = getLocalShardsIterator( @@ -1822,7 +1810,7 @@ List getLocalShardsIterator( ClusterState clusterState, SearchRequest searchRequest, String clusterAlias, - Set indicesAndAliases, + Set indicesAndAliases, String[] concreteIndices ) { var routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index b94bd95c93d8a..f418b5617b2a1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.index.Index; @@ -128,10 +127,7 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices, null), listener.delegateFailureAndWrap((delegate, searchRequest) -> { Index[] concreteIndices = resolvedIndices.getConcreteLocalIndices(); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions( - clusterState, - searchRequest.indices() - ); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); final Map aliasFilters = transportSearchAction.buildIndexAliasFilters( clusterState, indicesAndAliases, diff --git 
a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index 6d1a874e1c72b..ae7cff6312155 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -28,6 +28,8 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -189,9 +191,14 @@ public List getRequiredComponentTemplates() { if (ignoreMissingComponentTemplates == null) { return componentTemplates; } - return componentTemplates.stream() - .filter(componentTemplate -> ignoreMissingComponentTemplates.contains(componentTemplate) == false) - .toList(); + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + List required = new ArrayList<>(componentTemplates.size()); + for (String template : componentTemplates) { + if (ignoreMissingComponentTemplates.contains(template) == false) { + required.add(template); + } + } + return Collections.unmodifiableList(required); } @Nullable diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index eaf54034b22e0..2229166a2d779 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -74,15 +74,6 @@ public IndexNameExpressionResolver(ThreadContext threadContext, SystemIndices sy this.systemIndices = Objects.requireNonNull(systemIndices, "System Indices must not be null"); } - /** - * This contains the resolved expression in the form of the 
resource. - * Soon it will facilitate the index component selector. - * @param resource the resolved resolvedExpression - */ - public record ResolvedExpression(String resource) { - - } - /** * Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, String...)}, but the index expressions and options * are encapsulated in the specified request. @@ -200,9 +191,8 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressions(context, indexExpressions); return expressions.stream() - .map(ResolvedExpression::resource) .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) .filter(ia -> ia.getType() == Type.DATA_STREAM) @@ -231,11 +221,10 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, request.index()); + final Collection expressions = resolveExpressions(context, request.index()); if (expressions.size() == 1) { - ResolvedExpression resolvedExpression = expressions.iterator().next(); - IndexAbstraction ia = state.metadata().getIndicesLookup().get(resolvedExpression.resource()); + IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); if (ia.getType() == Type.ALIAS) { Index writeIndex = ia.getWriteIndex(); if (writeIndex == null) { @@ -257,14 +246,14 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection resolveExpressions(Context context, String... expressions) { + protected static Collection resolveExpressions(Context context, String... 
expressions) { if (context.getOptions().expandWildcardExpressions() == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); } else { return ExplicitResourceNameFilter.filterUnavailable( context, - DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList()) + DateMathExpressionResolver.resolve(context, List.of(expressions)) ); } } else { @@ -275,10 +264,7 @@ protected static Collection resolveExpressions(Context conte } else { return WildcardExpressionResolver.resolve( context, - ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList()) - ) + ExplicitResourceNameFilter.filterUnavailable(context, DateMathExpressionResolver.resolve(context, List.of(expressions))) ); } } @@ -353,12 +339,12 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressions(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); - for (ResolvedExpression resolvedExpression : expressions) { - final IndexAbstraction indexAbstraction = indicesLookup.get(resolvedExpression.resource()); + for (String expression : expressions) { + final IndexAbstraction indexAbstraction = indicesLookup.get(expression); assert indexAbstraction != null; if (indexAbstraction.getType() == Type.ALIAS && context.isResolveToWriteIndex()) { Index writeIndex = indexAbstraction.getWriteIndex(); @@ -392,7 +378,7 @@ Index[] concreteIndices(Context context, String... 
indexExpressions) { throw new IllegalArgumentException( indexAbstraction.getType().getDisplayName() + " [" - + resolvedExpression.resource() + + expression + "] has more than one index associated with it " + Arrays.toString(indexNames) + ", can't execute a single index op" @@ -656,7 +642,7 @@ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { * Utility method that allows to resolve an index expression to its corresponding single write index. * * @param state the cluster state containing all the data to resolve to expression to a concrete index - * @param request The request that defines how an alias or an index need to be resolved to a concrete index + * @param request The request that defines how the an alias or an index need to be resolved to a concrete index * and the expression that can be resolved to an alias or an index name. * @throws IllegalArgumentException if the index resolution does not lead to an index, or leads to more than one index * @return the write index obtained as a result of the index resolution @@ -748,7 +734,7 @@ public static String resolveDateMathExpression(String dateExpression, long time) /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ - public Set resolveExpressions(ClusterState state, String... expressions) { + public Set resolveExpressions(ClusterState state, String... expressions) { return resolveExpressions(state, IndicesOptions.lenientExpandOpen(), false, expressions); } @@ -757,7 +743,7 @@ public Set resolveExpressions(ClusterState state, String... * If {@param preserveDataStreams} is {@code true}, datastreams that are covered by the wildcards from the * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. 
*/ - public Set resolveExpressions( + public Set resolveExpressions( ClusterState state, IndicesOptions indicesOptions, boolean preserveDataStreams, @@ -774,10 +760,10 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); - if (resolved instanceof Set) { + Collection resolved = resolveExpressions(context, expressions); + if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items - return Collections.unmodifiableSet((Set) resolved); + return Collections.unmodifiableSet((Set) resolved); } else { return Set.copyOf(resolved); } @@ -790,7 +776,7 @@ public Set resolveExpressions( * the index itself - null is returned. Returns {@code null} if no filtering is required. * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. */ - public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { + public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); } @@ -816,39 +802,39 @@ public String[] indexAliases( Predicate requiredAlias, Predicate requiredDataStreamAlias, boolean skipIdentity, - Set resolvedExpressions + Set resolvedExpressions ) { - if (isAllIndicesExpression(resolvedExpressions)) { + if (isAllIndices(resolvedExpressions)) { return null; } - Set resources = resolvedExpressions.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); + final IndexMetadata indexMetadata = state.metadata().getIndices().get(index); if (indexMetadata == null) { // Shouldn't happen throw new IndexNotFoundException(index); } - if (skipIdentity == false && resources.contains(index)) { + if (skipIdentity == false && resolvedExpressions.contains(index)) { return null; } 
IndexAbstraction ia = state.metadata().getIndicesLookup().get(index); DataStream dataStream = ia.getParentDataStream(); if (dataStream != null) { - if (skipIdentity == false && resources.contains(dataStream.getName())) { + if (skipIdentity == false && resolvedExpressions.contains(dataStream.getName())) { // skip the filters when the request targets the data stream name return null; } Map dataStreamAliases = state.metadata().dataStreamAliases(); List aliasesForDataStream; - if (iterateIndexAliases(dataStreamAliases.size(), resources.size())) { + if (iterateIndexAliases(dataStreamAliases.size(), resolvedExpressions.size())) { aliasesForDataStream = dataStreamAliases.values() .stream() - .filter(dataStreamAlias -> resources.contains(dataStreamAlias.getName())) + .filter(dataStreamAlias -> resolvedExpressions.contains(dataStreamAlias.getName())) .filter(dataStreamAlias -> dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); } else { - aliasesForDataStream = resources.stream() + aliasesForDataStream = resolvedExpressions.stream() .map(dataStreamAliases::get) .filter(dataStreamAlias -> dataStreamAlias != null && dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); @@ -873,15 +859,18 @@ public String[] indexAliases( } else { final Map indexAliases = indexMetadata.getAliases(); final AliasMetadata[] aliasCandidates; - if (iterateIndexAliases(indexAliases.size(), resources.size())) { + if (iterateIndexAliases(indexAliases.size(), resolvedExpressions.size())) { // faster to iterate indexAliases aliasCandidates = indexAliases.values() .stream() - .filter(aliasMetadata -> resources.contains(aliasMetadata.alias())) + .filter(aliasMetadata -> resolvedExpressions.contains(aliasMetadata.alias())) .toArray(AliasMetadata[]::new); } else { // faster to iterate resolvedExpressions - aliasCandidates = resources.stream().map(indexAliases::get).filter(Objects::nonNull).toArray(AliasMetadata[]::new); + aliasCandidates = 
resolvedExpressions.stream() + .map(indexAliases::get) + .filter(Objects::nonNull) + .toArray(AliasMetadata[]::new); } List aliases = null; for (AliasMetadata aliasMetadata : aliasCandidates) { @@ -920,7 +909,12 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); + final Collection resolvedExpressions = resolveExpressions(context, expressions); + + // TODO: it appears that this can never be true? + if (isAllIndices(resolvedExpressions)) { + return resolveSearchRoutingAllIndices(state.metadata(), routing); + } Map> routings = null; Set paramRouting = null; @@ -930,8 +924,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab paramRouting = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); } - for (ResolvedExpression resolvedExpression : resolvedExpressions) { - IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(resolvedExpression.resource); + for (String expression : resolvedExpressions) { + IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { for (Index index : indexAbstraction.getIndices()) { String concreteIndex = index.getName(); @@ -969,7 +963,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab } } else { // Index - routings = collectRoutings(routings, paramRouting, norouting, resolvedExpression.resource()); + routings = collectRoutings(routings, paramRouting, norouting, expression); } } @@ -1015,17 +1009,6 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m return null; } - /** - * Identifies whether the array containing index names given as argument refers to all indices - * The empty or null array identifies all indices - * - * @param aliasesOrIndices the array containing index names - * @return true if the provided 
array maps to all indices, false otherwise - */ - public static boolean isAllIndicesExpression(Collection aliasesOrIndices) { - return isAllIndices(aliasesOrIndices.stream().map(ResolvedExpression::resource).toList()); - } - /** * Identifies whether the array containing index names given as argument refers to all indices * The empty or null array identifies all indices @@ -1266,8 +1249,8 @@ private WildcardExpressionResolver() { * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. * Depending on the context, returns the names of the datastreams themselves or their backing indices. */ - public static Collection resolveAll(Context context) { - List concreteIndices = resolveEmptyOrTrivialWildcard(context); + public static Collection resolveAll(Context context) { + List concreteIndices = resolveEmptyOrTrivialWildcard(context); if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) { return concreteIndices; @@ -1282,7 +1265,7 @@ public static Collection resolveAll(Context context) { .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); + Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1310,17 +1293,17 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres * ultimately returned, instead of the alias or datastream name * */ - public static Collection resolve(Context context, List expressions) { + public static Collection resolve(Context context, List expressions) { ExpressionList expressionList = new ExpressionList(context, expressions); // fast exit if there are no wildcards to evaluate if (expressionList.hasWildcard() == false) { return expressions; } - 
Set result = new HashSet<>(); + Set result = new HashSet<>(); for (ExpressionList.Expression expression : expressionList) { if (expression.isWildcard()) { Stream matchingResources = matchResourcesToWildcard(context, expression.get()); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); + Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); AtomicBoolean emptyWildcardExpansion = new AtomicBoolean(false); if (context.getOptions().allowNoIndices() == false) { emptyWildcardExpansion.set(true); @@ -1336,9 +1319,9 @@ public static Collection resolve(Context context, List filterIndicesLookupForSuffixWildcar * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. */ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Stream expandToOpenClosed(Context context, Stream resources) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); return resources.flatMap(indexAbstraction -> { if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(new ResolvedExpression(indexAbstraction.getName())); + return Stream.of(indexAbstraction.getName()); } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(new ResolvedExpression(indexAbstraction.getName())); + return Stream.of(indexAbstraction.getName()); } else { Stream indicesStateStream = Stream.of(); if (shouldIncludeRegularIndices(context.getOptions())) { @@ -1451,20 +1434,18 @@ private static Stream expandToOpenClosed(Context context, St if (excludeState != null) { indicesStateStream = indicesStateStream.filter(indexMeta -> indexMeta.getState() != excludeState); } - return indicesStateStream.map(indexMeta -> new ResolvedExpression(indexMeta.getIndex().getName())); + return indicesStateStream.map(indexMeta -> 
indexMeta.getIndex().getName()); } }); } - private static List resolveEmptyOrTrivialWildcard(Context context) { + private static List resolveEmptyOrTrivialWildcard(Context context) { final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices(context.getOptions(), context.getState().metadata()); - Stream result; if (context.systemIndexAccessLevel == SystemIndexAccessLevel.ALL) { - result = Arrays.stream(allIndices); + return List.of(allIndices); } else { - result = resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices).stream(); + return resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices); } - return result.map(ResolvedExpression::new).toList(); } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { @@ -1526,8 +1507,8 @@ private DateMathExpressionResolver() { // utility class } - public static List resolve(Context context, List expressions) { - List result = new ArrayList<>(expressions.size()); + public static List resolve(Context context, List expressions) { + List result = new ArrayList<>(expressions.size()); for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { result.add(resolveExpression(expression, context::getStartTime)); } @@ -1538,15 +1519,13 @@ static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static ResolvedExpression resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { - String result; + static String resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { if (expression.isExclusion()) { // accepts date-math exclusions that are of the form "-<...{}>", i.e. 
the "-" is outside the "<>" date-math template - result = "-" + resolveExpression(expression.get(), getTime); + return "-" + resolveExpression(expression.get(), getTime); } else { - result = resolveExpression(expression.get(), getTime); + return resolveExpression(expression.get(), getTime); } - return new ResolvedExpression(result); } static String resolveExpression(String expression, LongSupplier getTime) { @@ -1708,26 +1687,25 @@ private ExplicitResourceNameFilter() { * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out. * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in. */ - public static List filterUnavailable(Context context, List expressions) { + public static List filterUnavailable(Context context, List expressions) { ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - List result = new ArrayList<>(expressions.size()); + List result = new ArrayList<>(expressions.size()); for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { validateAliasOrIndex(expression); - if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression)) { - result.add(expression.resolvedExpression()); + if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression.get())) { + result.add(expression.expression()); } } return result; } /** - * This returns `true` if the given {@param resolvedExpression} is of a resource that exists. - * Otherwise, it returns `false` if the `ignore_unavailable` option is `true`, or, if `false`, it throws a "not found" type of + * This returns `true` if the given {@param name} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of * exception. 
*/ @Nullable - private static boolean ensureAliasOrIndexExists(Context context, ExpressionList.Expression expression) { - String name = expression.get(); + private static boolean ensureAliasOrIndexExists(Context context, String name) { boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); if (indexAbstraction == null) { @@ -1759,37 +1737,32 @@ private static boolean ensureAliasOrIndexExists(Context context, ExpressionList. } private static void validateAliasOrIndex(ExpressionList.Expression expression) { - if (Strings.isEmpty(expression.resolvedExpression().resource())) { - throw notFoundException(expression.get()); + if (Strings.isEmpty(expression.expression())) { + throw notFoundException(expression.expression()); } // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown // if the expression can't be found. 
- if (expression.resolvedExpression().resource().charAt(0) == '_') { - throw new InvalidIndexNameException(expression.get(), "must not start with '_'."); + if (expression.expression().charAt(0) == '_') { + throw new InvalidIndexNameException(expression.expression(), "must not start with '_'."); } } - private static void ensureRemoteIndicesRequireIgnoreUnavailable( - IndicesOptions options, - List resolvedExpressions - ) { + private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { if (options.ignoreUnavailable()) { return; } - for (ResolvedExpression resolvedExpression : resolvedExpressions) { - var index = resolvedExpression.resource(); + for (String index : indexExpressions) { if (RemoteClusterAware.isRemoteIndexName(index)) { - failOnRemoteIndicesNotIgnoringUnavailable(resolvedExpressions); + failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); } } } - private static void failOnRemoteIndicesNotIgnoringUnavailable(List resolvedExpressions) { + private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { List crossClusterIndices = new ArrayList<>(); - for (ResolvedExpression resolvedExpression : resolvedExpressions) { - String index = resolvedExpression.resource(); + for (String index : indexExpressions) { if (RemoteClusterAware.isRemoteIndexName(index)) { crossClusterIndices.add(index); } @@ -1807,13 +1780,13 @@ public static final class ExpressionList implements Iterable expressionsList; private final boolean hasWildcard; - public record Expression(ResolvedExpression resolvedExpression, boolean isWildcard, boolean isExclusion) { + public record Expression(String expression, boolean isWildcard, boolean isExclusion) { public String get() { if (isExclusion()) { // drop the leading "-" if exclusion because it is easier for callers to handle it like this - return resolvedExpression().resource().substring(1); + return expression().substring(1); } else { - return 
resolvedExpression().resource(); + return expression(); } } } @@ -1822,17 +1795,16 @@ public String get() { * Creates the expression iterable that can be used to easily check which expression item is a wildcard or an exclusion (or both). * The {@param context} is used to check if wildcards ought to be considered or not. */ - public ExpressionList(Context context, List resolvedExpressions) { - List expressionsList = new ArrayList<>(resolvedExpressions.size()); + public ExpressionList(Context context, List expressionStrings) { + List expressionsList = new ArrayList<>(expressionStrings.size()); boolean wildcardSeen = false; - for (ResolvedExpression resolvedExpression : resolvedExpressions) { - var expressionString = resolvedExpression.resource(); + for (String expressionString : expressionStrings) { boolean isExclusion = expressionString.startsWith("-") && wildcardSeen; if (context.getOptions().expandWildcardExpressions() && isWildcard(expressionString)) { wildcardSeen = true; - expressionsList.add(new Expression(resolvedExpression, true, isExclusion)); + expressionsList.add(new Expression(expressionString, true, isExclusion)); } else { - expressionsList.add(new Expression(resolvedExpression, false, isExclusion)); + expressionsList.add(new Expression(expressionString, false, isExclusion)); } } this.expressionsList = expressionsList; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 3accdd3881c6d..69e3b7b70ff82 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -992,6 +992,7 @@ static Settings aggregateIndexSettings( // additionalIndexSettings map final Settings.Builder additionalIndexSettings = Settings.builder(); final var resolvedAt = 
Instant.ofEpochMilli(request.getNameResolvedAt()); + Set overrulingSettings = new HashSet<>(); for (IndexSettingProvider provider : indexSettingProviders) { var newAdditionalSettings = provider.getAdditionalIndexSettings( request.index(), @@ -1004,36 +1005,45 @@ static Settings aggregateIndexSettings( ); validateAdditionalSettings(provider, newAdditionalSettings, additionalIndexSettings); additionalIndexSettings.put(newAdditionalSettings); + if (provider.overrulesTemplateAndRequestSettings()) { + overrulingSettings.addAll(newAdditionalSettings.keySet()); + } } - // For all the explicit settings, we go through the template and request level settings - // and see if either a template or the request has "cancelled out" an explicit default - // setting. For example, if a plugin had as an explicit setting: - // "index.mysetting": "blah - // And either a template or create index request had: - // "index.mysetting": null - // We want to remove the explicit setting not only from the explicitly set settings, but - // also from the template and request settings, so that from the newly create index's - // perspective it is as though the setting has not been set at all (using the default - // value). for (String explicitSetting : additionalIndexSettings.keys()) { - if (templateSettings.keys().contains(explicitSetting) && templateSettings.get(explicitSetting) == null) { - logger.debug( - "removing default [{}] setting as it in set to null in a template for [{}] creation", - explicitSetting, - request.index() - ); - additionalIndexSettings.remove(explicitSetting); + if (overrulingSettings.contains(explicitSetting)) { + // Remove any conflicting template and request settings to use the provided values. 
templateSettings.remove(explicitSetting); - } - if (requestSettings.keys().contains(explicitSetting) && requestSettings.get(explicitSetting) == null) { - logger.debug( - "removing default [{}] setting as it in set to null in the request for [{}] creation", - explicitSetting, - request.index() - ); - additionalIndexSettings.remove(explicitSetting); requestSettings.remove(explicitSetting); + } else { + // For all the explicit settings, we go through the template and request level settings + // and see if either a template or the request has "cancelled out" an explicit default + // setting. For example, if a plugin had as an explicit setting: + // "index.mysetting": "blah + // And either a template or create index request had: + // "index.mysetting": null + // We want to remove the explicit setting not only from the explicitly set settings, but + // also from the template and request settings, so that from the newly create index's + // perspective it is as though the setting has not been set at all (using the default + // value). 
+ if (templateSettings.keys().contains(explicitSetting) && templateSettings.get(explicitSetting) == null) { + logger.debug( + "removing default [{}] setting as it is set to null in a template for [{}] creation", + explicitSetting, + request.index() + ); + additionalIndexSettings.remove(explicitSetting); + templateSettings.remove(explicitSetting); + } + if (requestSettings.keys().contains(explicitSetting) && requestSettings.get(explicitSetting) == null) { + logger.debug( + "removing default [{}] setting as it is set to null in the request for [{}] creation", + explicitSetting, + request.index() + ); + additionalIndexSettings.remove(explicitSetting); + requestSettings.remove(explicitSetting); + } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index ccdfaa5518aee..d6ed28454df96 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -1200,6 +1200,42 @@ static ClusterState innerPutTemplate( return ClusterState.builder(currentState).metadata(builder).build(); } + /** + * A private, local alternative to elements.stream().anyMatch(predicate) for micro-optimization reasons. + */ + private static boolean anyMatch(final List elements, final Predicate predicate) { + for (T e : elements) { + if (predicate.test(e)) { + return true; + } + } + return false; + } + + /** + * A private, local alternative to elements.stream().noneMatch(predicate) for micro-optimization reasons. + */ + private static boolean noneMatch(final List elements, final Predicate predicate) { + for (T e : elements) { + if (predicate.test(e)) { + return false; + } + } + return true; + } + + /** + * A private, local alternative to elements.stream().filter(predicate).findFirst() for micro-optimization reasons. 
+ */ + private static Optional findFirst(final List elements, final Predicate predicate) { + for (T e : elements) { + if (predicate.test(e)) { + return Optional.of(e); + } + } + return Optional.empty(); + } + /** * Finds index templates whose index pattern matched with the given index name. In the case of * hidden indices, a template with a match all pattern or global template will not be returned. @@ -1219,15 +1255,14 @@ public static List findV1Templates(Metadata metadata, Str final List matchedTemplates = new ArrayList<>(); for (IndexTemplateMetadata template : metadata.templates().values()) { if (isHidden == null || isHidden == Boolean.FALSE) { - final boolean matched = template.patterns().stream().anyMatch(patternMatchPredicate); - if (matched) { + if (anyMatch(template.patterns(), patternMatchPredicate)) { matchedTemplates.add(template); } } else { assert isHidden == Boolean.TRUE; - final boolean isNotMatchAllTemplate = template.patterns().stream().noneMatch(Regex::isMatchAllPattern); + final boolean isNotMatchAllTemplate = noneMatch(template.patterns(), Regex::isMatchAllPattern); if (isNotMatchAllTemplate) { - if (template.patterns().stream().anyMatch(patternMatchPredicate)) { + if (anyMatch(template.patterns(), patternMatchPredicate)) { matchedTemplates.add(template); } } @@ -1238,19 +1273,21 @@ public static List findV1Templates(Metadata metadata, Str // this is complex but if the index is not hidden in the create request but is hidden as the result of template application, // then we need to exclude global templates if (isHidden == null) { - final Optional templateWithHiddenSetting = matchedTemplates.stream() - .filter(template -> IndexMetadata.INDEX_HIDDEN_SETTING.exists(template.settings())) - .findFirst(); + final Optional templateWithHiddenSetting = findFirst( + matchedTemplates, + template -> IndexMetadata.INDEX_HIDDEN_SETTING.exists(template.settings()) + ); if (templateWithHiddenSetting.isPresent()) { final boolean templatedIsHidden = 
IndexMetadata.INDEX_HIDDEN_SETTING.get(templateWithHiddenSetting.get().settings()); if (templatedIsHidden) { // remove the global templates - matchedTemplates.removeIf(current -> current.patterns().stream().anyMatch(Regex::isMatchAllPattern)); + matchedTemplates.removeIf(current -> anyMatch(current.patterns(), Regex::isMatchAllPattern)); } // validate that hidden didn't change - final Optional templateWithHiddenSettingPostRemoval = matchedTemplates.stream() - .filter(template -> IndexMetadata.INDEX_HIDDEN_SETTING.exists(template.settings())) - .findFirst(); + final Optional templateWithHiddenSettingPostRemoval = findFirst( + matchedTemplates, + template -> IndexMetadata.INDEX_HIDDEN_SETTING.exists(template.settings()) + ); if (templateWithHiddenSettingPostRemoval.isEmpty() || templateWithHiddenSetting.get() != templateWithHiddenSettingPostRemoval.get()) { throw new IllegalStateException( @@ -1313,14 +1350,13 @@ static List> findV2CandidateTemplates(Met * built with a template that none of its indices match. */ if (isHidden == false || template.getDataStreamTemplate() != null) { - final boolean matched = template.indexPatterns().stream().anyMatch(patternMatchPredicate); - if (matched) { + if (anyMatch(template.indexPatterns(), patternMatchPredicate)) { candidates.add(Tuple.tuple(name, template)); } } else { - final boolean isNotMatchAllTemplate = template.indexPatterns().stream().noneMatch(Regex::isMatchAllPattern); + final boolean isNotMatchAllTemplate = noneMatch(template.indexPatterns(), Regex::isMatchAllPattern); if (isNotMatchAllTemplate) { - if (template.indexPatterns().stream().anyMatch(patternMatchPredicate)) { + if (anyMatch(template.indexPatterns(), patternMatchPredicate)) { candidates.add(Tuple.tuple(name, template)); } } @@ -1334,7 +1370,7 @@ static List> findV2CandidateTemplates(Met // Checks if a global template specifies the `index.hidden` setting. 
This check is important because a global // template shouldn't specify the `index.hidden` setting, we leave it up to the caller to handle this situation. private static boolean isGlobalAndHasIndexHiddenSetting(Metadata metadata, ComposableIndexTemplate template, String templateName) { - return template.indexPatterns().stream().anyMatch(Regex::isMatchAllPattern) + return anyMatch(template.indexPatterns(), Regex::isMatchAllPattern) && IndexMetadata.INDEX_HIDDEN_SETTING.exists(resolveSettings(metadata, templateName)); } diff --git a/server/src/main/java/org/elasticsearch/env/BuildVersion.java b/server/src/main/java/org/elasticsearch/env/BuildVersion.java index 0de346249ccbc..42c45a14977eb 100644 --- a/server/src/main/java/org/elasticsearch/env/BuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/BuildVersion.java @@ -59,7 +59,6 @@ public abstract class BuildVersion { public abstract boolean isFutureVersion(); // temporary - // TODO[wrb]: remove from PersistedClusterStateService // TODO[wrb]: remove from security bootstrap checks @Deprecated public Version toVersion() { diff --git a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java index e0531b5a192a0..dcc5ed3aee3f8 100644 --- a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java @@ -72,6 +72,6 @@ public int hashCode() { @Override public String toString() { - return Version.fromId(versionId).toString(); + return version.toString(); } } diff --git a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java index 6a72a7e7fcda5..5b2ee39c1b622 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java +++ b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java @@ -42,7 +42,6 @@ public final class NodeMetadata { private final IndexVersion 
oldestIndexVersion; - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // version should be non-null in the node metadata from v9 onwards private NodeMetadata( final String nodeId, final BuildVersion buildVersion, @@ -112,11 +111,7 @@ public IndexVersion oldestIndexVersion() { return oldestIndexVersion; } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) public void verifyUpgradeToCurrentVersion() { - // Enable the following assertion for V9: - // assert (nodeVersion.equals(BuildVersion.empty()) == false) : "version is required in the node metadata from v9 onwards"; - if (nodeVersion.onOrAfterMinimumCompatible() == false) { throw new IllegalStateException( "cannot upgrade a node from version [" diff --git a/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java b/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java index 96158965cddfe..1ddc8d5b26bd9 100644 --- a/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java +++ b/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java @@ -74,7 +74,7 @@ protected void processDataPaths(Terminal terminal, Path[] paths, OptionSet optio "found [" + nodeMetadata + "] which is compatible with current version [" - + Version.CURRENT + + BuildVersion.current() + "], so there is no need to override the version checks" ); } catch (IllegalStateException e) { @@ -86,10 +86,10 @@ protected void processDataPaths(Terminal terminal, Path[] paths, OptionSet optio (nodeMetadata.nodeVersion().onOrAfterMinimumCompatible() == false ? 
TOO_OLD_MESSAGE : TOO_NEW_MESSAGE).replace( "V_OLD", nodeMetadata.nodeVersion().toString() - ).replace("V_NEW", nodeMetadata.nodeVersion().toString()).replace("V_CUR", Version.CURRENT.toString()) + ).replace("V_NEW", nodeMetadata.nodeVersion().toString()).replace("V_CUR", BuildVersion.current().toString()) ); - PersistedClusterStateService.overrideVersion(Version.CURRENT, paths); + PersistedClusterStateService.overrideVersion(BuildVersion.current(), paths); terminal.println(SUCCESS_MESSAGE); } diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 0c6cf2c8a0761..92b8686700a05 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -42,7 +42,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; @@ -159,8 +158,6 @@ public class PersistedClusterStateService { public static final int IS_LAST_PAGE = 1; public static final int IS_NOT_LAST_PAGE = 0; private static final int COMMIT_DATA_SIZE = 7; - // We added CLUSTER_UUID_KEY and CLUSTER_UUID_COMMITTED_KEY in 8.8 - private static final int COMMIT_DATA_SIZE_BEFORE_8_8 = 5; private static final MergePolicy NO_MERGE_POLICY = noMergePolicy(); private static final MergePolicy DEFAULT_MERGE_POLICY = defaultMergePolicy(); @@ -350,7 +347,7 @@ public record OnDiskStateMetadata( @Nullable public static NodeMetadata nodeMetadata(Path... 
dataPaths) throws IOException { String nodeId = null; - Version version = null; + BuildVersion version = null; IndexVersion oldestIndexVersion = IndexVersions.ZERO; for (final Path dataPath : dataPaths) { final Path indexPath = dataPath.resolve(METADATA_DIRECTORY_NAME); @@ -367,7 +364,7 @@ public static NodeMetadata nodeMetadata(Path... dataPaths) throws IOException { ); } else if (nodeId == null) { nodeId = thisNodeId; - version = Version.fromId(Integer.parseInt(userData.get(NODE_VERSION_KEY))); + version = BuildVersion.fromVersionId(Integer.parseInt(userData.get(NODE_VERSION_KEY))); if (userData.containsKey(OLDEST_INDEX_VERSION_KEY)) { oldestIndexVersion = IndexVersion.fromId(Integer.parseInt(userData.get(OLDEST_INDEX_VERSION_KEY))); } else { @@ -382,14 +379,13 @@ public static NodeMetadata nodeMetadata(Path... dataPaths) throws IOException { if (nodeId == null) { return null; } - // TODO: remove use of Version here (ES-7343) - return new NodeMetadata(nodeId, BuildVersion.fromVersionId(version.id()), oldestIndexVersion); + return new NodeMetadata(nodeId, version, oldestIndexVersion); } /** * Overrides the version field for the metadata in the given data path */ - public static void overrideVersion(Version newVersion, Path... dataPaths) throws IOException { + public static void overrideVersion(BuildVersion newVersion, Path... dataPaths) throws IOException { for (final Path dataPath : dataPaths) { final Path indexPath = dataPath.resolve(METADATA_DIRECTORY_NAME); if (Files.exists(indexPath)) { @@ -399,7 +395,7 @@ public static void overrideVersion(Version newVersion, Path... 
dataPaths) throws try (IndexWriter indexWriter = createIndexWriter(new NIOFSDirectory(dataPath.resolve(METADATA_DIRECTORY_NAME)), true)) { final Map commitData = new HashMap<>(userData); - commitData.put(NODE_VERSION_KEY, Integer.toString(newVersion.id)); + commitData.put(NODE_VERSION_KEY, Integer.toString(newVersion.id())); commitData.put(OVERRIDDEN_NODE_VERSION_KEY, Boolean.toString(true)); indexWriter.setLiveCommitData(commitData.entrySet()); indexWriter.commit(); @@ -664,11 +660,9 @@ public OnDiskStateMetadata loadOnDiskStateMetadataFromUserData(Map commitData = Maps.newMapWithExpectedSize(COMMIT_DATA_SIZE); commitData.put(CURRENT_TERM_KEY, Long.toString(currentTerm)); commitData.put(LAST_ACCEPTED_VERSION_KEY, Long.toString(lastAcceptedVersion)); - commitData.put(NODE_VERSION_KEY, Integer.toString(Version.CURRENT.id)); + commitData.put(NODE_VERSION_KEY, Integer.toString(BuildVersion.current().id())); commitData.put(OLDEST_INDEX_VERSION_KEY, Integer.toString(oldestIndexVersion.id())); commitData.put(NODE_ID_KEY, nodeId); commitData.put(CLUSTER_UUID_KEY, clusterUUID); diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index 0180d2c8df119..6a553d5dc5440 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -57,4 +57,15 @@ Settings getAdditionalIndexSettings( record Parameters(CheckedFunction mapperServiceFactory) { } + + /** + * Indicates whether the additional settings that this provider returns can overrule the settings defined in matching template + * or in create index request. + * + * Note that this is not used during index template validation, to avoid overruling template settings that may apply to + * different contexts (e.g. the provider is not used, or it returns different setting values). 
+ */ + default boolean overrulesTemplateAndRequestSettings() { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index d5c94de1c6942..a7560ce6f3caf 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -132,7 +132,7 @@ public BinaryDocValues getBinary(FieldInfo field) throws IOException { return DocValues.emptyBinary(); } - final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength); + final RandomAccessInput bytesSlice = data.randomAccessSlice(entry.dataOffset, entry.dataLength); if (entry.docsWithFieldOffset == -1) { // dense @@ -144,8 +144,7 @@ public BinaryDocValues getBinary(FieldInfo field) throws IOException { @Override public BytesRef binaryValue() throws IOException { - bytesSlice.seek((long) doc * length); - bytesSlice.readBytes(bytes.bytes, 0, length); + bytesSlice.readBytes((long) doc * length, bytes.bytes, 0, length); return bytes; } }; @@ -160,8 +159,7 @@ public BytesRef binaryValue() throws IOException { public BytesRef binaryValue() throws IOException { long startOffset = addresses.get(doc); bytes.length = (int) (addresses.get(doc + 1L) - startOffset); - bytesSlice.seek(startOffset); - bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + bytesSlice.readBytes(startOffset, bytes.bytes, 0, bytes.length); return bytes; } }; @@ -184,8 +182,7 @@ public BytesRef binaryValue() throws IOException { @Override public BytesRef binaryValue() throws IOException { - bytesSlice.seek((long) disi.index() * length); - bytesSlice.readBytes(bytes.bytes, 0, length); + bytesSlice.readBytes((long) disi.index() * length, bytes.bytes, 0, length); return bytes; } }; @@ -201,8 +198,7 @@ public BytesRef binaryValue() throws IOException { final int 
index = disi.index(); long startOffset = addresses.get(index); bytes.length = (int) (addresses.get(index + 1L) - startOffset); - bytesSlice.seek(startOffset); - bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + bytesSlice.readBytes(startOffset, bytes.bytes, 0, bytes.length); return bytes; } }; @@ -407,7 +403,7 @@ private static class TermsDict extends BaseTermsEnum { final IndexInput bytes; final long blockMask; final LongValues indexAddresses; - final IndexInput indexBytes; + final RandomAccessInput indexBytes; final BytesRef term; long ord = -1; @@ -427,7 +423,7 @@ private static class TermsDict extends BaseTermsEnum { entry.termsIndexAddressesLength ); indexAddresses = DirectMonotonicReader.getInstance(entry.termsIndexAddressesMeta, indexAddressesSlice); - indexBytes = data.slice("terms-index", entry.termsIndexOffset, entry.termsIndexLength); + indexBytes = data.randomAccessSlice(entry.termsIndexOffset, entry.termsIndexLength); term = new BytesRef(entry.maxTermLength); // add the max term length for the dictionary @@ -485,8 +481,7 @@ private BytesRef getTermFromIndex(long index) throws IOException { assert index >= 0 && index <= (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; final long start = indexAddresses.get(index); term.length = (int) (indexAddresses.get(index + 1) - start); - indexBytes.seek(start); - indexBytes.readBytes(term.bytes, 0, term.length); + indexBytes.readBytes(start, term.bytes, 0, term.length); return term; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index 3512989c115ee..c38b5beeb55a0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -189,8 +189,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { protected BlockLoader 
blockLoaderFromSource(BlockLoaderContext blContext) { ValueFetcher fetcher = valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKB); // TODO consider optimization using BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll(), sourceMode); + return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); } protected abstract Object nullValueAsSource(T nullValue); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java index 105943c732a5e..19a1cce746172 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.Set; /** * Loads values from {@code _source}. This whole process is very slow and cast-tastic, @@ -30,14 +29,6 @@ * slow. */ public abstract class BlockSourceReader implements BlockLoader.RowStrideReader { - - // _ignored_source is needed when source mode is synthetic. 
- static final StoredFieldsSpec NEEDS_SOURCE_AND_IGNORED_SOURCE = new StoredFieldsSpec( - true, - false, - Set.of(IgnoredSourceFieldMapper.NAME) - ); - private final ValueFetcher fetcher; private final List ignoredValues = new ArrayList<>(); private final DocIdSetIterator iter; @@ -100,12 +91,10 @@ public interface LeafIteratorLookup { private abstract static class SourceBlockLoader implements BlockLoader { protected final ValueFetcher fetcher; private final LeafIteratorLookup lookup; - private final SourceFieldMapper.Mode sourceMode; - private SourceBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { + private SourceBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { this.fetcher = fetcher; this.lookup = lookup; - this.sourceMode = sourceMode; } @Override @@ -115,7 +104,7 @@ public final ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) @Override public final StoredFieldsSpec rowStrideStoredFieldSpec() { - return sourceMode == SourceFieldMapper.Mode.SYNTHETIC ? NEEDS_SOURCE_AND_IGNORED_SOURCE : StoredFieldsSpec.NEEDS_SOURCE; + return StoredFieldsSpec.NEEDS_SOURCE; } @Override @@ -151,8 +140,8 @@ public final String toString() { * Load {@code boolean}s from {@code _source}. */ public static class BooleansBlockLoader extends SourceBlockLoader { - public BooleansBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public BooleansBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -191,8 +180,8 @@ public String toString() { * Load {@link BytesRef}s from {@code _source}. 
*/ public static class BytesRefsBlockLoader extends SourceBlockLoader { - public BytesRefsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public BytesRefsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -202,7 +191,7 @@ public final Builder builder(BlockFactory factory, int expectedCount) { @Override protected RowStrideReader rowStrideReader(LeafReaderContext context, DocIdSetIterator iter) throws IOException { - return new BytesRefs(fetcher, iter, null); + return new BytesRefs(fetcher, iter); } @Override @@ -212,8 +201,8 @@ protected String name() { } public static class GeometriesBlockLoader extends SourceBlockLoader { - public GeometriesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public GeometriesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -223,7 +212,7 @@ public final Builder builder(BlockFactory factory, int expectedCount) { @Override protected RowStrideReader rowStrideReader(LeafReaderContext context, DocIdSetIterator iter) { - return new Geometries(fetcher, iter, null); + return new Geometries(fetcher, iter); } @Override @@ -235,7 +224,7 @@ protected String name() { private static class BytesRefs extends BlockSourceReader { private final BytesRef scratch = new BytesRef(); - BytesRefs(ValueFetcher fetcher, DocIdSetIterator iter, SourceFieldMapper.Mode sourceMode) { + BytesRefs(ValueFetcher fetcher, DocIdSetIterator iter) { super(fetcher, iter); } @@ -252,7 +241,7 @@ public String toString() { private static class Geometries extends BlockSourceReader { - Geometries(ValueFetcher fetcher, DocIdSetIterator iter, SourceFieldMapper.Mode sourceMode) { + Geometries(ValueFetcher fetcher, DocIdSetIterator iter) { super(fetcher, iter); } @@ -275,8 +264,8 @@ public String 
toString() { * Load {@code double}s from {@code _source}. */ public static class DoublesBlockLoader extends SourceBlockLoader { - public DoublesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public DoublesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -315,8 +304,8 @@ public String toString() { * Load {@code int}s from {@code _source}. */ public static class IntsBlockLoader extends SourceBlockLoader { - public IntsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public IntsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override @@ -355,8 +344,8 @@ public String toString() { * Load {@code long}s from {@code _source}. */ public static class LongsBlockLoader extends SourceBlockLoader { - public LongsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup, SourceFieldMapper.Mode sourceMode) { - super(fetcher, lookup, sourceMode); + public LongsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { + super(fetcher, lookup); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index c2bf9e18bfeec..5aaaf7dce83c9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -314,7 +314,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isIndexed() || isStored() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - return new BlockSourceReader.BooleansBlockLoader(fetcher, lookup, blContext.indexSettings().getIndexMappingSourceMode()); + return new BlockSourceReader.BooleansBlockLoader(fetcher, lookup); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index d05f0e477db09..87e4ce5f90479 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -793,8 +793,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher(blContext.sourcePaths(name())), lookup, sourceMode); + return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher(blContext.sourcePaths(name())), lookup); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 802680e7f373e..ecc708bc94614 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -634,8 +634,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(name()); } SourceValueFetcher fetcher = sourceValueFetcher(blContext.sourcePaths(name())); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new 
BlockSourceReader.BytesRefsBlockLoader(fetcher, sourceBlockLoaderLookup(blContext), sourceMode); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, sourceBlockLoaderLookup(blContext)); } private BlockSourceReader.LeafIteratorLookup sourceBlockLoaderLookup(BlockLoaderContext blContext) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 3608e8ab261c1..55ed1e10428aa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -462,12 +462,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); } }, FLOAT("float", NumericType.FLOAT) { @@ -650,12 +646,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); } }, DOUBLE("double", NumericType.DOUBLE) { @@ -804,12 +796,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher 
sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); } }, BYTE("byte", NumericType.BYTE) { @@ -921,12 +909,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); } private boolean isOutOfRange(Object value) { @@ -1038,12 +1022,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); } private boolean isOutOfRange(Object value) { @@ -1229,12 +1209,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader 
blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); } }, LONG("long", NumericType.LONG) { @@ -1380,12 +1356,8 @@ BlockLoader blockLoaderFromDocValues(String fieldName) { } @Override - BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ) { - return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup, sourceMode); + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { + return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup); } private boolean isOutOfRange(Object value) { @@ -1663,11 +1635,7 @@ protected void writeValue(XContentBuilder b, long value) throws IOException { abstract BlockLoader blockLoaderFromDocValues(String fieldName); - abstract BlockLoader blockLoaderFromSource( - SourceValueFetcher sourceValueFetcher, - BlockSourceReader.LeafIteratorLookup lookup, - SourceFieldMapper.Mode sourceMode - ); + abstract BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup); } public static class NumberFieldType extends SimpleMappedFieldType { @@ -1806,8 +1774,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return type.blockLoaderFromSource(sourceValueFetcher(blContext.sourcePaths(name())), lookup, sourceMode); + return type.blockLoaderFromSource(sourceValueFetcher(blContext.sourcePaths(name())), lookup); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 3f77edc819602..253f70f4fda47 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -1007,20 +1007,8 @@ protected String delegatingTo() { if (isStored()) { return new BlockStoredFieldsReader.BytesFromStringsBlockLoader(name()); } - if (isSyntheticSource && syntheticSourceDelegate == null) { - /* - * When we're in synthetic source mode we don't currently - * support text fields that are not stored and are not children - * of perfect keyword fields. We'd have to load from the parent - * field and then convert the result to a string. In this case, - * even if we would synthesize the source, the current field - * would be missing. 
- */ - return null; - } SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name())); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, blockReaderDisiLookup(blContext), sourceMode); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, blockReaderDisiLookup(blContext)); } /** diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 2dc5e7c28ad0b..706f788e8a310 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -38,7 +38,6 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; @@ -1714,7 +1713,7 @@ interface IndexDeletionAllowedPredicate { IndexSettings indexSettings) -> canDeleteIndexContents(index); private final IndexDeletionAllowedPredicate ALWAYS_TRUE = (Index index, IndexSettings indexSettings) -> true; - public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { + public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { /* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch * of dependencies we pass in a function that can perform the parsing. 
*/ CheckedFunction filterParser = bytes -> { diff --git a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java index 5059272aa2e23..97a68d9807688 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -239,7 +240,7 @@ private static Boolean readBoolean(String processorType, String processorTag, St processorType, processorTag, propertyName, - "property isn't a boolean, but of type [" + value.getClass().getName() + "]" + Strings.format("property isn't a boolean, but of type [%s]", value.getClass().getName()) ); } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 92e248f160f0f..c85997f72cc78 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -205,8 +205,8 @@ static boolean checkMetadataVersion( namespace, newVersion, switch (versionCheck) { - case ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION -> "less than"; - case ReservedStateVersionCheck.HIGHER_VERSION_ONLY -> "less than or equal to"; + case HIGHER_OR_SAME_VERSION -> "less than"; + case HIGHER_VERSION_ONLY -> "less than or equal to"; }, currentVersion ) diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java 
b/server/src/main/java/org/elasticsearch/search/SearchService.java index 3a900a8a9b8a6..be96b4e25d841 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; @@ -1619,7 +1618,7 @@ public boolean isForceExecution() { } } - public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { + public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { return indicesService.buildAliasFilter(state, index, resolvedExpressions); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java index 1faeabb6acbf7..834bacd9e6a04 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -230,19 +229,9 @@ public void testResolveHiddenProperlyWithDateMath() { .metadata(buildMetadata(new 
Object[][] {}, indices)) .build(); String[] requestedIndex = new String[] { "" }; - Set resolvedIndices = resolver.resolveExpressions( - clusterState, - IndicesOptions.LENIENT_EXPAND_OPEN, - true, - requestedIndex - ); + Set resolvedIndices = resolver.resolveExpressions(clusterState, IndicesOptions.LENIENT_EXPAND_OPEN, true, requestedIndex); assertThat(resolvedIndices.size(), is(1)); - assertThat( - resolvedIndices, - contains( - oneOf(new ResolvedExpression("logs-pgsql-prod-" + todaySuffix), new ResolvedExpression("logs-pgsql-prod-" + tomorrowSuffix)) - ) - ); + assertThat(resolvedIndices, contains(oneOf("logs-pgsql-prod-" + todaySuffix, "logs-pgsql-prod-" + tomorrowSuffix))); } public void testSystemIndexAccess() { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java index 74408b99e92ce..95446149f026b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java @@ -49,7 +49,9 @@ public void testSettingsProviderIsOverridden() throws Exception { matchingTemplate, ComposableIndexTemplate.builder() .indexPatterns(List.of("test_index*")) - .template(new Template(Settings.builder().put("test-setting", 1).build(), null, null)) + .template( + new Template(Settings.builder().put("test-setting", 1).put("test-setting-2", 2).build(), null, null) + ) .build() ) ) @@ -78,6 +80,24 @@ public Settings getAdditionalIndexSettings( ) { return Settings.builder().put("test-setting", 0).build(); } + }, new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + 
Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("test-setting-2", 10).build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } }); Template resolvedTemplate = TransportSimulateIndexTemplateAction.resolveTemplate( @@ -92,5 +112,6 @@ public Settings getAdditionalIndexSettings( ); assertThat(resolvedTemplate.settings().getAsInt("test-setting", -1), is(1)); + assertThat(resolvedTemplate.settings().getAsInt("test-setting-2", -1), is(10)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index fe0b7926229cb..6be5b48f9d723 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -27,6 +26,7 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Locale; @@ -52,11 +52,11 @@ private static String formatDate(String pattern, ZonedDateTime zonedDateTime) { public void testNormal() throws Exception { int numIndexExpressions = randomIntBetween(1, 9); - List indexExpressions = new ArrayList<>(numIndexExpressions); + List indexExpressions = new 
ArrayList<>(numIndexExpressions); for (int i = 0; i < numIndexExpressions; i++) { - indexExpressions.add(new ResolvedExpression(randomAlphaOfLength(10))); + indexExpressions.add(randomAlphaOfLength(10)); } - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(indexExpressions.size())); for (int i = 0; i < indexExpressions.size(); i++) { assertThat(result.get(i), equalTo(indexExpressions.get(i))); @@ -64,25 +64,25 @@ public void testNormal() throws Exception { } public void testExpression() throws Exception { - List indexExpressions = resolvedExpressions("<.marvel-{now}>", "<.watch_history-{now}>", ""); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", ""); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(3)); - assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(1).resource(), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2).resource(), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpressionWithWildcardAndExclusions() { - List indexExpressions = resolvedExpressions( + List indexExpressions = Arrays.asList( "<-before-inner-{now}>", "-", "", "<-after-inner-{now}>", "-" ); - 
List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat( - result.stream().map(ResolvedExpression::resource).toList(), + result, Matchers.contains( equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), equalTo("-"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion @@ -98,7 +98,7 @@ public void testExpressionWithWildcardAndExclusions() { ); result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions); assertThat( - result.stream().map(ResolvedExpression::resource).toList(), + result, Matchers.contains( equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion @@ -112,24 +112,21 @@ public void testExpressionWithWildcardAndExclusions() { } public void testEmpty() throws Exception { - List result = DateMathExpressionResolver.resolve(context, List.of()); + List result = DateMathExpressionResolver.resolve(context, Collections.emptyList()); assertThat(result.size(), equalTo(0)); } public void testExpression_Static() throws Exception { - List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-test>")); + List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0).resource(), equalTo(".marvel-test")); + assertThat(result.get(0), equalTo(".marvel-test")); } public void testExpression_MultiParts() throws Exception { - List result = DateMathExpressionResolver.resolve( - context, - resolvedExpressions("<.text1-{now/d}-text2-{now/M}>") - ); + List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); assertThat(result.size(), equalTo(1)); assertThat( - 
result.get(0).resource(), + result.get(0), equalTo( ".text1-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) @@ -140,42 +137,33 @@ public void testExpression_MultiParts() throws Exception { } public void testExpression_CustomFormat() throws Exception { - List results = DateMathExpressionResolver.resolve( - context, - resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd}}>") - ); + List results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); assertThat(results.size(), equalTo(1)); - assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeStatic() throws Exception { - List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar\\{v\\}el-{now/d}>")); + List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0).resource(), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeDateFormat() throws Exception { - List result = DateMathExpressionResolver.resolve( - context, - resolvedExpressions("<.marvel-{now/d{'\\{year\\}'yyyy}}>") - ); + List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); } public void testExpression_MixedArray() 
throws Exception { - List result = DateMathExpressionResolver.resolve( + List result = DateMathExpressionResolver.resolve( context, - resolvedExpressions("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") + Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") ); assertThat(result.size(), equalTo(4)); - assertThat(result.get(0).resource(), equalTo("name1")); - assertThat(result.get(1).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2).resource(), equalTo("name2")); - assertThat( - result.get(3).resource(), - equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1))) - ); + assertThat(result.get(0), equalTo("name1")); + assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(2), equalTo("name2")); + assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); } public void testExpression_CustomTimeZoneInIndexName() throws Exception { @@ -214,19 +202,19 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception { name -> false, name -> false ); - List results = DateMathExpressionResolver.resolve( + List results = DateMathExpressionResolver.resolve( context, - resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") + Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") ); assertThat(results.size(), equalTo(1)); logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); + assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } public void testExpressionInvalidUnescaped() throws Exception { 
Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); @@ -235,7 +223,7 @@ public void testExpressionInvalidUnescaped() throws Exception { public void testExpressionInvalidDateMathFormat() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); @@ -244,7 +232,7 @@ public void testExpressionInvalidDateMathFormat() throws Exception { public void testExpressionInvalidEmptyDateMathFormat() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); @@ -253,13 +241,10 @@ public void testExpressionInvalidEmptyDateMathFormat() throws Exception { public void testExpressionInvalidOpenEnded() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) ); assertThat(e.getMessage(), containsString("invalid dynamic 
name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } - private List resolvedExpressions(String... expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); - } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java index 1df3bf4132b60..1ca59ff402bd8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java @@ -13,12 +13,10 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList.Expression; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; -import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.function.Supplier; @@ -41,13 +39,10 @@ public void testEmpty() { public void testExplicitSingleNameExpression() { for (IndicesOptions indicesOptions : List.of(getExpandWildcardsIndicesOptions(), getNoExpandWildcardsIndicesOptions())) { for (String expressionString : List.of("non_wildcard", "-non_exclusion")) { - ExpressionList expressionList = new ExpressionList( - getContextWithOptions(indicesOptions), - resolvedExpressions(expressionString) - ); + ExpressionList expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); assertThat(expressionList.hasWildcard(), is(false)); if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(indicesOptions), resolvedExpressions((expressionString))); + expressionList = 
new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); } Iterator expressionIterator = expressionList.iterator(); assertThat(expressionIterator.hasNext(), is(true)); @@ -67,14 +62,11 @@ public void testWildcardSingleExpression() { for (String wildcardTest : List.of("*", "a*", "*b", "a*b", "a-*b", "a*-b", "-*", "-a*", "-*b", "**", "*-*")) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), - resolvedExpressions(wildcardTest) + List.of(wildcardTest) ); assertThat(expressionList.hasWildcard(), is(true)); if (randomBoolean()) { - expressionList = new ExpressionList( - getContextWithOptions(getExpandWildcardsIndicesOptions()), - resolvedExpressions(wildcardTest) - ); + expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), List.of(wildcardTest)); } Iterator expressionIterator = expressionList.iterator(); assertThat(expressionIterator.hasNext(), is(true)); @@ -90,13 +82,13 @@ public void testWildcardSingleExpression() { } public void testWildcardLongerExpression() { - List onlyExplicits = randomList(7, () -> new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); - ResolvedExpression wildcard = new ResolvedExpression(randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**")); - List expressionList = new ArrayList<>(onlyExplicits.size() + 1); + List onlyExplicits = randomList(7, () -> randomAlphaOfLengthBetween(0, 5)); + String wildcard = randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**"); + List expressionList = new ArrayList<>(onlyExplicits.size() + 1); expressionList.addAll(randomSubsetOf(onlyExplicits)); int wildcardPos = expressionList.size(); expressionList.add(wildcard); - for (ResolvedExpression item : onlyExplicits) { + for (String item : onlyExplicits) { if (expressionList.contains(item) == false) { expressionList.add(item); } @@ -114,18 +106,18 @@ public void testWildcardLongerExpression() { } else { 
assertThat(expression.isWildcard(), is(true)); } - assertThat(expression.get(), is(expressionList.get(i++).resource())); + assertThat(expression.get(), is(expressionList.get(i++))); } } public void testWildcardsNoExclusionExpressions() { - for (List wildcardExpression : List.of( - resolvedExpressions("*"), - resolvedExpressions("a", "*"), - resolvedExpressions("-b", "*c"), - resolvedExpressions("-", "a", "c*"), - resolvedExpressions("*", "a*", "*b"), - resolvedExpressions("-*", "a", "b*") + for (List wildcardExpression : List.of( + List.of("*"), + List.of("a", "*"), + List.of("-b", "*c"), + List.of("-", "a", "c*"), + List.of("*", "a*", "*b"), + List.of("-*", "a", "b*") )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), @@ -138,25 +130,25 @@ public void testWildcardsNoExclusionExpressions() { int i = 0; for (Expression expression : expressionList) { assertThat(expression.isExclusion(), is(false)); - if (wildcardExpression.get(i).resource().contains("*")) { + if (wildcardExpression.get(i).contains("*")) { assertThat(expression.isWildcard(), is(true)); } else { assertThat(expression.isWildcard(), is(false)); } - assertThat(expression.get(), is(wildcardExpression.get(i++).resource())); + assertThat(expression.get(), is(wildcardExpression.get(i++))); } } } public void testWildcardExpressionNoExpandOptions() { - for (List wildcardExpression : List.of( - resolvedExpressions("*"), - resolvedExpressions("a", "*"), - resolvedExpressions("-b", "*c"), - resolvedExpressions("*d", "-"), - resolvedExpressions("*", "-*"), - resolvedExpressions("-", "a", "c*"), - resolvedExpressions("*", "a*", "*b") + for (List wildcardExpression : List.of( + List.of("*"), + List.of("a", "*"), + List.of("-b", "*c"), + List.of("*d", "-"), + List.of("*", "-*"), + List.of("-", "a", "c*"), + List.of("*", "a*", "*b") )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getNoExpandWildcardsIndicesOptions()), @@ 
-170,7 +162,7 @@ public void testWildcardExpressionNoExpandOptions() { for (Expression expression : expressionList) { assertThat(expression.isWildcard(), is(false)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(wildcardExpression.get(i++).resource())); + assertThat(expression.get(), is(wildcardExpression.get(i++))); } } } @@ -180,17 +172,17 @@ public void testSingleExclusionExpression() { int wildcardPos = randomIntBetween(0, 3); String exclusion = randomFrom("-*", "-", "-c*", "-ab", "--"); int exclusionPos = randomIntBetween(wildcardPos + 1, 7); - List exclusionExpression = new ArrayList<>(); + List exclusionExpression = new ArrayList<>(); for (int i = 0; i < wildcardPos; i++) { - exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); + exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); } - exclusionExpression.add(new ResolvedExpression(wildcard)); + exclusionExpression.add(wildcard); for (int i = wildcardPos + 1; i < exclusionPos; i++) { - exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); + exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); } - exclusionExpression.add(new ResolvedExpression(exclusion)); + exclusionExpression.add(exclusion); for (int i = 0; i < randomIntBetween(0, 3); i++) { - exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); + exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); } ExpressionList expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), exclusionExpression); if (randomBoolean()) { @@ -201,28 +193,28 @@ public void testSingleExclusionExpression() { if (i == wildcardPos) { assertThat(expression.isWildcard(), is(true)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++).resource())); + assertThat(expression.get(), is(exclusionExpression.get(i++))); } else if (i == exclusionPos) { 
assertThat(expression.isExclusion(), is(true)); - assertThat(expression.isWildcard(), is(exclusionExpression.get(i).resource().contains("*"))); - assertThat(expression.get(), is(exclusionExpression.get(i++).resource().substring(1))); + assertThat(expression.isWildcard(), is(exclusionExpression.get(i).contains("*"))); + assertThat(expression.get(), is(exclusionExpression.get(i++).substring(1))); } else { assertThat(expression.isWildcard(), is(false)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++).resource())); + assertThat(expression.get(), is(exclusionExpression.get(i++))); } } } public void testExclusionsExpression() { - for (Tuple, List> exclusionExpression : List.of( - new Tuple<>(resolvedExpressions("-a", "*", "-a"), List.of(false, false, true)), - new Tuple<>(resolvedExpressions("-b*", "c", "-a"), List.of(false, false, true)), - new Tuple<>(resolvedExpressions("*d", "-", "*b"), List.of(false, true, false)), - new Tuple<>(resolvedExpressions("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), - new Tuple<>(resolvedExpressions("*-", "-*", "a", "-b"), List.of(false, true, false, true)), - new Tuple<>(resolvedExpressions("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), - new Tuple<>(resolvedExpressions("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) + for (Tuple, List> exclusionExpression : List.of( + new Tuple<>(List.of("-a", "*", "-a"), List.of(false, false, true)), + new Tuple<>(List.of("-b*", "c", "-a"), List.of(false, false, true)), + new Tuple<>(List.of("*d", "-", "*b"), List.of(false, true, false)), + new Tuple<>(List.of("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), + new Tuple<>(List.of("*-", "-*", "a", "-b"), List.of(false, true, false, true)), + new Tuple<>(List.of("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), + new 
Tuple<>(List.of("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), @@ -235,11 +227,11 @@ public void testExclusionsExpression() { for (Expression expression : expressionList) { boolean isExclusion = exclusionExpression.v2().get(i); assertThat(expression.isExclusion(), is(isExclusion)); - assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).resource().contains("*"))); + assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).contains("*"))); if (isExclusion) { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++).resource().substring(1))); + assertThat(expression.get(), is(exclusionExpression.v1().get(i++).substring(1))); } else { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++).resource())); + assertThat(expression.get(), is(exclusionExpression.v1().get(i++))); } } } @@ -314,8 +306,4 @@ private Context getContextWithOptions(IndicesOptions indicesOptions) { when(context.getOptions()).thenReturn(indicesOptions); return context; } - - private List resolvedExpressions(String... 
expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); - } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index da19bd68e288a..d58de5ca65ea0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -1581,27 +1580,16 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); assertEquals( - Set.of(new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")), - indexNameExpressionResolver.resolveExpressions(state, "alias-*") - ); - assertEquals( - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")), + new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*") ); assertEquals( - Set.of( - new ResolvedExpression("test-0"), - new ResolvedExpression("test-1"), - new ResolvedExpression("alias-0"), - new ResolvedExpression("alias-1") - ), + new HashSet<>(Arrays.asList("test-0", 
"test-1", "alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*") ); - assertEquals( - Set.of(new ResolvedExpression("test-1"), new ResolvedExpression("alias-1")), - indexNameExpressionResolver.resolveExpressions(state, "*-1") - ); + assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1")); } public void testFilteringAliases() { @@ -1610,25 +1598,16 @@ public void testFilteringAliases() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = Set.of(new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")); + Set resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1")); String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertArrayEquals(new String[] { "alias-0" }, strings); // concrete index supersedes filtering alias - resolvedExpressions = Set.of( - new ResolvedExpression("test-0"), - new ResolvedExpression("alias-0"), - new ResolvedExpression("alias-1") - ); + resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); - resolvedExpressions = Set.of( - new ResolvedExpression("test-0"), - new ResolvedExpression("test-1"), - new ResolvedExpression("alias-0"), - new ResolvedExpression("alias-1") - ); + resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); } @@ -1642,7 +1621,7 @@ public void testIndexAliases() { .putAlias(AliasMetadata.builder("test-alias-non-filtering")) ); ClusterState state = ClusterState.builder(new 
ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); String[] strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); Arrays.sort(strings); @@ -1677,28 +1656,28 @@ public void testIndexAliasesDataStreamAliases() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { // Only resolve aliases with with that refer to dataStreamName1 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs", "logs_bar")); } { // Only resolve aliases with with that refer to dataStreamName2 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); assertThat(result, arrayContainingInAnyOrder("logs_baz", "logs_baz2")); } { // Null is returned, because skipping identity check and resolvedExpressions contains the backing index name - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, false, 
resolvedExpressions); assertThat(result, nullValue()); } { // Null is returned, because the wildcard expands to a list of aliases containing an unfiltered alias for dataStreamName1 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1712,7 +1691,7 @@ public void testIndexAliasesDataStreamAliases() { } { // Null is returned, because an unfiltered alias is targeting the same data stream - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "logs_bar", "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "logs_bar", "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1726,7 +1705,7 @@ public void testIndexAliasesDataStreamAliases() { } { // The filtered alias is returned because although we target the data stream name, skipIdentity is true - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1740,7 +1719,7 @@ public void testIndexAliasesDataStreamAliases() { } { // Null is returned because we target the data stream name and skipIdentity is false - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1763,13 +1742,13 @@ public 
void testIndexAliasesSkipIdentity() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")); + Set resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias")); String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); assertArrayEquals(new String[] { "test-alias" }, aliases); - resolvedExpressions = Collections.singleton(new ResolvedExpression("other-alias")); + resolvedExpressions = Collections.singleton("other-alias"); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertArrayEquals(new String[] { "other-alias" }, aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); @@ -1790,7 +1769,7 @@ public void testConcreteWriteIndexSuccessful() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")) + new HashSet<>(Arrays.asList("test-0", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1872,7 +1851,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) + new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1910,7 +1889,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")) + new 
HashSet<>(Arrays.asList("test-0", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1946,7 +1925,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) + new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1987,7 +1966,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { x -> true, x -> true, true, - Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) + new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 05382de49087d..96a74d2e23aad 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -44,8 +44,10 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -74,6 +76,7 @@ import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; 
@@ -691,6 +694,178 @@ public void testAggregateSettingsAppliesSettingsFromTemplatesAndRequest() { assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("value2")); } + public void testAggregateSettingsProviderOverrulesSettingsFromRequest() { + IndexTemplateMetadata templateMetadata = addMatchingTemplate(builder -> { + builder.settings(Settings.builder().put("template_setting", "value1")); + }); + Metadata metadata = new Metadata.Builder().templates(Map.of("template_1", templateMetadata)).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + request.settings(Settings.builder().put("request_setting", "value2").build()); + + Settings aggregatedIndexSettings = aggregateIndexSettings( + clusterState, + request, + templateMetadata.settings(), + null, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Set.of(new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("request_setting", "overrule_value").put("other_setting", "other_value").build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } + }) + ); + + assertThat(aggregatedIndexSettings.get("template_setting"), equalTo("value1")); + assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("overrule_value")); + assertThat(aggregatedIndexSettings.get("other_setting"), equalTo("other_value")); + } + + public void testAggregateSettingsProviderOverrulesNullFromRequest() { + IndexTemplateMetadata templateMetadata = addMatchingTemplate(builder -> { + builder.settings(Settings.builder().put("template_setting", "value1")); + }); + Metadata metadata = new 
Metadata.Builder().templates(Map.of("template_1", templateMetadata)).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + request.settings(Settings.builder().putNull("request_setting").build()); + + Settings aggregatedIndexSettings = aggregateIndexSettings( + clusterState, + request, + templateMetadata.settings(), + null, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Set.of(new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("request_setting", "overrule_value").put("other_setting", "other_value").build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } + }) + ); + + assertThat(aggregatedIndexSettings.get("template_setting"), equalTo("value1")); + assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("overrule_value")); + assertThat(aggregatedIndexSettings.get("other_setting"), equalTo("other_value")); + } + + public void testAggregateSettingsProviderOverrulesSettingsFromTemplates() { + IndexTemplateMetadata templateMetadata = addMatchingTemplate(builder -> { + builder.settings(Settings.builder().put("template_setting", "value1")); + }); + Metadata metadata = new Metadata.Builder().templates(Map.of("template_1", templateMetadata)).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + request.settings(Settings.builder().put("request_setting", "value2").build()); + + Settings aggregatedIndexSettings = aggregateIndexSettings( + clusterState, + request, + templateMetadata.settings(), + null, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + 
randomShardLimitService(), + Set.of(new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("template_setting", "overrule_value").put("other_setting", "other_value").build(); + } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return true; + } + }) + ); + + assertThat(aggregatedIndexSettings.get("template_setting"), equalTo("overrule_value")); + assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("value2")); + assertThat(aggregatedIndexSettings.get("other_setting"), equalTo("other_value")); + } + + public void testAggregateSettingsProviderOverrulesNullFromTemplates() { + IndexTemplateMetadata templateMetadata = addMatchingTemplate(builder -> { + builder.settings(Settings.builder().putNull("template_setting")); + }); + Metadata metadata = new Metadata.Builder().templates(Map.of("template_1", templateMetadata)).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + request.settings(Settings.builder().put("request_setting", "value2").build()); + + Settings aggregatedIndexSettings = aggregateIndexSettings( + clusterState, + request, + templateMetadata.settings(), + null, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Set.of(new IndexSettingProvider() { + @Override + public Settings getAdditionalIndexSettings( + String indexName, + String dataStreamName, + IndexMode templateIndexMode, + Metadata metadata, + Instant resolvedAt, + Settings indexTemplateAndCreateRequestSettings, + List combinedTemplateMappings + ) { + return Settings.builder().put("template_setting", "overrule_value").put("other_setting", "other_value").build(); + } + + @Override + public 
boolean overrulesTemplateAndRequestSettings() { + return true; + } + }) + ); + + assertThat(aggregatedIndexSettings.get("template_setting"), equalTo("overrule_value")); + assertThat(aggregatedIndexSettings.get("request_setting"), equalTo("value2")); + assertThat(aggregatedIndexSettings.get("other_setting"), equalTo("other_value")); + } + public void testInvalidAliasName() { final String[] invalidAliasNames = new String[] { "-alias1", "+alias2", "_alias3", "a#lias", "al:ias", ".", ".." }; String aliasName = randomFrom(invalidAliasNames); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 25ed5fb2bdab2..982394ca31b1c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; @@ -21,13 +20,13 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; -import java.util.Set; import java.util.function.Predicate; -import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ 
-51,52 +50,50 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX"))), - equalTo(resolvedExpressionsSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), + equalTo(newHashSet("testXXX")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "testYYY"))), - equalTo(resolvedExpressionsSet("testXXX", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), + equalTo(newHashSet("testXXX", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "ku*"))), - equalTo(resolvedExpressionsSet("testXXX", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), + equalTo(newHashSet("testXXX", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, 
resolvedExpressions("testX*", "kuku"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), + equalTo(newHashSet("testXXX", "testXYY", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*", "-kuku"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( newHashSet( IndexNameExpressionResolver.WildcardExpressionResolver.resolve( context, - resolvedExpressions("testX*", "-doe", "-testXXX", "-testYYY") + Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") ) ), - equalTo(resolvedExpressionsSet("testXYY")) + equalTo(newHashSet("testXYY")) ); if (indicesOptions == IndicesOptions.lenientExpandOpen()) { assertThat( - newHashSet( - IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "-testXXX")) - ), - equalTo(resolvedExpressionsSet("testXXX", "-testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), + equalTo(newHashSet("testXXX", "-testXXX")) ); } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { IndexNotFoundException infe = expectThrows( @@ -106,8 +103,8 @@ public void testConvertWildcardsJustIndicesTests() { assertEquals("-testXXX", 
infe.getIndex().getName()); } assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "-testX*"))), - equalTo(resolvedExpressionsSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), + equalTo(newHashSet("testXXX")) ); } @@ -125,24 +122,24 @@ public void testConvertWildcardsTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testYY*", "alias*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("-kuku"))), - equalTo(resolvedExpressionsSet("-kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), + equalTo(newHashSet("-kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*", "-testYYY"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", "-testYYY"))), + equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*", "testYYY"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testYYY", "testX*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); } @@ -162,8 +159,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -171,8 +168,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), - equalTo(resolvedExpressionsSet("testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + equalTo(newHashSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -180,8 +177,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXXY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + equalTo(newHashSet("testXXX", "testXXY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -220,27 +217,28 @@ public void 
testMultipleWildcards() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*X*"))), - equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), + equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*X*Y"))), - equalTo(resolvedExpressionsSet("testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), + equalTo(newHashSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("kuku*Y*"))), - equalTo(resolvedExpressionsSet("kukuYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), + equalTo(newHashSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*Y*"))), - equalTo(resolvedExpressionsSet("testXXY", "testXYY", "testYYY", "kukuYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), + equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) + .size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*Y*X"))).size(), + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), equalTo(0) ); } @@ -259,11 +257,11 @@ public void testAll() { ); assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) + equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); IndicesOptions noExpandOptions = IndicesOptions.fromOptions( randomBoolean(), @@ -300,7 +298,7 @@ public void testAllAliases() { IndicesOptions.lenientExpandOpen(), // don't include hidden SystemIndexAccessLevel.NONE ); - assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(Set.of())); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); } { @@ -321,7 +319,7 @@ public void testAllAliases() { ); assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(resolvedExpressionsSet("index-visible-alias")) + equalTo(newHashSet("index-visible-alias")) ); } } @@ -364,7 +362,7 @@ public void testAllDataStreams() { assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(resolvedExpressionsSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) + equalTo(newHashSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) ); } @@ -387,7 +385,7 @@ public void testAllDataStreams() { NONE ); - assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(Set.of())); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), 
equalTo(newHashSet())); } } @@ -508,16 +506,16 @@ public void testResolveAliases() { ); { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("foo_a*") + Collections.singletonList("foo_a*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_index", "bar_index"))); + assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - resolvedExpressions("foo_a*") + Collections.singletonList("foo_a*") ); assertEquals(0, indices.size()); } @@ -526,45 +524,45 @@ public void testResolveAliases() { IndexNotFoundException.class, () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesStrictContext, - resolvedExpressions("foo_a*") + Collections.singletonList("foo_a*") ) ); assertEquals("foo_a*", infe.getIndex().getName()); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("foo*") + Collections.singletonList("foo*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index", "bar_index"))); + assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - resolvedExpressions("foo*") + Collections.singletonList("foo*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); + assertThat(indices, 
containsInAnyOrder("foo_foo", "foo_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesStrictContext, - resolvedExpressions("foo*") + Collections.singletonList("foo*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); + assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("foo_alias") + Collections.singletonList("foo_alias") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); + assertThat(indices, containsInAnyOrder("foo_alias")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - resolvedExpressions("foo_alias") + Collections.singletonList("foo_alias") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); + assertThat(indices, containsInAnyOrder("foo_alias")); } { IllegalArgumentException iae = expectThrows( @@ -583,11 +581,11 @@ public void testResolveAliases() { SystemIndexAccessLevel.NONE ); { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( noExpandNoAliasesContext, - resolvedExpressions("foo_alias") + List.of("foo_alias") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); + assertThat(indices, containsInAnyOrder("foo_alias")); } IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( false, @@ -656,18 +654,18 @@ public void 
testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("foo_*") + Collections.singletonList("foo_*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_index", "foo_foo", "bar_index"))); + assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); // data streams are not included and expression doesn't match the data steram indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - resolvedExpressions("bar_*") + Collections.singletonList("bar_*") ); - assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("bar_bar", "bar_index"))); + assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); } { @@ -693,39 +691,35 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesAndDataStreamsContext, - resolvedExpressions("foo_*") + Collections.singletonList("foo_*") ); assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) - ) + indices, + containsInAnyOrder( + "foo_index", + "bar_index", + "foo_foo", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) ) ); // include all wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( 
indicesAliasesAndDataStreamsContext, - resolvedExpressions("*") + Collections.singletonList("*") ); assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) - ) + indices, + containsInAnyOrder( + "foo_index", + "bar_index", + "foo_foo", + "bar_bar", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) ) ); } @@ -754,39 +748,35 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesDataStreamsAndHiddenIndices, - resolvedExpressions("foo_*") + Collections.singletonList("foo_*") ); assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) - ) + indices, + containsInAnyOrder( + "foo_index", + "bar_index", + "foo_foo", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) ) ); // include all wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesDataStreamsAndHiddenIndices, - resolvedExpressions("*") + Collections.singletonList("*") ); assertThat( - newHashSet(indices), - equalTo( - resolvedExpressionsSet( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) - ) 
+ indices, + containsInAnyOrder( + "foo_index", + "bar_index", + "foo_foo", + "bar_bar", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) ) ); } @@ -818,28 +808,16 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - List.of(new ResolvedExpression("*")) - ); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("bar_bar", "foo_foo", "foo_index", "bar_index"))); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of(new ResolvedExpression("*"))); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("bar_bar", "foo_foo", "foo_index", "bar_index"))); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - List.of(new ResolvedExpression("foo*")) - ); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_foo", "foo_index", "bar_index"))); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - onlyIndicesContext, - List.of(new ResolvedExpression("foo*")) - ); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - List.of(new ResolvedExpression("foo_alias")) - ); - assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_alias"))); + Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); + assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); + assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", 
"foo_index", "bar_index")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); + assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); + assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); + assertThat(matches, containsInAnyOrder("foo_alias")); IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") @@ -862,19 +840,8 @@ private static IndexMetadata.Builder indexBuilder(String index) { private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { IndexNotFoundException infe = expectThrows( IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - context, - List.of(new ResolvedExpression(wildcardExpression)) - ) + () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) ); assertEquals(wildcardExpression, infe.getIndex().getName()); } - - private List resolvedExpressions(String... expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); - } - - private Set resolvedExpressionsSet(String... 
expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet()); - } } diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java index 8bfd4c7c5ac68..22308e15f4845 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java @@ -167,10 +167,6 @@ public void testUpgradeMarksPreviousVersion() { assertThat(nodeMetadata.previousNodeVersion(), equalTo(buildVersion)); } - public static Version tooNewVersion() { - return Version.fromId(between(Version.CURRENT.id + 1, 99999999)); - } - public static IndexVersion tooNewIndexVersion() { return IndexVersion.fromId(between(IndexVersion.current().id() + 1, 99999999)); } @@ -179,10 +175,6 @@ public static BuildVersion tooNewBuildVersion() { return BuildVersion.fromVersionId(between(Version.CURRENT.id() + 1, 99999999)); } - public static Version tooOldVersion() { - return Version.fromId(between(1, Version.CURRENT.minimumCompatibilityVersion().id - 1)); - } - public static BuildVersion tooOldBuildVersion() { return BuildVersion.fromVersionId(between(1, Version.CURRENT.minimumCompatibilityVersion().id - 1)); } diff --git a/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java b/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java index bf3fc1697aa44..c7614e2d98eed 100644 --- a/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java +++ b/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java @@ -96,7 +96,9 @@ public void testFailsOnEmptyPath() { } public void testFailsIfUnnecessary() throws IOException { - final Version nodeVersion = Version.fromId(between(Version.CURRENT.minimumCompatibilityVersion().id, Version.CURRENT.id)); + final BuildVersion nodeVersion = BuildVersion.fromVersionId( + 
between(Version.CURRENT.minimumCompatibilityVersion().id, Version.CURRENT.id) + ); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); final ElasticsearchException elasticsearchException = expectThrows( @@ -107,7 +109,7 @@ public void testFailsIfUnnecessary() throws IOException { elasticsearchException.getMessage(), allOf( containsString("compatible with current version"), - containsString(Version.CURRENT.toString()), + containsString(BuildVersion.current().toString()), containsString(nodeVersion.toString()) ) ); @@ -115,7 +117,7 @@ public void testFailsIfUnnecessary() throws IOException { } public void testWarnsIfTooOld() throws Exception { - final Version nodeVersion = NodeMetadataTests.tooOldVersion(); + final BuildVersion nodeVersion = NodeMetadataTests.tooOldBuildVersion(); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); mockTerminal.addTextInput("n"); @@ -137,11 +139,11 @@ public void testWarnsIfTooOld() throws Exception { expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); final NodeMetadata nodeMetadata = PersistedClusterStateService.nodeMetadata(dataPaths); - assertThat(nodeMetadata.nodeVersion().toVersion(), equalTo(nodeVersion)); + assertThat(nodeMetadata.nodeVersion(), equalTo(nodeVersion)); } public void testWarnsIfTooNew() throws Exception { - final Version nodeVersion = NodeMetadataTests.tooNewVersion(); + final BuildVersion nodeVersion = NodeMetadataTests.tooNewBuildVersion(); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); mockTerminal.addTextInput(randomFrom("yy", "Yy", "n", "yes", "true", "N", "no")); @@ -162,11 +164,11 @@ public void testWarnsIfTooNew() throws Exception { expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); final NodeMetadata nodeMetadata = 
PersistedClusterStateService.nodeMetadata(dataPaths); - assertThat(nodeMetadata.nodeVersion().toVersion(), equalTo(nodeVersion)); + assertThat(nodeMetadata.nodeVersion(), equalTo(nodeVersion)); } public void testOverwritesIfTooOld() throws Exception { - final Version nodeVersion = NodeMetadataTests.tooOldVersion(); + final BuildVersion nodeVersion = NodeMetadataTests.tooOldBuildVersion(); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); mockTerminal.addTextInput(randomFrom("y", "Y")); @@ -189,7 +191,7 @@ public void testOverwritesIfTooOld() throws Exception { } public void testOverwritesIfTooNew() throws Exception { - final Version nodeVersion = NodeMetadataTests.tooNewVersion(); + final BuildVersion nodeVersion = NodeMetadataTests.tooNewBuildVersion(); PersistedClusterStateService.overrideVersion(nodeVersion, dataPaths); final MockTerminal mockTerminal = MockTerminal.create(); mockTerminal.addTextInput(randomFrom("y", "Y")); diff --git a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index 450d123f551c8..4428a7e078510 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -54,6 +54,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -1414,14 +1415,17 @@ public void testOverrideLuceneVersion() throws IOException { assertThat(clusterState.metadata().version(), equalTo(version)); } + @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) + BuildVersion overrideVersion 
= BuildVersion.fromVersionId(Version.V_8_0_0.id); + NodeMetadata prevMetadata = PersistedClusterStateService.nodeMetadata(persistedClusterStateService.getDataPaths()); assertEquals(BuildVersion.current(), prevMetadata.nodeVersion()); - PersistedClusterStateService.overrideVersion(Version.V_8_0_0, persistedClusterStateService.getDataPaths()); + PersistedClusterStateService.overrideVersion(overrideVersion, persistedClusterStateService.getDataPaths()); NodeMetadata metadata = PersistedClusterStateService.nodeMetadata(persistedClusterStateService.getDataPaths()); - assertEquals(BuildVersion.fromVersionId(Version.V_8_0_0.id()), metadata.nodeVersion()); + assertEquals(overrideVersion, metadata.nodeVersion()); for (Path p : persistedClusterStateService.getDataPaths()) { NodeMetadata individualMetadata = PersistedClusterStateService.nodeMetadata(p); - assertEquals(BuildVersion.fromVersionId(Version.V_8_0_0.id()), individualMetadata.nodeVersion()); + assertEquals(overrideVersion, individualMetadata.nodeVersion()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java b/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java index 628de0b047bf5..adac8bf204f3e 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java @@ -23,15 +23,24 @@ public class IndexSettingProviderTests extends ESSingleNodeTestCase { public void testIndexCreation() throws Exception { - var indexService = createIndex("my-index1"); + Settings settings = Settings.builder().put("index.mapping.depth.limit", 10).build(); + var indexService = createIndex("my-index1", settings); assertFalse(indexService.getIndexSettings().getSettings().hasValue("index.refresh_interval")); + assertEquals("10", indexService.getIndexSettings().getSettings().get("index.mapping.depth.limit")); INDEX_SETTING_PROVIDER1_ENABLED.set(true); - indexService = 
createIndex("my-index2"); + indexService = createIndex("my-index2", settings); assertTrue(indexService.getIndexSettings().getSettings().hasValue("index.refresh_interval")); + assertEquals("10", indexService.getIndexSettings().getSettings().get("index.mapping.depth.limit")); + INDEX_SETTING_OVERRULING.set(true); + indexService = createIndex("my-index3", settings); + assertTrue(indexService.getIndexSettings().getSettings().hasValue("index.refresh_interval")); + assertEquals("100", indexService.getIndexSettings().getSettings().get("index.mapping.depth.limit")); + + INDEX_SETTING_DEPTH_ENABLED.set(false); INDEX_SETTING_PROVIDER2_ENABLED.set(true); - var e = expectThrows(IllegalArgumentException.class, () -> createIndex("my-index3")); + var e = expectThrows(IllegalArgumentException.class, () -> createIndex("my-index4", settings)); assertEquals( "additional index setting [index.refresh_interval] added by [TestIndexSettingsProvider] is already present", e.getMessage() @@ -47,7 +56,7 @@ public static class Plugin1 extends Plugin { @Override public Collection getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) { - return List.of(new TestIndexSettingsProvider("index.refresh_interval", "-1", INDEX_SETTING_PROVIDER1_ENABLED)); + return List.of(new TestIndexSettingsProvider("-1", INDEX_SETTING_PROVIDER1_ENABLED)); } } @@ -56,22 +65,22 @@ public static class Plugin2 extends Plugin { @Override public Collection getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) { - return List.of(new TestIndexSettingsProvider("index.refresh_interval", "100s", INDEX_SETTING_PROVIDER2_ENABLED)); + return List.of(new TestIndexSettingsProvider("100s", INDEX_SETTING_PROVIDER2_ENABLED)); } } private static final AtomicBoolean INDEX_SETTING_PROVIDER1_ENABLED = new AtomicBoolean(false); private static final AtomicBoolean INDEX_SETTING_PROVIDER2_ENABLED = new AtomicBoolean(false); + private static final AtomicBoolean INDEX_SETTING_DEPTH_ENABLED = new 
AtomicBoolean(true); + private static final AtomicBoolean INDEX_SETTING_OVERRULING = new AtomicBoolean(false); static class TestIndexSettingsProvider implements IndexSettingProvider { - private final String settingName; - private final String settingValue; + private final String intervalValue; private final AtomicBoolean enabled; - TestIndexSettingsProvider(String settingName, String settingValue, AtomicBoolean enabled) { - this.settingName = settingName; - this.settingValue = settingValue; + TestIndexSettingsProvider(String intervalValue, AtomicBoolean enabled) { + this.intervalValue = intervalValue; this.enabled = enabled; } @@ -86,10 +95,19 @@ public Settings getAdditionalIndexSettings( List combinedTemplateMappings ) { if (enabled.get()) { - return Settings.builder().put(settingName, settingValue).build(); + var builder = Settings.builder().put("index.refresh_interval", intervalValue); + if (INDEX_SETTING_DEPTH_ENABLED.get()) { + builder.put("index.mapping.depth.limit", 100); + } + return builder.build(); } else { return Settings.EMPTY; } } + + @Override + public boolean overrulesTemplateAndRequestSettings() { + return INDEX_SETTING_OVERRULING.get(); + } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java index 286be8d12570d..357ada3ad656d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BlockSourceReaderTests.java @@ -51,7 +51,7 @@ public void testEmptyArray() throws IOException { private void loadBlock(LeafReaderContext ctx, Consumer test) throws IOException { ValueFetcher valueFetcher = SourceValueFetcher.toString(Set.of("field")); BlockSourceReader.LeafIteratorLookup lookup = BlockSourceReader.lookupFromNorms("field"); - BlockLoader loader = new BlockSourceReader.BytesRefsBlockLoader(valueFetcher, lookup, null); + BlockLoader loader 
= new BlockSourceReader.BytesRefsBlockLoader(valueFetcher, lookup); assertThat(loader.columnAtATimeReader(ctx), nullValue()); BlockLoader.RowStrideReader reader = loader.rowStrideReader(ctx); assertThat(loader.rowStrideStoredFieldSpec(), equalTo(StoredFieldsSpec.NEEDS_SOURCE)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 86914cfe9ced7..c2375e948fda0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -1353,6 +1353,7 @@ private void testBlockLoaderFromParent(boolean columnReader, boolean syntheticSo }; MapperService mapper = createMapperService(syntheticSource ? syntheticSourceMapping(buildFields) : mapping(buildFields)); BlockReaderSupport blockReaderSupport = getSupportedReaders(mapper, "field.sub"); - testBlockLoader(columnReader, example, blockReaderSupport); + var sourceLoader = mapper.mappingLookup().newSourceLoader(SourceFieldMetrics.NOOP); + testBlockLoader(columnReader, example, blockReaderSupport, sourceLoader); } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 17975b7d18dd8..36f7355a541c1 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ 
-78,7 +77,6 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; -import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -679,27 +677,27 @@ public void testBuildAliasFilter() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-0")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-0")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-0")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-0")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-1")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "baz"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-1")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bax"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", 
resolvedExpressions("test-alias-0", "test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-0", "test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0", "test-alias-1")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -708,7 +706,7 @@ public void testBuildAliasFilter() { assertThat(filter.should(), containsInAnyOrder(QueryBuilders.termQuery("foo", "baz"), QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-0", "test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-0", "test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0", "test-alias-1")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -720,7 +718,7 @@ public void testBuildAliasFilter() { AliasFilter result = indicesService.buildAliasFilter( state, "test-0", - resolvedExpressions("test-alias-0", "test-alias-1", "test-alias-non-filtering") + Set.of("test-alias-0", "test-alias-1", "test-alias-non-filtering") ); assertThat(result.getAliases(), emptyArray()); assertThat(result.getQueryBuilder(), nullValue()); @@ -729,7 +727,7 @@ public void testBuildAliasFilter() { AliasFilter result = indicesService.buildAliasFilter( state, "test-1", - resolvedExpressions("test-alias-0", "test-alias-1", "test-alias-non-filtering") + Set.of("test-alias-0", "test-alias-1", "test-alias-non-filtering") ); assertThat(result.getAliases(), emptyArray()); assertThat(result.getQueryBuilder(), nullValue()); @@ -756,19 +754,19 @@ public void testBuildAliasFilterDataStreamAliases() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { String index = backingIndex1.getIndex().getName(); - AliasFilter 
result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { String index = backingIndex2.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "baz"))); } { String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo", "logs")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -778,7 +776,7 @@ public void testBuildAliasFilterDataStreamAliases() { } { String index = backingIndex2.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo", "logs")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -789,13 +787,13 @@ public void testBuildAliasFilterDataStreamAliases() { { // querying an unfiltered and a filtered alias for the same data stream should drop the filters String index = backingIndex1.getIndex().getName(); - AliasFilter result = 
indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs", "logs_bar")); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs", "logs_bar")); assertThat(result, is(AliasFilter.EMPTY)); } { // similarly, querying the data stream name and a filtered alias should drop the filter String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs", dataStreamName1)); + AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs", dataStreamName1)); assertThat(result, is(AliasFilter.EMPTY)); } } @@ -848,8 +846,4 @@ public void testWithTempIndexServiceHandlesExistingIndex() throws Exception { return null; }); } - - private Set resolvedExpressions(String... expressions) { - return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet()); - } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/KeywordFieldSyntheticSourceSupport.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/KeywordFieldSyntheticSourceSupport.java index 0d05c3d0cd77b..502ffdde62e5a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/KeywordFieldSyntheticSourceSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/KeywordFieldSyntheticSourceSupport.java @@ -37,6 +37,11 @@ public class KeywordFieldSyntheticSourceSupport implements MapperTestCase.Synthe this.docValues = useFallbackSyntheticSource == false || ESTestCase.randomBoolean(); } + @Override + public boolean ignoreAbove() { + return ignoreAbove != null; + } + @Override public boolean preservesExactSource() { // We opt in into fallback synthetic source implementation diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 
8bc2666bcfe3b..da04f30ff8023 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -179,6 +179,11 @@ public final MapperService createMapperService(XContentBuilder mappings) throws return createMapperService(getVersion(), mappings); } + public final MapperService createSytheticSourceMapperService(XContentBuilder mappings) throws IOException { + var settings = Settings.builder().put("index.mapping.source.mode", "synthetic").build(); + return createMapperService(getVersion(), settings, () -> true, mappings); + } + protected IndexVersion getVersion() { return IndexVersion.current(); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 7669ada750c14..c89c0b2e37dd2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -49,6 +49,7 @@ import org.elasticsearch.script.ScriptFactory; import org.elasticsearch.script.field.DocValuesScriptFieldFactory; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.fetch.StoredFieldsSpec; import org.elasticsearch.search.lookup.LeafStoredFieldsLookup; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.Source; @@ -1103,6 +1104,10 @@ default boolean preservesExactSource() { return false; } + default boolean ignoreAbove() { + return false; + } + /** * Examples that should work when source is generated from doc values. 
*/ @@ -1321,15 +1326,12 @@ private BlockLoader getBlockLoader(boolean columnReader) { return mapper.fieldType(loaderFieldName).blockLoader(new MappedFieldType.BlockLoaderContext() { @Override public String indexName() { - return "test_index"; + return mapper.getIndexSettings().getIndex().getName(); } @Override public IndexSettings indexSettings() { - var imd = IndexMetadata.builder(indexName()) - .settings(MapperTestCase.indexSettings(IndexVersion.current(), 1, 1).put(Settings.EMPTY)) - .build(); - return new IndexSettings(imd, Settings.EMPTY); + return mapper.getIndexSettings(); } @Override @@ -1362,9 +1364,19 @@ public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { private void testBlockLoader(boolean syntheticSource, boolean columnReader) throws IOException { // TODO if we're not using synthetic source use a different sort of example. Or something. - SyntheticSourceExample example = syntheticSourceSupport(false, columnReader).example(5); + var syntheticSourceSupport = syntheticSourceSupport(false, columnReader); + SyntheticSourceExample example = syntheticSourceSupport.example(5); + if (syntheticSource && columnReader == false) { + // The synthetic source testing support can't always handle the difference between stored and synthetic source mode. + // In case of ignore above, the ignored values are always appended after the valid values + // (whether the field has doc values or a stored field). In contrast, stored source just reads original values (from _source) and there + // is no notion of values that are ignored. + // TODO: fix this by improving block loader support: https://github.com/elastic/elasticsearch/issues/115257 + assumeTrue("inconsistent synthetic source testing support with ignore above", syntheticSourceSupport.ignoreAbove() == false); + } + // TODO: only rely on the index.mapping.source.mode setting XContentBuilder mapping = syntheticSource ?
syntheticSourceFieldMapping(example.mapping) : fieldMapping(example.mapping); - MapperService mapper = createMapperService(mapping); + MapperService mapper = syntheticSource ? createSytheticSourceMapperService(mapping) : createMapperService(mapping); BlockReaderSupport blockReaderSupport = getSupportedReaders(mapper, "field"); if (syntheticSource) { // geo_point and point do not yet support synthetic source @@ -1373,11 +1385,16 @@ private void testBlockLoader(boolean syntheticSource, boolean columnReader) thro blockReaderSupport.syntheticSource ); } - testBlockLoader(columnReader, example, blockReaderSupport); + var sourceLoader = mapper.mappingLookup().newSourceLoader(SourceFieldMetrics.NOOP); + testBlockLoader(columnReader, example, blockReaderSupport, sourceLoader); } - protected final void testBlockLoader(boolean columnReader, SyntheticSourceExample example, BlockReaderSupport blockReaderSupport) - throws IOException { + protected final void testBlockLoader( + boolean columnReader, + SyntheticSourceExample example, + BlockReaderSupport blockReaderSupport, + SourceLoader sourceLoader + ) throws IOException { BlockLoader loader = blockReaderSupport.getBlockLoader(columnReader); Function valuesConvert = loadBlockExpected(blockReaderSupport, columnReader); if (valuesConvert == null) { @@ -1404,9 +1421,15 @@ protected final void testBlockLoader(boolean columnReader, SyntheticSourceExampl return; } } else { + StoredFieldsSpec storedFieldsSpec = loader.rowStrideStoredFieldSpec(); + if (storedFieldsSpec.requiresSource()) { + storedFieldsSpec = storedFieldsSpec.merge( + new StoredFieldsSpec(true, storedFieldsSpec.requiresMetadata(), sourceLoader.requiredStoredFields()) + ); + } BlockLoaderStoredFieldsFromLeafLoader storedFieldsLoader = new BlockLoaderStoredFieldsFromLeafLoader( - StoredFieldLoader.fromSpec(loader.rowStrideStoredFieldSpec()).getLoader(ctx, null), - loader.rowStrideStoredFieldSpec().requiresSource() ? 
SourceLoader.FROM_STORED_SOURCE.leaf(ctx.reader(), null) : null + StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(ctx, null), + storedFieldsSpec.requiresSource() ? sourceLoader.leaf(ctx.reader(), null) : null ); storedFieldsLoader.advanceTo(0); BlockLoader.Builder builder = loader.builder(TestBlock.factory(ctx.reader().numDocs()), 1); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TextFieldFamilySyntheticSourceTestSetup.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TextFieldFamilySyntheticSourceTestSetup.java index b6a031c9ff906..97ded7f9a06f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TextFieldFamilySyntheticSourceTestSetup.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TextFieldFamilySyntheticSourceTestSetup.java @@ -51,24 +51,9 @@ public static MapperTestCase.BlockReaderSupport getSupportedReaders(MapperServic } public static Function loadBlockExpected(MapperTestCase.BlockReaderSupport blockReaderSupport, boolean columnReader) { - if (nullLoaderExpected(blockReaderSupport.mapper(), blockReaderSupport.loaderFieldName())) { - return null; - } return v -> ((BytesRef) v).utf8ToString(); } - private static boolean nullLoaderExpected(MapperService mapper, String fieldName) { - MappedFieldType type = mapper.fieldType(fieldName); - if (type instanceof TextFieldMapper.TextFieldType t) { - if (t.isSyntheticSource() == false || t.canUseSyntheticSourceDelegateForQuerying() || t.isStored()) { - return false; - } - String parentField = mapper.mappingLookup().parentField(fieldName); - return parentField == null || nullLoaderExpected(mapper, parentField); - } - return false; - } - public static void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader) { // `reader` here is reader of original document and `roundTripReader` reads document // created from synthetic source. 
@@ -98,6 +83,11 @@ private static class TextFieldFamilySyntheticSourceSupport implements MapperTest ); } + @Override + public boolean ignoreAbove() { + return keywordMultiFieldSyntheticSourceSupport.ignoreAbove(); + } + @Override public MapperTestCase.SyntheticSourceExample example(int maxValues) { if (store) { diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 8bc81fef2157d..7a72a7bd0daf0 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.Plugin; @@ -366,15 +365,9 @@ protected static Settings.Builder indexSettingsNoReplicas(int shards) { /** * Randomly write an empty snapshot of an older version to an empty repository to simulate an older repository metadata format. */ - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - // This used to pick an index version from 7.0.0 to 8.9.0. 
The minimum now is 8.0.0 but it's not clear what the upper range should be protected void maybeInitWithOldSnapshotVersion(String repoName, Path repoPath) throws Exception { if (randomBoolean() && randomBoolean()) { - initWithSnapshotVersion( - repoName, - repoPath, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersions.V_8_9_0) - ); + initWithSnapshotVersion(repoName, repoPath, IndexVersionUtils.randomVersion()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index fd376fcd07688..18c591166e720 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -39,7 +39,6 @@ import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; @@ -50,7 +49,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.ClusterConnectionManager; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -586,13 +584,8 @@ public void sendRequest( // poor mans request cloning... 
BytesStreamOutput bStream = new BytesStreamOutput(); request.writeTo(bStream); - final TransportRequest clonedRequest; - if (request instanceof BytesTransportRequest) { - clonedRequest = copyRawBytesForBwC(bStream); - } else { - RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); - clonedRequest = reg.newRequest(bStream.bytes().streamInput()); - } + RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); + final TransportRequest clonedRequest = reg.newRequest(bStream.bytes().streamInput()); assert clonedRequest.getClass().equals(MasterNodeRequestHelper.unwrapTermOverride(request).getClass()) : clonedRequest + " vs " + request; @@ -640,15 +633,6 @@ protected void doRun() throws IOException { } } - // Some request handlers read back a BytesTransportRequest - // into a different class that cannot be re-serialized (i.e. JOIN_VALIDATE_ACTION_NAME), - // in those cases we just copy the raw bytes back to a BytesTransportRequest. - // This is only needed for the BwC for JOIN_VALIDATE_ACTION_NAME and can be removed in the next major - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - private static TransportRequest copyRawBytesForBwC(BytesStreamOutput bStream) throws IOException { - return new BytesTransportRequest(bStream.bytes().streamInput()); - } - @Override public void clearCallback() { synchronized (this) { diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index f517c03468bc2..753ce8283afca 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -156,8 +156,12 @@ public void testNestedWithinAutoDateHistogram() throws 
IOException { AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field")) .withSplitLeavesIntoSeperateAggregators(false); expectThrows(IllegalArgumentException.class, () -> testCase(iw -> { - iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java index 3674103eda215..f8cb371687d72 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.Collections; import java.util.EnumSet; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -92,10 +93,13 @@ public OperationMode getOperationMode() { } public Map getPolicies() { - return policyMetadatas.values() - .stream() - .map(LifecyclePolicyMetadata::getPolicy) - .collect(Collectors.toMap(LifecyclePolicy::getName, Function.identity())); + // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph + Map policies = new HashMap<>(policyMetadatas.size()); + for (LifecyclePolicyMetadata policyMetadata : policyMetadatas.values()) { + LifecyclePolicy policy = policyMetadata.getPolicy(); + policies.put(policy.getName(), policy); + } + return Collections.unmodifiableMap(policies); } @Override diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index ca789fee7b744..b298d486c9e03 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -71,7 +71,7 @@ public class StartTrainedModelDeploymentAction extends ActionType implements ToXCon /** * If the queue is created then we can OOM when we create the queue. */ - private static final int MAX_QUEUE_CAPACITY = 1_000_000; + private static final int MAX_QUEUE_CAPACITY = 100_000; public static final ParseField MODEL_ID = new ParseField("model_id"); public static final ParseField DEPLOYMENT_ID = new ParseField("deployment_id"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java index 17088cff8718b..c504ebe56ed45 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java @@ -54,6 +54,18 @@ */ public class ExpressionRoleMapping implements ToXContentObject, Writeable { + /** + * Reserved suffix for read-only operator-defined role mappings. + * This suffix is added to the name of all cluster-state role mappings returned via + * the {@code TransportGetRoleMappingsAction} action. + */ + public static final String READ_ONLY_ROLE_MAPPING_SUFFIX = "-read-only-operator-mapping"; + /** + * Reserved metadata field to mark role mappings as read-only. 
+ * This field is added to the metadata of all cluster-state role mappings returned via + * the {@code TransportGetRoleMappingsAction} action. + */ + public static final String READ_ONLY_ROLE_MAPPING_METADATA_FLAG = "_read_only"; private static final ObjectParser PARSER = new ObjectParser<>("role-mapping", Builder::new); /** @@ -136,6 +148,28 @@ public ExpressionRoleMapping(StreamInput in) throws IOException { this.metadata = in.readGenericMap(); } + public static boolean hasReadOnlySuffix(String name) { + return name.endsWith(READ_ONLY_ROLE_MAPPING_SUFFIX); + } + + public static void validateNoReadOnlySuffix(String name) { + if (hasReadOnlySuffix(name)) { + throw new IllegalArgumentException( + "Invalid mapping name [" + name + "]. [" + READ_ONLY_ROLE_MAPPING_SUFFIX + "] is not an allowed suffix" + ); + } + } + + public static String addReadOnlySuffix(String name) { + return name + READ_ONLY_ROLE_MAPPING_SUFFIX; + } + + public static String removeReadOnlySuffixIfPresent(String name) { + return name.endsWith(READ_ONLY_ROLE_MAPPING_SUFFIX) + ? 
name.substring(0, name.length() - READ_ONLY_ROLE_MAPPING_SUFFIX.length()) + : name; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java index b38b33e082382..74c6223b1ebdd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.core.security.authz; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; @@ -26,8 +28,10 @@ import java.io.IOException; import java.util.Collection; import java.util.EnumSet; +import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashSet; +import java.util.Map; import java.util.Objects; import java.util.Set; @@ -36,7 +40,11 @@ public final class RoleMappingMetadata extends AbstractNamedDiffable implements Metadata.Custom { + private static final Logger logger = LogManager.getLogger(RoleMappingMetadata.class); + public static final String TYPE = "role_mappings"; + public static final String METADATA_NAME_FIELD = "_es_reserved_role_mapping_name"; + public static final String FALLBACK_NAME = "name_not_available_after_deserialization"; @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( @@ -46,12 +54,7 @@ public final class RoleMappingMetadata extends AbstractNamedDiffable ExpressionRoleMapping.parse("name_not_available_after_deserialization", p), - new ParseField(TYPE) - ); + 
PARSER.declareObjectArray(constructorArg(), (p, c) -> parseWithNameFromMetadata(p), new ParseField(TYPE)); } private static final RoleMappingMetadata EMPTY = new RoleMappingMetadata(Set.of()); @@ -153,4 +156,64 @@ public EnumSet context() { // are not persisted. return ALL_CONTEXTS; } + + /** + * Ensures role mapping names are preserved when stored on disk using XContent format, + * which omits names. This method copies the role mapping's name into a reserved metadata field + * during serialization, allowing recovery during deserialization (e.g., after a master-node restart). + * {@link #parseWithNameFromMetadata(XContentParser)} restores the name during parsing. + */ + public static ExpressionRoleMapping copyWithNameInMetadata(ExpressionRoleMapping roleMapping) { + Map metadata = new HashMap<>(roleMapping.getMetadata()); + // note: can't use Maps.copyWith... since these create maps that don't support `null` values in map entries + if (metadata.put(METADATA_NAME_FIELD, roleMapping.getName()) != null) { + logger.error( + "Metadata field [{}] is reserved and will be overwritten with an internal system value. " + + "Rename this field in your role mapping configuration.", + METADATA_NAME_FIELD + ); + } + return new ExpressionRoleMapping( + roleMapping.getName(), + roleMapping.getExpression(), + roleMapping.getRoles(), + roleMapping.getRoleTemplates(), + metadata, + roleMapping.isEnabled() + ); + } + + /** + * If a role mapping does not yet have a name persisted in metadata, it will use a constant fallback name. This method checks if a + * role mapping has the fallback name. + */ + public static boolean hasFallbackName(ExpressionRoleMapping expressionRoleMapping) { + return expressionRoleMapping.getName().equals(FALLBACK_NAME); + } + + /** + * Parse a role mapping from XContent, restoring the name from a reserved metadata field. + * Used to parse a role mapping annotated with its name in metadata via @see {@link #copyWithNameInMetadata(ExpressionRoleMapping)}. 
+ */ + public static ExpressionRoleMapping parseWithNameFromMetadata(XContentParser parser) throws IOException { + ExpressionRoleMapping roleMapping = ExpressionRoleMapping.parse(FALLBACK_NAME, parser); + return new ExpressionRoleMapping( + getNameFromMetadata(roleMapping), + roleMapping.getExpression(), + roleMapping.getRoles(), + roleMapping.getRoleTemplates(), + roleMapping.getMetadata(), + roleMapping.isEnabled() + ); + } + + private static String getNameFromMetadata(ExpressionRoleMapping roleMapping) { + Map metadata = roleMapping.getMetadata(); + if (metadata.containsKey(METADATA_NAME_FIELD) && metadata.get(METADATA_NAME_FIELD) instanceof String name) { + return name; + } else { + // This is valid the first time we recover from cluster-state: the old format metadata won't have a name stored in metadata yet + return FALLBACK_NAME; + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index 5ba5c1fd1218a..23c93226d5494 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -53,7 +53,7 @@ public final class ApplicationPermission { return new PermissionEntry( appPriv, Sets.union(existing.resourceNames, resourceNames), - Automatons.unionAndMinimize(Arrays.asList(existing.resourceAutomaton, patterns)) + Automatons.unionAndDeterminize(Arrays.asList(existing.resourceAutomaton, patterns)) ); } })); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 4e608281a7858..5f3da8f73a708 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -137,7 +137,7 @@ public ClusterPermission build() { } List checks = this.permissionChecks; if (false == actionAutomatons.isEmpty()) { - final Automaton mergedAutomaton = Automatons.unionAndMinimize(this.actionAutomatons); + final Automaton mergedAutomaton = Automatons.unionAndDeterminize(this.actionAutomatons); checks = new ArrayList<>(this.permissionChecks.size() + 1); checks.add(new AutomatonPermissionCheck(mergedAutomaton)); checks.addAll(this.permissionChecks); @@ -156,7 +156,7 @@ private static Automaton createAutomaton(Set allowedActionPatterns, Set< } else { final Automaton allowedAutomaton = Automatons.patterns(allowedActionPatterns); final Automaton excludedAutomaton = Automatons.patterns(excludeActionPatterns); - return Automatons.minusAndMinimize(allowedAutomaton, excludedAutomaton); + return Automatons.minusAndDeterminize(allowedAutomaton, excludedAutomaton); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index 235d7419d2bf0..ed7bbf9158278 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -147,7 +147,7 @@ public static Automaton initializePermittedFieldsAutomaton(FieldPermissionsDefin List automatonList = groups.stream() .map(g -> FieldPermissions.buildPermittedFieldsAutomaton(g.getGrantedFields(), g.getExcludedFields())) .collect(Collectors.toList()); - return Automatons.unionAndMinimize(automatonList); + return 
Automatons.unionAndDeterminize(automatonList); } /** @@ -189,7 +189,7 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel ); } - grantedFieldsAutomaton = Automatons.minusAndMinimize(grantedFieldsAutomaton, deniedFieldsAutomaton); + grantedFieldsAutomaton = Automatons.minusAndDeterminize(grantedFieldsAutomaton, deniedFieldsAutomaton); return grantedFieldsAutomaton; } @@ -206,7 +206,10 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel public FieldPermissions limitFieldPermissions(FieldPermissions limitedBy) { if (hasFieldLevelSecurity() && limitedBy != null && limitedBy.hasFieldLevelSecurity()) { // TODO: cache the automaton computation with FieldPermissionsCache - Automaton _permittedFieldsAutomaton = Automatons.intersectAndMinimize(getIncludeAutomaton(), limitedBy.getIncludeAutomaton()); + Automaton _permittedFieldsAutomaton = Automatons.intersectAndDeterminize( + getIncludeAutomaton(), + limitedBy.getIncludeAutomaton() + ); return new FieldPermissions( CollectionUtils.concatLists(fieldPermissionsDefinitions, limitedBy.fieldPermissionsDefinitions), _permittedFieldsAutomaton diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java index 46261937a0228..a1e14bfde8aa5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsCache.java @@ -107,7 +107,7 @@ FieldPermissions union(Collection fieldPermissionsCollection) List automatonList = fieldPermissionsCollection.stream() .map(FieldPermissions::getIncludeAutomaton) .collect(Collectors.toList()); - return new FieldPermissions(key, Automatons.unionAndMinimize(automatonList)); + return new 
FieldPermissions(key, Automatons.unionAndDeterminize(automatonList)); }); } catch (ExecutionException e) { throw new ElasticsearchException("unable to compute field permissions", e); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index 558f8e6f22ac1..cdd5a6f6ff72d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -283,14 +283,14 @@ public boolean checkResourcePrivileges( for (String forIndexPattern : checkForIndexPatterns) { Automaton checkIndexAutomaton = Automatons.patterns(forIndexPattern); if (false == allowRestrictedIndices && false == isConcreteRestrictedIndex(forIndexPattern)) { - checkIndexAutomaton = Automatons.minusAndMinimize(checkIndexAutomaton, restrictedIndices.getAutomaton()); + checkIndexAutomaton = Automatons.minusAndDeterminize(checkIndexAutomaton, restrictedIndices.getAutomaton()); } if (false == Operations.isEmpty(checkIndexAutomaton)) { Automaton allowedIndexPrivilegesAutomaton = null; for (var indexAndPrivilegeAutomaton : indexGroupAutomatons.entrySet()) { if (Automatons.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { if (allowedIndexPrivilegesAutomaton != null) { - allowedIndexPrivilegesAutomaton = Automatons.unionAndMinimize( + allowedIndexPrivilegesAutomaton = Automatons.unionAndDeterminize( Arrays.asList(allowedIndexPrivilegesAutomaton, indexAndPrivilegeAutomaton.getKey()) ); } else { @@ -342,7 +342,7 @@ public Automaton allowedActionsMatcher(String index) { automatonList.add(group.privilege.getAutomaton()); } } - return automatonList.isEmpty() ? 
Automatons.EMPTY : Automatons.unionAndMinimize(automatonList); + return automatonList.isEmpty() ? Automatons.EMPTY : Automatons.unionAndDeterminize(automatonList); } /** @@ -704,7 +704,7 @@ private Map indexGroupAutomatons(boolean combine) { Automaton indexAutomaton = group.getIndexMatcherAutomaton(); allAutomatons.compute( group.privilege().getAutomaton(), - (key, value) -> value == null ? indexAutomaton : Automatons.unionAndMinimize(List.of(value, indexAutomaton)) + (key, value) -> value == null ? indexAutomaton : Automatons.unionAndDeterminize(List.of(value, indexAutomaton)) ); if (combine) { List> combinedAutomatons = new ArrayList<>(); @@ -714,7 +714,7 @@ private Map indexGroupAutomatons(boolean combine) { group.privilege().getAutomaton() ); if (Operations.isEmpty(intersectingPrivileges) == false) { - Automaton indexPatternAutomaton = Automatons.unionAndMinimize( + Automaton indexPatternAutomaton = Automatons.unionAndDeterminize( List.of(indexAndPrivilegeAutomatons.getValue(), indexAutomaton) ); combinedAutomatons.add(new Tuple<>(intersectingPrivileges, indexPatternAutomaton)); @@ -723,7 +723,7 @@ private Map indexGroupAutomatons(boolean combine) { combinedAutomatons.forEach( automatons -> allAutomatons.compute( automatons.v1(), - (key, value) -> value == null ? automatons.v2() : Automatons.unionAndMinimize(List.of(value, automatons.v2())) + (key, value) -> value == null ? 
automatons.v2() : Automatons.unionAndDeterminize(List.of(value, automatons.v2())) ) ); } @@ -768,7 +768,7 @@ public Group( this.indexNameMatcher = StringMatcher.of(indices).and(name -> restrictedIndices.isRestricted(name) == false); this.indexNameAutomaton = () -> indexNameAutomatonMemo.computeIfAbsent( indices, - k -> Automatons.minusAndMinimize(Automatons.patterns(indices), restrictedIndices.getAutomaton()) + k -> Automatons.minusAndDeterminize(Automatons.patterns(indices), restrictedIndices.getAutomaton()) ); } this.fieldPermissions = Objects.requireNonNull(fieldPermissions); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java index ea32ba13ae576..e4d283aba75a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java @@ -212,7 +212,7 @@ public IsResourceAuthorizedPredicate allowedIndicesMatcher(String action) { public Automaton allowedActionsMatcher(String index) { final Automaton allowedMatcher = baseRole.allowedActionsMatcher(index); final Automaton limitedByMatcher = limitedByRole.allowedActionsMatcher(index); - return Automatons.intersectAndMinimize(allowedMatcher, limitedByMatcher); + return Automatons.intersectAndDeterminize(allowedMatcher, limitedByMatcher); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 7174b2f616c2a..f4df99dcefea4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -57,7 +57,7 @@ import static java.util.Map.entry; import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; -import static org.elasticsearch.xpack.core.security.support.Automatons.unionAndMinimize; +import static org.elasticsearch.xpack.core.security.support.Automatons.unionAndDeterminize; /** * The name of an index related action always being with `indices:` followed by a sequence of slash-separated terms @@ -110,7 +110,7 @@ public final class IndexPrivilege extends Privilege { private static final Automaton DELETE_AUTOMATON = patterns("indices:data/write/delete*", "indices:data/write/bulk*"); private static final Automaton WRITE_AUTOMATON = patterns("indices:data/write/*", TransportAutoPutMappingAction.TYPE.name()); private static final Automaton MONITOR_AUTOMATON = patterns("indices:monitor/*"); - private static final Automaton MANAGE_AUTOMATON = unionAndMinimize( + private static final Automaton MANAGE_AUTOMATON = unionAndDeterminize( Arrays.asList( MONITOR_AUTOMATON, patterns("indices:admin/*", TransportFieldCapabilitiesAction.NAME + "*", GetRollupIndexCapsAction.NAME + "*") @@ -303,7 +303,7 @@ private static IndexPrivilege resolve(Set name) { if (actions.isEmpty() == false) { automata.add(patterns(actions)); } - return new IndexPrivilege(name, unionAndMinimize(automata)); + return new IndexPrivilege(name, unionAndDeterminize(automata)); } static Map values() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index 201cb4b69e472..d3790ea64ba4b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -112,7 +112,7 @@ public 
static Automaton patterns(Collection patterns) { private static Automaton buildAutomaton(Collection patterns) { if (patterns.size() == 1) { - return minimize(pattern(patterns.iterator().next())); + return determinize(pattern(patterns.iterator().next())); } final Function, Automaton> build = strings -> { @@ -121,7 +121,7 @@ private static Automaton buildAutomaton(Collection patterns) { final Automaton patternAutomaton = pattern(pattern); automata.add(patternAutomaton); } - return unionAndMinimize(automata); + return unionAndDeterminize(automata); }; // We originally just compiled each automaton separately and then unioned them all. @@ -188,7 +188,7 @@ private static Automaton buildAutomaton(Collection patterns) { if (misc.isEmpty() == false) { automata.add(build.apply(misc)); } - return unionAndMinimize(automata); + return unionAndDeterminize(automata); } /** @@ -277,22 +277,22 @@ static Automaton wildcard(String text) { return Operations.determinize(concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } - public static Automaton unionAndMinimize(Collection automata) { + public static Automaton unionAndDeterminize(Collection automata) { Automaton res = automata.size() == 1 ? 
automata.iterator().next() : union(automata); - return minimize(res); + return determinize(res); } - public static Automaton minusAndMinimize(Automaton a1, Automaton a2) { + public static Automaton minusAndDeterminize(Automaton a1, Automaton a2) { Automaton res = minus(a1, a2, maxDeterminizedStates); - return minimize(res); + return determinize(res); } - public static Automaton intersectAndMinimize(Automaton a1, Automaton a2) { + public static Automaton intersectAndDeterminize(Automaton a1, Automaton a2) { Automaton res = intersection(a1, a2); - return minimize(res); + return determinize(res); } - private static Automaton minimize(Automaton automaton) { + private static Automaton determinize(Automaton automaton) { return Operations.determinize(automaton, maxDeterminizedStates); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java index 42fc7c196bbcf..7b0bd8a8108e9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java @@ -16,13 +16,13 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; import java.io.IOException; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; public class UpdateWatcherSettingsAction extends ActionType { @@ -34,6 +34,16 @@ public class UpdateWatcherSettingsAction extends ActionType ALLOWED_SETTINGS_PREFIXES = Set.of( + 
IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX, + IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX, + IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ); + + public static final Set EXPLICITLY_DENIED_SETTINGS = Set.of( + IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + "._tier_preference" + ); + public UpdateWatcherSettingsAction() { super(NAME); } @@ -79,13 +89,25 @@ public Map settings() { @Override public ActionRequestValidationException validate() { - Set forbiddenSettings = Sets.difference(settings.keySet(), ALLOWED_SETTING_KEYS); - if (forbiddenSettings.size() > 0) { + Set forbiddenSettings = settings.keySet() + .stream() + .filter( + setting -> (ALLOWED_SETTING_KEYS.contains(setting) == false + && ALLOWED_SETTINGS_PREFIXES.stream().noneMatch(prefix -> setting.startsWith(prefix + "."))) + || EXPLICITLY_DENIED_SETTINGS.contains(setting) + ) + .collect(Collectors.toSet()); + + if (forbiddenSettings.isEmpty() == false) { return ValidateActions.addValidationError( "illegal settings: " + forbiddenSettings + ", these settings may not be configured. 
Only the following settings may be configured: " - + ALLOWED_SETTING_KEYS, + + ALLOWED_SETTING_KEYS + + ", " + + ALLOWED_SETTINGS_PREFIXES.stream().map(s -> s + ".*").collect(Collectors.toSet()) + + " excluding the following explicitly denied settings: " + + EXPLICITLY_DENIED_SETTINGS, null ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentRequestTests.java index 730d994fc5e35..46fc8a36c2c2b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentRequestTests.java @@ -67,7 +67,7 @@ public static Request createRandom() { request.setNumberOfAllocations(randomIntBetween(1, 8)); } if (randomBoolean()) { - request.setQueueCapacity(randomIntBetween(1, 1000000)); + request.setQueueCapacity(randomIntBetween(1, 100_000)); } if (randomBoolean()) { request.setPriority(randomFrom(Priority.values()).toString()); @@ -168,7 +168,7 @@ public void testValidate_GivenQueueCapacityIsNegative() { public void testValidate_GivenQueueCapacityIsAtLimit() { Request request = createRandom(); - request.setQueueCapacity(1_000_000); + request.setQueueCapacity(100_000); ActionRequestValidationException e = request.validate(); @@ -177,12 +177,12 @@ public void testValidate_GivenQueueCapacityIsAtLimit() { public void testValidate_GivenQueueCapacityIsOverLimit() { Request request = createRandom(); - request.setQueueCapacity(1_000_001); + request.setQueueCapacity(100_001); ActionRequestValidationException e = request.validate(); assertThat(e, is(not(nullValue()))); - assertThat(e.getMessage(), containsString("[queue_capacity] must be less than 1000000")); + assertThat(e.getMessage(), containsString("[queue_capacity] must be less than 
100000")); } public void testValidate_GivenTimeoutIsNegative() { @@ -234,6 +234,6 @@ public void testDefaults() { assertThat(request.getNumberOfAllocations(), nullValue()); assertThat(request.computeNumberOfAllocations(), equalTo(1)); assertThat(request.getThreadsPerAllocation(), equalTo(1)); - assertThat(request.getQueueCapacity(), equalTo(1024)); + assertThat(request.getQueueCapacity(), equalTo(10_000)); } } diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle index 61aa2927e46de..6b1c7e42c0fde 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -5,7 +5,7 @@ * 2.0. */ -import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -26,7 +26,7 @@ restResources { } def supportedVersion = bwcVersion -> { - return bwcVersion.onOrAfter("8.10.0"); + return bwcVersion.onOrAfter("8.10.0") && bwcVersion != VersionProperties.elasticsearchVersion } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index ee747d98c26f8..74affb10eaf20 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -241,6 +241,12 @@ private void loadFromSingleLeaf(Block[] blocks, int shard, int segment, BlockLoa } } + SourceLoader sourceLoader = null; + if (storedFieldsSpec.requiresSource()) { + sourceLoader = shardContexts.get(shard).newSourceLoader.get(); + 
storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(true, false, sourceLoader.requiredStoredFields())); + } + if (rowStrideReaders.isEmpty()) { return; } @@ -259,7 +265,7 @@ private void loadFromSingleLeaf(Block[] blocks, int shard, int segment, BlockLoa } BlockLoaderStoredFieldsFromLeafLoader storedFields = new BlockLoaderStoredFieldsFromLeafLoader( storedFieldLoader.getLoader(ctx, null), - storedFieldsSpec.requiresSource() ? shardContexts.get(shard).newSourceLoader.get().leaf(ctx.reader(), null) : null + sourceLoader != null ? sourceLoader.leaf(ctx.reader(), null) : null ); for (int p = 0; p < docs.count(); p++) { int doc = docs.get(p); @@ -381,13 +387,18 @@ private void fieldsMoved(LeafReaderContext ctx, int shard) throws IOException { FieldWork field = fields[f]; rowStride[f] = field.rowStride(ctx); storedFieldsSpec = storedFieldsSpec.merge(field.loader.rowStrideStoredFieldSpec()); - storedFields = new BlockLoaderStoredFieldsFromLeafLoader( - StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(ctx, null), - storedFieldsSpec.requiresSource() ? shardContexts.get(shard).newSourceLoader.get().leaf(ctx.reader(), null) : null - ); - if (false == storedFieldsSpec.equals(StoredFieldsSpec.NO_REQUIREMENTS)) { - trackStoredFields(storedFieldsSpec, false); - } + } + SourceLoader sourceLoader = null; + if (storedFieldsSpec.requiresSource()) { + sourceLoader = shardContexts.get(shard).newSourceLoader.get(); + storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(true, false, sourceLoader.requiredStoredFields())); + } + storedFields = new BlockLoaderStoredFieldsFromLeafLoader( + StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(ctx, null), + sourceLoader != null ? 
sourceLoader.leaf(ctx.reader(), null) : null + ); + if (false == storedFieldsSpec.equals(StoredFieldsSpec.NO_REQUIREMENTS)) { + trackStoredFields(storedFieldsSpec, false); } } diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index e4223f03c3a03..fb47255e8d52e 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -1,5 +1,6 @@ import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -27,7 +28,7 @@ GradleUtils.extendSourceSet(project, "javaRestTest", "yamlRestTest") // ESQL is available in 8.11 or later def supportedVersion = bwcVersion -> { - return bwcVersion.onOrAfter(Version.fromString("8.11.0")); + return bwcVersion.onOrAfter(Version.fromString("8.11.0")) && bwcVersion != VersionProperties.elasticsearchVersion } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv index 83a2f3cb1c281..029c3baf3cbfb 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv @@ -7,3 +7,4 @@ millis:date,nanos:date_nanos,num:long 2023-10-23T12:27:28.948Z,2023-10-23T12:27:28.948000000Z,1698064048948000000 2023-10-23T12:15:03.360Z,2023-10-23T12:15:03.360103847Z,1698063303360103847 1999-10-23T12:15:03.360Z,[2023-03-23T12:15:03.360103847Z, 2023-02-23T13:33:34.937193000Z, 2023-01-23T13:55:01.543123456Z], 0 +1999-10-22T12:15:03.360Z,[2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z], 0 diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index ad7149b0f742f..515e2c9c6587f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -38,9 +38,10 @@ nanos:date_nanos mv_min on date nanos required_capability: date_nanos_type -FROM date_nanos | SORT millis ASC | EVAL nanos = MV_MIN(nanos) | KEEP nanos | LIMIT 1; +FROM date_nanos | SORT millis ASC | WHERE millis < "2000-01-01" | EVAL nanos = MV_MIN(nanos) | KEEP nanos; nanos:date_nanos +2023-03-23T12:15:03.360103847Z 2023-01-23T13:55:01.543123456Z ; @@ -56,9 +57,10 @@ ct:integer mv_first on date nanos required_capability: date_nanos_type -FROM date_nanos | SORT millis ASC | EVAL nanos = MV_FIRST(nanos) | KEEP nanos | LIMIT 1; +FROM date_nanos | SORT millis ASC | WHERE millis < "2000-01-01" | EVAL nanos = MV_FIRST(nanos) | KEEP nanos; nanos:date_nanos +2023-03-23T12:15:03.360103847Z 2023-01-23T13:55:01.543123456Z ; @@ -263,3 +265,69 @@ ROW a = TO_DATE_NANOS(null), b = TO_DATE_NANOS(null + 1::long), c = TO_DATE_NANO a:date_nanos | b:date_nanos | c:date_nanos null | null | null ; + +Coalesce date nanos +required_capability: to_date_nanos + +ROW a = COALESCE(null, TO_DATE_NANOS(1698069301543123456)); + +a:date_nanos +2023-10-23T13:55:01.543123456Z +; + +Case date nanos result +required_capability: to_date_nanos + +ROW a = CASE(false, TO_DATE_NANOS(0::long), TO_DATE_NANOS(1698069301543123456)); + +a:date_nanos +2023-10-23T13:55:01.543123456Z +; + +Greatest date nanos +required_capability: least_greatest_for_datenanos + +ROW a = GREATEST(TO_DATE_NANOS("2023-10-23T13:55:01.543123456"), TO_DATE_NANOS("2023-10-23T13:53:55.832987654")); + +a:date_nanos +2023-10-23T13:55:01.543123456Z +; + +Least date nanos +required_capability: least_greatest_for_datenanos + +ROW a = 
LEAST(TO_DATE_NANOS("2023-10-23T13:55:01.543123456"), TO_DATE_NANOS("2023-10-23T13:53:55.832987654")); + +a:date_nanos +2023-10-23T13:53:55.832987654Z +; + +mv_dedup over date nanos +required_capability: date_nanos_type + +FROM date_nanos | WHERE millis < "2000-01-01" | EVAL a = MV_DEDUPE(nanos) | SORT millis DESC | KEEP a; + +a:date_nanos +[2023-01-23T13:55:01.543123456Z, 2023-02-23T13:33:34.937193000Z, 2023-03-23T12:15:03.360103847Z] +2023-03-23T12:15:03.360103847Z +; + +mv_sort over date nanos +required_capability: date_nanos_type + +FROM date_nanos | WHERE millis < "2000-01-01" | EVAL a = MV_SORT(nanos, "asc") | SORT millis DESC | KEEP a; + +a:date_nanos +[2023-01-23T13:55:01.543123456Z, 2023-02-23T13:33:34.937193000Z, 2023-03-23T12:15:03.360103847Z] +[2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z] +; + +mv_slice over date nanos +required_capability: date_nanos_type + +FROM date_nanos | WHERE millis < "2000-01-01" | EVAL a = MV_SLICE(MV_SORT(nanos, "asc"), 1, 2) | SORT millis DESC | KEEP a; + +a:date_nanos +[2023-02-23T13:33:34.937193000Z, 2023-03-23T12:15:03.360103847Z] +[2023-03-23T12:15:03.360103847Z, 2023-03-23T12:15:03.360103847Z] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index ac4351413129e..6d4c596e8d7de 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2291,6 +2291,33 @@ m:integer |a:double |x:integer 74999 |48249.0 |0 ; +statsWithFilterOnGroups +required_capability: fix_filter_pushdown_past_stats +FROM employees +| STATS v = VALUES(emp_no) by job_positions | WHERE job_positions == "Accountant" | MV_EXPAND v | SORT v +; + +v:integer | job_positions:keyword + 10001 | Accountant + 10012 | Accountant + 10016 | Accountant + 10023 | Accountant + 10025 | Accountant + 10028 | Accountant + 
10034 | Accountant + 10037 | Accountant + 10044 | Accountant + 10045 | Accountant + 10050 | Accountant + 10051 | Accountant + 10066 | Accountant + 10081 | Accountant + 10085 | Accountant + 10089 | Accountant + 10092 | Accountant + 10094 | Accountant +; + statsWithFiltering required_capability: per_agg_filtering diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 5313e6630c75d..dd9d519649c01 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1236,6 +1236,7 @@ off_on_holiday:keyword | back_home_again:keyword reverseGraphemeClusters required_capability: fn_reverse +required_capability: fn_reverse_grapheme_clusters ROW message = "áéíóúàèìòùâêîôû😊👍🏽🎉💖कंठाी" | EVAL message_reversed = REVERSE(message); message:keyword | message_reversed:keyword diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 3c39406198da3..5157a80022c39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -32,6 +32,13 @@ public enum Cap { */ FN_REVERSE, + /** + * Support for reversing whole grapheme clusters. This is not supported + * on JDK versions less than 20 which are not supported in ES 9.0.0+ but this + * exists to keep the {@code 8.x} branch similar to the {@code main} branch. + */ + FN_REVERSE_GRAPHEME_CLUSTERS, + /** * Support for function {@code CBRT}. Done in #108574. 
*/ @@ -133,7 +140,7 @@ public enum Cap { * - fixed variable shadowing * - fixed Join.references(), requiring breaking change to Join serialization */ - LOOKUP_V4(true), + LOOKUP_V4(Build.current().isSnapshot()), /** * Support for requesting the "REPEAT" command. @@ -279,7 +286,7 @@ public enum Cap { /** * Support for match operator */ - MATCH_OPERATOR(true), + MATCH_OPERATOR(Build.current().isSnapshot()), /** * Removing support for the {@code META} keyword. @@ -301,6 +308,11 @@ public enum Cap { */ TO_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** + * Support Least and Greatest functions on Date Nanos type + */ + LEAST_GREATEST_FOR_DATENANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** * Support for datetime in least and greatest functions */ @@ -349,7 +361,7 @@ public enum Cap { /** * Supported the text categorization function "CATEGORIZE". */ - CATEGORIZE(true), + CATEGORIZE(Build.current().isSnapshot()), /** * QSTR function @@ -375,7 +387,7 @@ public enum Cap { /** * Support named parameters for field names. */ - NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES(true), + NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES(Build.current().isSnapshot()), /** * Fix sorting not allowed on _source and counters. 
@@ -399,34 +411,29 @@ public enum Cap { /** * Support for semantic_text field mapping */ - SEMANTIC_TEXT_TYPE(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG); + SEMANTIC_TEXT_TYPE(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG), + /** + * Fix for an optimization that caused wrong results + * https://github.com/elastic/elasticsearch/issues/115281 + */ + FIX_FILTER_PUSHDOWN_PAST_STATS; - private final boolean snapshotOnly; - private final FeatureFlag featureFlag; + private final boolean enabled; Cap() { - this(false, null); + this.enabled = true; }; - Cap(boolean snapshotOnly) { - this(snapshotOnly, null); + Cap(boolean enabled) { + this.enabled = enabled; }; Cap(FeatureFlag featureFlag) { - this(false, featureFlag); - } - - Cap(boolean snapshotOnly, FeatureFlag featureFlag) { - assert featureFlag == null || snapshotOnly == false; - this.snapshotOnly = snapshotOnly; - this.featureFlag = featureFlag; + this.enabled = featureFlag.isEnabled(); } public boolean isEnabled() { - if (featureFlag == null) { - return Build.current().isSnapshot() || this.snapshotOnly == false; - } - return featureFlag.isEnabled(); + return enabled; } public String capabilityName() { @@ -434,12 +441,17 @@ public String capabilityName() { } } - public static final Set CAPABILITIES = capabilities(); + public static final Set CAPABILITIES = capabilities(false); - private static Set capabilities() { + /** + * Get a {@link Set} of all capabilities. If the {@code all} parameter is {@code false} + * then only enabled capabilities are returned - otherwise all + * known capabilities are returned. 
+ */ + public static Set capabilities(boolean all) { List caps = new ArrayList<>(); for (Cap cap : Cap.values()) { - if (cap.isEnabled()) { + if (all || cap.isEnabled()) { caps.add(cap.capabilityName()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index d47ebeab4ca6c..aad2d37d414b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -43,7 +43,7 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Returns the maximum value from multiple columns. This is similar to <>\n" + "except it is intended to run on multiple columns at once.", note = "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. " @@ -54,12 +54,12 @@ public Greatest( Source source, @Param( name = "first", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "First of the columns to evaluate." 
) Expression first, @Param( name = "rest", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "The rest of the columns to evaluate.", optional = true ) List rest @@ -152,7 +152,7 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType == DataType.INTEGER) { return new GreatestIntEvaluator.Factory(source(), factories); } - if (dataType == DataType.LONG || dataType == DataType.DATETIME) { + if (dataType == DataType.LONG || dataType == DataType.DATETIME || dataType == DataType.DATE_NANOS) { return new GreatestLongEvaluator.Factory(source(), factories); } if (DataType.isString(dataType) || dataType == DataType.IP || dataType == DataType.VERSION || dataType == DataType.UNSUPPORTED) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 81c1419dcf788..70ba9319385f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -43,7 +43,7 @@ public class Least extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Returns the minimum value from multiple columns. 
" + "This is similar to <> except it is intended to run on multiple columns at once.", examples = @Example(file = "math", tag = "least") @@ -52,12 +52,12 @@ public Least( Source source, @Param( name = "first", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "First of the columns to evaluate." ) Expression first, @Param( name = "rest", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "The rest of the columns to evaluate.", optional = true ) List rest @@ -151,7 +151,7 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { if (dataType == DataType.INTEGER) { return new LeastIntEvaluator.Factory(source(), factories); } - if (dataType == DataType.LONG || dataType == DataType.DATETIME) { + if (dataType == DataType.LONG || dataType == DataType.DATETIME || dataType == DataType.DATE_NANOS) { return new LeastLongEvaluator.Factory(source(), factories); } if (DataType.isString(dataType) || dataType == DataType.IP || dataType == DataType.VERSION || dataType == DataType.UNSUPPORTED) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index b17ddddb422ce..34b89b4f78997 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -38,6 +38,7 @@ public class MvDedupe extends AbstractMultivalueFunction { "cartesian_point", "cartesian_shape", 
"date", + "date_nanos", "double", "geo_point", "geo_shape", @@ -60,6 +61,7 @@ public MvDedupe( "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index a829b6f1417b9..ef562c339dfd9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -59,6 +59,7 @@ public class MvSlice extends EsqlScalarFunction implements OptionalArgument, Eva "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", @@ -87,6 +88,7 @@ public MvSlice( "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index d9e41233952de..5ca5618bf2a54 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -69,7 +69,7 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Vali private static final String INVALID_ORDER_ERROR = "Invalid order value in [{}], expected one of [{}, {}] but got [{}]"; @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", 
"text", "version" }, description = "Sorts a multivalued field in lexicographical order.", examples = @Example(file = "ints", tag = "mv_sort") ) @@ -77,7 +77,7 @@ public MvSort( Source source, @Param( name = "field", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }, description = "Multivalue expression. If `null`, the function returns `null`." ) Expression field, @Param( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 575bb085c41f7..6b9c8d0da025b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -53,6 +53,7 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { "boolean", "cartesian_point", "cartesian_shape", + "date_nanos", "date", "geo_point", "geo_shape", @@ -73,6 +74,7 @@ public Coalesce( "boolean", "cartesian_point", "cartesian_shape", + "date_nanos", "date", "geo_point", "geo_shape", @@ -90,6 +92,7 @@ public Coalesce( "boolean", "cartesian_point", "cartesian_shape", + "date_nanos", "date", "geo_point", "geo_shape", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java index ed09d0bc16754..15e49c22a44db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java @@ -15,8 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Filter; @@ -38,18 +36,13 @@ protected LogicalPlan rule(Filter filter) { LogicalPlan child = filter.child(); Expression condition = filter.condition(); + // TODO: Push down past STATS if the filter is only on the groups; but take into account how `STATS ... BY field` handles + // multi-values: It seems to be equivalent to `EVAL field = MV_DEDUPE(field) | MV_EXPAND(field) | STATS ... BY field`, where the + // last `STATS ... BY field` can assume that `field` is single-valued (to be checked more thoroughly). + // https://github.com/elastic/elasticsearch/issues/115311 if (child instanceof Filter f) { // combine nodes into a single Filter with updated ANDed condition plan = f.with(Predicates.combineAnd(List.of(f.condition(), condition))); - } else if (child instanceof Aggregate agg) { // TODO: re-evaluate along with multi-value support - // Only push [parts of] a filter past an agg if these/it operates on agg's grouping[s], not output. - plan = maybePushDownPastUnary( - filter, - agg, - e -> e instanceof Attribute && agg.output().contains(e) && agg.groupings().contains(e) == false - || e instanceof AggregateFunction, - NO_OP - ); } else if (child instanceof Eval eval) { // Don't push if Filter (still) contains references to Eval's fields. // Account for simple aliases in the Eval, though - these shouldn't stop us. 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index ce072e7b0a438..63233f0c46a0d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -257,7 +257,7 @@ public final void test() throws Throwable { assertThat( "Capability is not included in the enabled list capabilities on a snapshot build. Spelling mistake?", testCase.requiredCapabilities, - everyItem(in(EsqlCapabilities.CAPABILITIES)) + everyItem(in(EsqlCapabilities.capabilities(true))) ); } else { for (EsqlCapabilities.Cap c : EsqlCapabilities.Cap.values()) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index 1c917a961a343..db5d8e03458ea 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -163,15 +163,7 @@ private void aggregateSingleMode(Expression expression) { result = extractResultFromAggregator(aggregator, PlannerUtils.toElementType(testCase.expectedType())); } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } + assertTestCaseResultAndWarnings(result); } private void 
aggregateGroupingSingleMode(Expression expression) { @@ -263,15 +255,7 @@ private void aggregateWithIntermediates(Expression expression) { result = extractResultFromAggregator(aggregator, PlannerUtils.toElementType(testCase.expectedType())); } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } + assertTestCaseResultAndWarnings(result); } private void evaluate(Expression evaluableExpression) { @@ -288,15 +272,7 @@ private void evaluate(Expression evaluableExpression) { if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) { result = NumericUtils.unsignedLongAsBigInteger((Long) result); } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } + assertTestCaseResultAndWarnings(result); } private void resolveExpression(Expression expression, Consumer onAggregator, Consumer onEvaluableExpression) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 84a41ef040c8e..c05f8e0990b3c 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -99,8 +99,10 @@ import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; /** @@ -722,17 +724,19 @@ public static void testFunctionInfo() { for (int i = 0; i < args.size() && i < types.size(); i++) { typesFromSignature.get(i).add(types.get(i).esNameIfPossible()); } - returnFromSignature.add(entry.getValue().esNameIfPossible()); + if (DataType.UNDER_CONSTRUCTION.containsKey(entry.getValue()) == false) { + returnFromSignature.add(entry.getValue().esNameIfPossible()); + } } for (int i = 0; i < args.size(); i++) { EsqlFunctionRegistry.ArgSignature arg = args.get(i); Set annotationTypes = Arrays.stream(arg.type()) - .filter(DataType.UNDER_CONSTRUCTION::containsKey) + .filter(t -> DataType.UNDER_CONSTRUCTION.containsKey(DataType.fromNameOrAlias(t)) == false) .collect(Collectors.toCollection(TreeSet::new)); Set signatureTypes = typesFromSignature.get(i) .stream() - .filter(DataType.UNDER_CONSTRUCTION::containsKey) + .filter(t -> DataType.UNDER_CONSTRUCTION.containsKey(DataType.fromNameOrAlias(t)) == false) .collect(Collectors.toCollection(TreeSet::new)); if (signatureTypes.isEmpty()) { log.info("{}: skipping", arg.name()); @@ -746,10 +750,40 @@ public static void testFunctionInfo() { ); } - Set returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(TreeSet::new)); + Set returnTypes = Arrays.stream(description.returnType()) + .filter(t -> DataType.UNDER_CONSTRUCTION.containsKey(DataType.fromNameOrAlias(t)) == 
false) + .collect(Collectors.toCollection(TreeSet::new)); assertEquals(returnFromSignature, returnTypes); } + /** + * Asserts the result of a test case matches the expected result and warnings. + *

+ * The {@code result} parameter should be an object as returned by {@link #toJavaObjectUnsignedLongAware}. + *

+ */ + @SuppressWarnings("unchecked") + protected final void assertTestCaseResultAndWarnings(Object result) { + if (result instanceof Iterable) { + var collectionResult = (Iterable) result; + assertThat(collectionResult, not(hasItem(Double.NaN))); + assertThat(collectionResult, not(hasItem(Double.POSITIVE_INFINITY))); + assertThat(collectionResult, not(hasItem(Double.NEGATIVE_INFINITY))); + } + + assert testCase.getMatcher().matches(Double.NaN) == false; + assertThat(result, not(equalTo(Double.NaN))); + assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); + assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); + assertThat(result, testCase.getMatcher()); + + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } + protected final void assertTypeResolutionFailure(Expression expression) { assertTrue("expected unresolved", expression.typeResolved().unresolved()); assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java index 85db73901352b..65e8a53fc05c5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java @@ -41,7 +41,6 @@ import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static 
org.hamcrest.Matchers.sameInstance; @@ -127,15 +126,7 @@ public final void testEvaluate() { result = toJavaObjectUnsignedLongAware(block, 0); } } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } + assertTestCaseResultAndWarnings(result); } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java index e740533462746..7fe67707a7976 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java @@ -19,6 +19,7 @@ import java.math.BigInteger; import java.util.ArrayList; import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomList; @@ -37,56 +38,36 @@ public static List intCases(int minRows, int maxRows, int min List cases = new ArrayList<>(); if (0 <= max && 0 >= min && includeZero) { - cases.add(new TypedDataSupplier("<0 ints>", () -> randomList(minRows, maxRows, () -> 0), DataType.INTEGER, false, true)); + addSuppliers(cases, minRows, maxRows, "0 int", DataType.INTEGER, () -> 0); } if (max != 0) { - cases.add( - new TypedDataSupplier("<" + max + " ints>", () -> randomList(minRows, maxRows, () -> max), DataType.INTEGER, false, true) - ); + addSuppliers(cases, minRows, maxRows, 
max + " int", DataType.INTEGER, () -> max); } if (min != 0 && min != max) { - cases.add( - new TypedDataSupplier("<" + min + " ints>", () -> randomList(minRows, maxRows, () -> min), DataType.INTEGER, false, true) - ); + addSuppliers(cases, minRows, maxRows, min + " int", DataType.INTEGER, () -> min); } int lower = Math.max(min, 1); int upper = Math.min(max, Integer.MAX_VALUE); if (lower < upper) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomIntBetween(lower, upper)), - DataType.INTEGER, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "positive int", DataType.INTEGER, () -> ESTestCase.randomIntBetween(lower, upper)); } int lower1 = Math.max(min, Integer.MIN_VALUE); int upper1 = Math.min(max, -1); if (lower1 < upper1) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomIntBetween(lower1, upper1)), - DataType.INTEGER, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "negative int", DataType.INTEGER, () -> ESTestCase.randomIntBetween(lower1, upper1)); } if (min < 0 && max > 0) { - cases.add(new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> { + addSuppliers(cases, minRows, maxRows, "random int", DataType.INTEGER, () -> { if (includeZero) { return ESTestCase.randomIntBetween(min, max); } return randomBoolean() ? 
ESTestCase.randomIntBetween(min, -1) : ESTestCase.randomIntBetween(1, max); - }), DataType.INTEGER, false, true)); + }); } return cases; @@ -96,56 +77,36 @@ public static List longCases(int minRows, int maxRows, long m List cases = new ArrayList<>(); if (0 <= max && 0 >= min && includeZero) { - cases.add(new TypedDataSupplier("<0 longs>", () -> randomList(minRows, maxRows, () -> 0L), DataType.LONG, false, true)); + addSuppliers(cases, minRows, maxRows, "0 long", DataType.LONG, () -> 0L); } if (max != 0) { - cases.add( - new TypedDataSupplier("<" + max + " longs>", () -> randomList(minRows, maxRows, () -> max), DataType.LONG, false, true) - ); + addSuppliers(cases, minRows, maxRows, max + " long", DataType.LONG, () -> max); } if (min != 0 && min != max) { - cases.add( - new TypedDataSupplier("<" + min + " longs>", () -> randomList(minRows, maxRows, () -> min), DataType.LONG, false, true) - ); + addSuppliers(cases, minRows, maxRows, min + " long", DataType.LONG, () -> min); } long lower = Math.max(min, 1); long upper = Math.min(max, Long.MAX_VALUE); if (lower < upper) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(lower, upper)), - DataType.LONG, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "positive long", DataType.LONG, () -> ESTestCase.randomLongBetween(lower, upper)); } long lower1 = Math.max(min, Long.MIN_VALUE); long upper1 = Math.min(max, -1); if (lower1 < upper1) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(lower1, upper1)), - DataType.LONG, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "negative long", DataType.LONG, () -> ESTestCase.randomLongBetween(lower1, upper1)); } if (min < 0 && max > 0) { - cases.add(new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> { + addSuppliers(cases, minRows, maxRows, "random long", DataType.LONG, () -> { if (includeZero) { return 
ESTestCase.randomLongBetween(min, max); } return randomBoolean() ? ESTestCase.randomLongBetween(min, -1) : ESTestCase.randomLongBetween(1, max); - }), DataType.LONG, false, true)); + }); } return cases; @@ -156,29 +117,20 @@ public static List ulongCases(int minRows, int maxRows, BigIn // Zero if (BigInteger.ZERO.compareTo(max) <= 0 && BigInteger.ZERO.compareTo(min) >= 0 && includeZero) { - cases.add( - new TypedDataSupplier( - "<0 unsigned longs>", - () -> randomList(minRows, maxRows, () -> BigInteger.ZERO), - DataType.UNSIGNED_LONG, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "0 unsigned long", DataType.UNSIGNED_LONG, () -> BigInteger.ZERO); } // Small values, less than Long.MAX_VALUE BigInteger lower1 = min.max(BigInteger.ONE); BigInteger upper1 = max.min(BigInteger.valueOf(Long.MAX_VALUE)); if (lower1.compareTo(upper1) < 0) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomUnsignedLongBetween(lower1, upper1)), - DataType.UNSIGNED_LONG, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "small unsigned long", + DataType.UNSIGNED_LONG, + () -> ESTestCase.randomUnsignedLongBetween(lower1, upper1) ); } @@ -186,14 +138,13 @@ public static List ulongCases(int minRows, int maxRows, BigIn BigInteger lower2 = min.max(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE)); BigInteger upper2 = max.min(ESTestCase.UNSIGNED_LONG_MAX); if (lower2.compareTo(upper2) < 0) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomUnsignedLongBetween(lower2, upper2)), - DataType.UNSIGNED_LONG, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "big unsigned long", + DataType.UNSIGNED_LONG, + () -> ESTestCase.randomUnsignedLongBetween(lower2, upper2) ); } @@ -204,85 +155,77 @@ public static List doubleCases(int minRows, int maxRows, doub List cases = new ArrayList<>(); if (0d <= max && 0d >= min && includeZero) { - 
cases.add(new TypedDataSupplier("<0 doubles>", () -> randomList(minRows, maxRows, () -> 0d), DataType.DOUBLE, false, true)); - cases.add(new TypedDataSupplier("<-0 doubles>", () -> randomList(minRows, maxRows, () -> -0d), DataType.DOUBLE, false, true)); + addSuppliers(cases, minRows, maxRows, "0 double", DataType.DOUBLE, () -> 0d); + addSuppliers(cases, minRows, maxRows, "-0 double", DataType.DOUBLE, () -> -0d); } if (max != 0d) { - cases.add( - new TypedDataSupplier("<" + max + " doubles>", () -> randomList(minRows, maxRows, () -> max), DataType.DOUBLE, false, true) - ); + addSuppliers(cases, minRows, maxRows, max + " double", DataType.DOUBLE, () -> max); } if (min != 0d && min != max) { - cases.add( - new TypedDataSupplier("<" + min + " doubles>", () -> randomList(minRows, maxRows, () -> min), DataType.DOUBLE, false, true) - ); + addSuppliers(cases, minRows, maxRows, min + " double", DataType.DOUBLE, () -> min); } double lower1 = Math.max(min, 0d); double upper1 = Math.min(max, 1d); if (lower1 < upper1) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomDoubleBetween(lower1, upper1, true)), - DataType.DOUBLE, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "small positive double", + DataType.DOUBLE, + () -> ESTestCase.randomDoubleBetween(lower1, upper1, true) ); } double lower2 = Math.max(min, -1d); double upper2 = Math.min(max, 0d); if (lower2 < upper2) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomDoubleBetween(lower2, upper2, true)), - DataType.DOUBLE, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "small negative double", + DataType.DOUBLE, + () -> ESTestCase.randomDoubleBetween(lower2, upper2, true) ); } double lower3 = Math.max(min, 1d); double upper3 = Math.min(max, Double.MAX_VALUE); if (lower3 < upper3) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> 
ESTestCase.randomDoubleBetween(lower3, upper3, true)), - DataType.DOUBLE, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "big positive double", + DataType.DOUBLE, + () -> ESTestCase.randomDoubleBetween(lower3, upper3, true) ); } double lower4 = Math.max(min, -Double.MAX_VALUE); double upper4 = Math.min(max, -1d); if (lower4 < upper4) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> ESTestCase.randomDoubleBetween(lower4, upper4, true)), - DataType.DOUBLE, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "big negative double", + DataType.DOUBLE, + () -> ESTestCase.randomDoubleBetween(lower4, upper4, true) ); } if (min < 0 && max > 0) { - cases.add(new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> { + addSuppliers(cases, minRows, maxRows, "random double", DataType.DOUBLE, () -> { if (includeZero) { return ESTestCase.randomDoubleBetween(min, max, true); } return randomBoolean() ? ESTestCase.randomDoubleBetween(min, -1, true) : ESTestCase.randomDoubleBetween(1, max, true); - }), DataType.DOUBLE, false, true)); + }); } return cases; @@ -291,149 +234,126 @@ public static List doubleCases(int minRows, int maxRows, doub public static List dateCases(int minRows, int maxRows) { List cases = new ArrayList<>(); - cases.add( - new TypedDataSupplier( - "<1970-01-01T00:00:00Z dates>", - () -> randomList(minRows, maxRows, () -> 0L), - DataType.DATETIME, - false, - true - ) - ); + addSuppliers(cases, minRows, maxRows, "1970-01-01T00:00:00Z date", DataType.DATETIME, () -> 0L); - cases.add( - new TypedDataSupplier( - "", - // 1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11)), - DataType.DATETIME, - false, - true - ) - ); + // 1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z + addSuppliers(cases, minRows, maxRows, "random date", DataType.DATETIME, () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11)); 
- cases.add( - new TypedDataSupplier( - "", - // 2286-11-20T17:46:40Z - +292278994-08-17T07:12:55.807Z - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE)), - DataType.DATETIME, - false, - true - ) + // 2286-11-20T17:46:40Z - +292278994-08-17T07:12:55.807Z + addSuppliers( + cases, + minRows, + maxRows, + "far future date", + DataType.DATETIME, + () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE) ); - cases.add( - new TypedDataSupplier( - "", - // very close to +292278994-08-17T07:12:55.807Z, the maximum supported millis since epoch - () -> randomList(minRows, maxRows, () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE)), - DataType.DATETIME, - false, - true - ) + // Very close to +292278994-08-17T07:12:55.807Z, the maximum supported millis since epoch + addSuppliers( + cases, + minRows, + maxRows, + "near the end of time date", + DataType.DATETIME, + () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE) ); return cases; } public static List booleanCases(int minRows, int maxRows) { - return List.of( - new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> true), DataType.BOOLEAN, false, true), - new TypedDataSupplier("", () -> randomList(minRows, maxRows, () -> false), DataType.BOOLEAN, false, true), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, ESTestCase::randomBoolean), - DataType.BOOLEAN, - false, - true - ) - ); + List cases = new ArrayList<>(); + + addSuppliers(cases, minRows, maxRows, "true boolean", DataType.BOOLEAN, () -> true); + addSuppliers(cases, minRows, maxRows, "false boolean", DataType.BOOLEAN, () -> false); + addSuppliers(cases, minRows, maxRows, "random boolean", DataType.BOOLEAN, ESTestCase::randomBoolean); + + return cases; } public static List ipCases(int minRows, int maxRows) { - return List.of( - new TypedDataSupplier( - "<127.0.0.1 ips>", - () -> randomList(minRows, maxRows, () -> new 
BytesRef(InetAddressPoint.encode(InetAddresses.forString("127.0.0.1")))), - DataType.IP, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(true)))), - DataType.IP, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(false)))), - DataType.IP, - false, - true - ) + List cases = new ArrayList<>(); + + addSuppliers( + cases, + minRows, + maxRows, + "127.0.0.1 ip", + DataType.IP, + () -> new BytesRef(InetAddressPoint.encode(InetAddresses.forString("127.0.0.1"))) + ); + addSuppliers( + cases, + minRows, + maxRows, + "random v4 ip", + DataType.IP, + () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(true))) + ); + addSuppliers( + cases, + minRows, + maxRows, + "random v6 ip", + DataType.IP, + () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(false))) ); + + return cases; } public static List versionCases(int minRows, int maxRows) { - return List.of( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new Version(Integer.toString(ESTestCase.between(0, 100))).toBytesRef()), - DataType.VERSION, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList( - minRows, - maxRows, - () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef() - ), - DataType.VERSION, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList( - minRows, - maxRows, - () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100) + "." 
+ ESTestCase.between(0, 100)) - .toBytesRef() - ), - DataType.VERSION, - false, - true - ) + List cases = new ArrayList<>(); + + addSuppliers( + cases, + minRows, + maxRows, + "major version", + DataType.VERSION, + () -> new Version(Integer.toString(ESTestCase.between(0, 100))).toBytesRef() ); + addSuppliers( + cases, + minRows, + maxRows, + "major.minor version", + DataType.VERSION, + () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef() + ); + addSuppliers( + cases, + minRows, + maxRows, + "major.minor.patch version", + DataType.VERSION, + () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef() + ); + + return cases; } public static List geoPointCases(int minRows, int maxRows, boolean withAltitude) { List cases = new ArrayList<>(); - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> GEO.asWkb(GeometryTestUtils.randomPoint(false))), - DataType.GEO_POINT, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.GEO_POINT, + () -> GEO.asWkb(GeometryTestUtils.randomPoint(false)) ); if (withAltitude) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> GEO.asWkb(GeometryTestUtils.randomPoint(false))), - DataType.GEO_POINT, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.GEO_POINT, + () -> GEO.asWkb(GeometryTestUtils.randomPoint(true)) ); } @@ -443,25 +363,23 @@ public static List geoPointCases(int minRows, int maxRows, bo public static List cartesianPointCases(int minRows, int maxRows, boolean withAltitude) { List cases = new ArrayList<>(); - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(false))), - DataType.CARTESIAN_POINT, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.CARTESIAN_POINT, + () -> 
CARTESIAN.asWkb(ShapeTestUtils.randomPoint(false)) ); if (withAltitude) { - cases.add( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(true))), - DataType.CARTESIAN_POINT, - false, - true - ) + addSuppliers( + cases, + minRows, + maxRows, + "", + DataType.CARTESIAN_POINT, + () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(true)) ); } @@ -471,59 +389,64 @@ public static List cartesianPointCases(int minRows, int maxRo public static List stringCases(int minRows, int maxRows, DataType type) { List cases = new ArrayList<>(); - cases.addAll( - List.of( - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef("")), - type, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(1, 30))), - type, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList(minRows, maxRows, () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(1, 30))), - type, - false, - true - ) - ) + addSuppliers(cases, minRows, maxRows, "empty " + type, type, () -> new BytesRef("")); + addSuppliers( + cases, + minRows, + maxRows, + "short alpha " + type, + type, + () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(1, 30)) + ); + addSuppliers( + cases, + minRows, + maxRows, + "short unicode " + type, + type, + () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(1, 30)) ); if (minRows <= 100) { var longStringsMaxRows = Math.min(maxRows, 100); - cases.addAll( - List.of( - new TypedDataSupplier( - "", - () -> randomList(minRows, longStringsMaxRows, () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(300, 1000))), - type, - false, - true - ), - new TypedDataSupplier( - "", - () -> randomList( - minRows, - longStringsMaxRows, - () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(300, 1000)) - ), - type, - false, - true - ) - ) + addSuppliers( + cases, + 
minRows, + longStringsMaxRows, + "long alpha " + type, + type, + () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(300, 1000)) + ); + addSuppliers( + cases, + minRows, + longStringsMaxRows, + "long unicode " + type, + type, + () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(300, 1000)) ); } return cases; } + + private static void addSuppliers( + List cases, + int minRows, + int maxRows, + String name, + DataType type, + Supplier valueSupplier + ) { + if (minRows <= 1 && maxRows >= 1) { + cases.add(new TypedDataSupplier("", () -> randomList(1, 1, valueSupplier), type, false, true)); + } + + if (maxRows > 1) { + cases.add( + new TypedDataSupplier("<" + name + "s>", () -> randomList(Math.max(2, minRows), maxRows, valueSupplier), type, false, true) + ); + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java index 80737dac1aa58..ac599c7ff05f8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java @@ -64,25 +64,27 @@ protected Expression build(Source source, List args) { private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { var fieldTypedData = fieldSupplier.get(); + var fieldData = fieldTypedData.multiRowData(); - Object expected = switch (fieldTypedData.type().widenSmallNumeric()) { - case INTEGER -> fieldTypedData.multiRowData() - .stream() - .map(v -> (Integer) v) - .collect(Collectors.summarizingInt(Integer::intValue)) - .getAverage(); - case LONG -> fieldTypedData.multiRowData() - .stream() - .map(v -> (Long) v) - .collect(Collectors.summarizingLong(Long::longValue)) - 
.getAverage(); - case DOUBLE -> fieldTypedData.multiRowData() - .stream() - .map(v -> (Double) v) - .collect(Collectors.summarizingDouble(Double::doubleValue)) - .getAverage(); - default -> throw new IllegalStateException("Unexpected value: " + fieldTypedData.type()); - }; + Object expected = null; + + if (fieldData.size() == 1) { + // For single elements, we directly return them to avoid precision issues + expected = ((Number) fieldData.get(0)).doubleValue(); + } else if (fieldData.size() > 1) { + expected = switch (fieldTypedData.type().widenSmallNumeric()) { + case INTEGER -> fieldData.stream() + .map(v -> (Integer) v) + .collect(Collectors.summarizingInt(Integer::intValue)) + .getAverage(); + case LONG -> fieldData.stream().map(v -> (Long) v).collect(Collectors.summarizingLong(Long::longValue)).getAverage(); + case DOUBLE -> fieldData.stream() + .map(v -> (Double) v) + .collect(Collectors.summarizingDouble(Double::doubleValue)) + .getAverage(); + default -> throw new IllegalStateException("Unexpected value: " + fieldTypedData.type()); + }; + } return new TestCaseSupplier.TestCase( List.of(fieldTypedData), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index db3fce244c9a8..fbb7c691b1d94 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -46,6 +46,7 @@ public class CaseTests extends AbstractScalarFunctionTestCase { DataType.TEXT, DataType.BOOLEAN, DataType.DATETIME, + DataType.DATE_NANOS, DataType.DOUBLE, DataType.INTEGER, DataType.LONG, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java index 311e3e3d89149..07d6ae34dc1e7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java @@ -115,6 +115,21 @@ public static Iterable parameters() { ) ) ); + suppliers.add( + new TestCaseSupplier( + "(a, b)", + List.of(DataType.DATE_NANOS, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(1727877348000123456L, DataType.DATE_NANOS, "a"), + new TestCaseSupplier.TypedData(1727790948000987654L, DataType.DATE_NANOS, "b") + ), + "GreatestLongEvaluator[values=[MvMax[field=Attribute[channel=0]], MvMax[field=Attribute[channel=1]]]]", + DataType.DATE_NANOS, + equalTo(1727877348000123456L) + ) + ) + ); return parameterSuppliersFromTypedData(anyNullIsNull(false, suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java index 69842fde90312..d95cc79dd22e0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java @@ -114,6 +114,21 @@ public static Iterable parameters() { ) ) ); + suppliers.add( + new TestCaseSupplier( + "(a, b)", + List.of(DataType.DATE_NANOS, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(1727877348000123456L, DataType.DATE_NANOS, "a"), + new TestCaseSupplier.TypedData(1727790948000987654L, DataType.DATE_NANOS, "b") + ), + 
"LeastLongEvaluator[values=[MvMin[field=Attribute[channel=0]], MvMin[field=Attribute[channel=1]]]]", + DataType.DATE_NANOS, + equalTo(1727790948000987654L) + ) + ) + ); return parameterSuppliersFromTypedData(anyNullIsNull(false, suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java index e91a5cc1ebca4..485073d1a91d2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.common.time.DateUtils; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -29,6 +30,9 @@ public ToDateNanosTests(@Name("TestCase") Supplier te @ParametersFactory public static Iterable parameters() { + if (EsqlCapabilities.Cap.TO_DATE_NANOS.isEnabled() == false) { + return List.of(); + } final String read = "Attribute[channel=0]"; final List suppliers = new ArrayList<>(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java index d8d3b607efcc0..f3b44274f3ade 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java @@ -39,6 +39,7 @@ public static Iterable parameters() { booleans(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); bytesRefs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); dateTimes(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); + dateNanos(cases, "mv_dedupe", "MvDedupe", DataType.DATE_NANOS, (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); doubles(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Double::valueOf))); ints(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Integer::valueOf))); longs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java index e5bac422805af..859c79090d62f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java @@ -182,6 +182,23 @@ private static void longs(List suppliers) { equalTo(start == end ? 
field.get(start) : field.subList(start, end + 1)) ); })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.DATE_NANOS, DataType.INTEGER, DataType.INTEGER), () -> { + List field = randomList(1, 10, () -> randomLong()); + int length = field.size(); + int start = randomIntBetween(0, length - 1); + int end = randomIntBetween(start, length - 1); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field, DataType.DATE_NANOS, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") + ), + "MvSliceLongEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", + DataType.DATE_NANOS, + equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) + ); + })); } private static void doubles(List suppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java index d07ed2aeae887..63f538059dddf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java @@ -110,6 +110,20 @@ private static void longs(List suppliers) { equalTo(field.size() == 1 ? 
field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) ); })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.DATE_NANOS, DataType.KEYWORD), () -> { + List field = randomList(1, 10, () -> randomLong()); + BytesRef order = new BytesRef("DESC"); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field, DataType.DATE_NANOS, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() + ), + "MvSortLong[field=Attribute[channel=0], order=false]", + DataType.DATE_NANOS, + equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) + ); + })); } private static void doubles(List suppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index c9b6de64e079d..797c99992815e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -96,6 +96,19 @@ public static Iterable parameters() { equalTo(firstDate == null ? secondDate : firstDate) ); })); + noNullsSuppliers.add(new TestCaseSupplier(List.of(DataType.DATE_NANOS, DataType.DATE_NANOS), () -> { + Long firstDate = randomBoolean() ? null : randomNonNegativeLong(); + Long secondDate = randomNonNegativeLong(); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(firstDate, DataType.DATE_NANOS, "first"), + new TestCaseSupplier.TypedData(secondDate, DataType.DATE_NANOS, "second") + ), + "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + DataType.DATE_NANOS, + equalTo(firstDate == null ? 
secondDate : firstDate) + ); + })); List suppliers = new ArrayList<>(noNullsSuppliers); for (TestCaseSupplier s : noNullsSuppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 8d7c1997f78e3..ff7675504d6ff 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -738,6 +738,7 @@ public void testMultipleCombineLimits() { ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/115311") public void testSelectivelyPushDownFilterPastRefAgg() { // expected plan: "from test | where emp_no > 1 and emp_no < 3 | stats x = count(1) by emp_no | where x > 7" LogicalPlan plan = optimizedPlan(""" @@ -790,6 +791,7 @@ public void testNoPushDownOrFilterPastAgg() { assertTrue(stats.child() instanceof EsRelation); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/115311") public void testSelectivePushDownComplexFilterPastAgg() { // expected plan: from test | emp_no > 0 | stats x = count(1) by emp_no | where emp_no < 3 or x > 9 LogicalPlan plan = optimizedPlan(""" @@ -1393,13 +1395,15 @@ public void testPushDownLimitThroughMultipleSort_AfterMvExpand2() { } /** + * TODO: Push down the filter correctly https://github.com/elastic/elasticsearch/issues/115311 + * * Expected * Limit[5[INTEGER]] - * \_Aggregate[[first_name{f}#232],[MAX(salary{f}#233) AS max_s, first_name{f}#232]] - * \_Filter[ISNOTNULL(first_name{f}#232)] - * \_MvExpand[first_name{f}#232] - * \_TopN[[Order[emp_no{f}#231,ASC,LAST]],50[INTEGER]] - * \_EsRelation[employees][emp_no{f}#231, first_name{f}#232, salary{f}#233] + * \_Filter[ISNOTNULL(first_name{r}#23)] + * \_Aggregate[STANDARD,[first_name{r}#23],[MAX(salary{f}#18,true[BOOLEAN]) 
AS max_s, first_name{r}#23]] + * \_MvExpand[first_name{f}#14,first_name{r}#23] + * \_TopN[[Order[emp_no{f}#13,ASC,LAST]],50[INTEGER]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] */ public void testDontPushDownLimitPastAggregate_AndMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1413,10 +1417,10 @@ public void testDontPushDownLimitPastAggregate_AndMvExpand() { | limit 5"""); var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); assertThat(limit.limit().fold(), equalTo(5)); - var agg = as(limit.child(), Aggregate.class); - var filter = as(agg.child(), Filter.class); - var mvExp = as(filter.child(), MvExpand.class); + var agg = as(filter.child(), Aggregate.class); + var mvExp = as(agg.child(), MvExpand.class); var topN = as(mvExp.child(), TopN.class); assertThat(topN.limit().fold(), equalTo(50)); assertThat(orderNames(topN), contains("emp_no")); @@ -1424,14 +1428,16 @@ public void testDontPushDownLimitPastAggregate_AndMvExpand() { } /** + * TODO: Push down the filter correctly https://github.com/elastic/elasticsearch/issues/115311 + * * Expected * Limit[5[INTEGER]] - * \_Aggregate[[first_name{f}#262],[MAX(salary{f}#263) AS max_s, first_name{f}#262]] - * \_Filter[ISNOTNULL(first_name{f}#262)] - * \_Limit[50[INTEGER]] - * \_MvExpand[first_name{f}#262] - * \_Limit[50[INTEGER]] - * \_EsRelation[employees][emp_no{f}#261, first_name{f}#262, salary{f}#263] + * \_Filter[ISNOTNULL(first_name{r}#22)] + * \_Aggregate[STANDARD,[first_name{r}#22],[MAX(salary{f}#17,true[BOOLEAN]) AS max_s, first_name{r}#22]] + * \_Limit[50[INTEGER]] + * \_MvExpand[first_name{f}#13,first_name{r}#22] + * \_Limit[50[INTEGER]] + * \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] 
*/ public void testPushDown_TheRightLimit_PastMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1445,9 +1451,9 @@ public void testPushDown_TheRightLimit_PastMvExpand() { var limit = as(plan, Limit.class); assertThat(limit.limit().fold(), equalTo(5)); - var agg = as(limit.child(), Aggregate.class); - var filter = as(agg.child(), Filter.class); - limit = as(filter.child(), Limit.class); + var filter = as(limit.child(), Filter.class); + var agg = as(filter.child(), Aggregate.class); + limit = as(agg.child(), Limit.class); assertThat(limit.limit().fold(), equalTo(50)); var mvExp = as(limit.child(), MvExpand.class); limit = as(mvExp.child(), Limit.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java index 49a738f4f4fa3..e159e5ed0bd7d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java @@ -213,6 +213,7 @@ public void testPushDownLikeRlikeFilter() { // from ... | where a > 1 | stats count(1) by b | where count(1) >= 3 and b < 2 // => ... 
| where a > 1 and b < 2 | stats count(1) by b | where count(1) >= 3 + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/115311") public void testSelectivelyPushDownFilterPastFunctionAgg() { EsRelation relation = relation(); GreaterThan conditionA = greaterThanOf(getFieldAttribute("a"), ONE); diff --git a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle index 1d5369468b054..0bc4813f25137 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle @@ -20,7 +20,7 @@ dependencies { // inference is available in 8.11 or later def supportedVersion = bwcVersion -> { - return bwcVersion.onOrAfter(Version.fromString("8.11.0")); + return bwcVersion.onOrAfter(Version.fromString("8.11.0")) && bwcVersion != VersionProperties.elasticsearchVersion } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java index 04a07eeb984ec..def52e97666f9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java @@ -29,6 +29,8 @@ public class SentenceBoundaryChunkingSettings implements ChunkingSettings { public static final String NAME = "SentenceBoundaryChunkingSettings"; private static final ChunkingStrategy STRATEGY = ChunkingStrategy.SENTENCE; + private static final int MAX_CHUNK_SIZE_LOWER_LIMIT = 20; + private static final int MAX_CHUNK_SIZE_UPPER_LIMIT = 300; private static final Set VALID_KEYS = Set.of( ChunkingSettingsOptions.STRATEGY.toString(), 
ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), @@ -62,9 +64,11 @@ public static SentenceBoundaryChunkingSettings fromMap(Map map) ); } - Integer maxChunkSize = ServiceUtils.extractRequiredPositiveInteger( + Integer maxChunkSize = ServiceUtils.extractRequiredPositiveIntegerBetween( map, ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), + MAX_CHUNK_SIZE_LOWER_LIMIT, + MAX_CHUNK_SIZE_UPPER_LIMIT, ModelConfigurations.CHUNKING_SETTINGS, validationException ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java index 5b91e122b9c80..7fb0fdc91bf72 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java @@ -28,6 +28,8 @@ public class WordBoundaryChunkingSettings implements ChunkingSettings { public static final String NAME = "WordBoundaryChunkingSettings"; private static final ChunkingStrategy STRATEGY = ChunkingStrategy.WORD; + private static final int MAX_CHUNK_SIZE_LOWER_LIMIT = 10; + private static final int MAX_CHUNK_SIZE_UPPER_LIMIT = 300; private static final Set VALID_KEYS = Set.of( ChunkingSettingsOptions.STRATEGY.toString(), ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), @@ -56,9 +58,11 @@ public static WordBoundaryChunkingSettings fromMap(Map map) { ); } - Integer maxChunkSize = ServiceUtils.extractRequiredPositiveInteger( + Integer maxChunkSize = ServiceUtils.extractRequiredPositiveIntegerBetween( map, ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), + MAX_CHUNK_SIZE_LOWER_LIMIT, + MAX_CHUNK_SIZE_UPPER_LIMIT, ModelConfigurations.CHUNKING_SETTINGS, validationException ); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index fb18cfb4959c7..4c07516051287 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -617,8 +617,7 @@ private String generateInvalidQueryInferenceResultsMessage(StringBuilder baseMes @Override public BlockLoader blockLoader(MappedFieldType.BlockLoaderContext blContext) { SourceValueFetcher fetcher = SourceValueFetcher.toString(blContext.sourcePaths(name().concat(".text"))); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.BytesRefsBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll(), sourceMode); + return new BlockSourceReader.BytesRefsBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index c0e3c78b12f13..9e7f8712b4087 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -435,6 +435,32 @@ public static Integer extractRequiredPositiveIntegerLessThanOrEqualToMax( return field; } + public static Integer extractRequiredPositiveIntegerBetween( + Map map, + String settingName, + int minValue, + int maxValue, + String scope, + ValidationException validationException + ) { + Integer field = extractRequiredPositiveInteger(map, settingName, scope, validationException); + + if (field != null && field < minValue) { + 
validationException.addValidationError( + ServiceUtils.mustBeGreaterThanOrEqualNumberErrorMessage(settingName, scope, field, minValue) + ); + return null; + } + if (field != null && field > maxValue) { + validationException.addValidationError( + ServiceUtils.mustBeLessThanOrEqualNumberErrorMessage(settingName, scope, field, maxValue) + ); + return null; + } + + return field; + } + public static Integer extractOptionalPositiveInteger( Map map, String settingName, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 49919fda9f89d..6732e5719b897 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -85,8 +85,8 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi ); public static final int EMBEDDING_MAX_BATCH_SIZE = 10; - public static final String DEFAULT_ELSER_ID = ".elser-2"; - public static final String DEFAULT_E5_ID = ".multi-e5-small"; + public static final String DEFAULT_ELSER_ID = ".elser-2-elasticsearch"; + public static final String DEFAULT_E5_ID = ".multilingual-e5-small-elasticsearch"; private static final Logger logger = LogManager.getLogger(ElasticsearchInternalService.class); private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(ElasticsearchInternalService.class); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java index 5b9625073e6c6..235a3730ce4f6 100644 
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java @@ -38,25 +38,27 @@ public void testValidChunkingSettingsMap() { } private Map, ChunkingSettings> chunkingSettingsMapToChunkingSettings() { - var maxChunkSize = randomNonNegativeInt(); - var overlap = randomIntBetween(1, maxChunkSize / 2); + var maxChunkSizeWordBoundaryChunkingSettings = randomIntBetween(10, 300); + var overlap = randomIntBetween(1, maxChunkSizeWordBoundaryChunkingSettings / 2); + var maxChunkSizeSentenceBoundaryChunkingSettings = randomIntBetween(20, 300); + return Map.of( Map.of( ChunkingSettingsOptions.STRATEGY.toString(), ChunkingStrategy.WORD.toString(), ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), - maxChunkSize, + maxChunkSizeWordBoundaryChunkingSettings, ChunkingSettingsOptions.OVERLAP.toString(), overlap ), - new WordBoundaryChunkingSettings(maxChunkSize, overlap), + new WordBoundaryChunkingSettings(maxChunkSizeWordBoundaryChunkingSettings, overlap), Map.of( ChunkingSettingsOptions.STRATEGY.toString(), ChunkingStrategy.SENTENCE.toString(), ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), - maxChunkSize + maxChunkSizeSentenceBoundaryChunkingSettings ), - new SentenceBoundaryChunkingSettings(maxChunkSize, 1) + new SentenceBoundaryChunkingSettings(maxChunkSizeSentenceBoundaryChunkingSettings, 1) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java index 8373ae93354b1..2832c2f64e0e6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsTests.java @@ 
-21,11 +21,11 @@ public static ChunkingSettings createRandomChunkingSettings() { switch (randomStrategy) { case WORD -> { - var maxChunkSize = randomNonNegativeInt(); + var maxChunkSize = randomIntBetween(10, 300); return new WordBoundaryChunkingSettings(maxChunkSize, randomIntBetween(1, maxChunkSize / 2)); } case SENTENCE -> { - return new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), randomBoolean() ? 0 : 1); + return new SentenceBoundaryChunkingSettings(randomIntBetween(20, 300), randomBoolean() ? 0 : 1); } default -> throw new IllegalArgumentException("Unsupported random strategy [" + randomStrategy + "]"); } @@ -38,13 +38,13 @@ public static Map createRandomChunkingSettingsMap() { switch (randomStrategy) { case WORD -> { - var maxChunkSize = randomNonNegativeInt(); + var maxChunkSize = randomIntBetween(10, 300); chunkingSettingsMap.put(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), maxChunkSize); chunkingSettingsMap.put(ChunkingSettingsOptions.OVERLAP.toString(), randomIntBetween(1, maxChunkSize / 2)); } case SENTENCE -> { - chunkingSettingsMap.put(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), randomNonNegativeInt()); + chunkingSettingsMap.put(ChunkingSettingsOptions.MAX_CHUNK_SIZE.toString(), randomIntBetween(20, 300)); } default -> { } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java index 5687ebc4dbae7..afce8c57e0350 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java @@ -318,7 +318,8 @@ public void testChunkSplitLargeChunkSizesWithChunkingSettings() { } public void testInvalidChunkingSettingsProvided() { - ChunkingSettings chunkingSettings = new 
WordBoundaryChunkingSettings(randomNonNegativeInt(), randomNonNegativeInt()); + var maxChunkSize = randomIntBetween(10, 300); + ChunkingSettings chunkingSettings = new WordBoundaryChunkingSettings(maxChunkSize, randomIntBetween(1, maxChunkSize / 2)); assertThrows(IllegalArgumentException.class, () -> { new SentenceBoundaryChunker().chunk(TEST_TEXT, chunkingSettings); }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java index fe97d7eb3af54..47a1a116ba21e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettingsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.ChunkingStrategy; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.HashMap; @@ -28,14 +27,14 @@ public void testMaxChunkSizeNotProvided() { } public void testInvalidInputsProvided() { - var chunkingSettingsMap = buildChunkingSettingsMap(Optional.of(randomNonNegativeInt())); + var chunkingSettingsMap = buildChunkingSettingsMap(Optional.of(randomIntBetween(20, 300))); chunkingSettingsMap.put(randomAlphaOfLength(10), randomNonNegativeInt()); assertThrows(ValidationException.class, () -> { SentenceBoundaryChunkingSettings.fromMap(chunkingSettingsMap); }); } public void testValidInputsProvided() { - int maxChunkSize = randomNonNegativeInt(); + int maxChunkSize = randomIntBetween(20, 300); SentenceBoundaryChunkingSettings settings = SentenceBoundaryChunkingSettings.fromMap( buildChunkingSettingsMap(Optional.of(maxChunkSize)) ); @@ -59,12 +58,12 
@@ protected Writeable.Reader instanceReader() { @Override protected SentenceBoundaryChunkingSettings createTestInstance() { - return new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), randomBoolean() ? 0 : 1); + return new SentenceBoundaryChunkingSettings(randomIntBetween(20, 300), randomBoolean() ? 0 : 1); } @Override protected SentenceBoundaryChunkingSettings mutateInstance(SentenceBoundaryChunkingSettings instance) throws IOException { - var chunkSize = randomValueOtherThan(instance.maxChunkSize, ESTestCase::randomNonNegativeInt); + var chunkSize = randomValueOtherThan(instance.maxChunkSize, () -> randomIntBetween(20, 300)); return new SentenceBoundaryChunkingSettings(chunkSize, instance.sentenceOverlap); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java index 08c0724f36270..ef643a4b36fdc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java @@ -136,7 +136,7 @@ public void testNumberOfChunksWithWordBoundaryChunkingSettings() { } public void testInvalidChunkingSettingsProvided() { - ChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(randomNonNegativeInt(), 0); + ChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(randomIntBetween(20, 300), 0); assertThrows(IllegalArgumentException.class, () -> { new WordBoundaryChunker().chunk(TEST_TEXT, chunkingSettings); }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettingsTests.java index 
c5515f7bf0512..dd91a3c7a947e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettingsTests.java @@ -14,7 +14,6 @@ import java.io.IOException; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Optional; @@ -28,19 +27,20 @@ public void testMaxChunkSizeNotProvided() { public void testOverlapNotProvided() { assertThrows(ValidationException.class, () -> { - WordBoundaryChunkingSettings.fromMap(buildChunkingSettingsMap(Optional.of(randomNonNegativeInt()), Optional.empty())); + WordBoundaryChunkingSettings.fromMap(buildChunkingSettingsMap(Optional.of(randomIntBetween(10, 300)), Optional.empty())); }); } public void testInvalidInputsProvided() { - var chunkingSettingsMap = buildChunkingSettingsMap(Optional.of(randomNonNegativeInt()), Optional.of(randomNonNegativeInt())); + var maxChunkSize = randomIntBetween(10, 300); + var chunkingSettingsMap = buildChunkingSettingsMap(Optional.of(maxChunkSize), Optional.of(randomIntBetween(1, maxChunkSize / 2))); chunkingSettingsMap.put(randomAlphaOfLength(10), randomNonNegativeInt()); assertThrows(ValidationException.class, () -> { WordBoundaryChunkingSettings.fromMap(chunkingSettingsMap); }); } public void testOverlapGreaterThanHalfMaxChunkSize() { - var maxChunkSize = randomNonNegativeInt(); + var maxChunkSize = randomIntBetween(10, 300); var overlap = randomIntBetween((maxChunkSize / 2) + 1, maxChunkSize); assertThrows(ValidationException.class, () -> { WordBoundaryChunkingSettings.fromMap(buildChunkingSettingsMap(Optional.of(maxChunkSize), Optional.of(overlap))); @@ -48,7 +48,7 @@ public void testOverlapGreaterThanHalfMaxChunkSize() { } public void testValidInputsProvided() { - int maxChunkSize = randomNonNegativeInt(); + int maxChunkSize = randomIntBetween(10, 300); int overlap = randomIntBetween(1, 
maxChunkSize / 2); WordBoundaryChunkingSettings settings = WordBoundaryChunkingSettings.fromMap( buildChunkingSettingsMap(Optional.of(maxChunkSize), Optional.of(overlap)) @@ -75,29 +75,14 @@ protected Writeable.Reader instanceReader() { @Override protected WordBoundaryChunkingSettings createTestInstance() { - var maxChunkSize = randomNonNegativeInt(); + var maxChunkSize = randomIntBetween(10, 300); return new WordBoundaryChunkingSettings(maxChunkSize, randomIntBetween(1, maxChunkSize / 2)); } @Override protected WordBoundaryChunkingSettings mutateInstance(WordBoundaryChunkingSettings instance) throws IOException { - var valueToMutate = randomFrom(List.of(ChunkingSettingsOptions.MAX_CHUNK_SIZE, ChunkingSettingsOptions.OVERLAP)); - var maxChunkSize = instance.maxChunkSize; - var overlap = instance.overlap; - - if (valueToMutate.equals(ChunkingSettingsOptions.MAX_CHUNK_SIZE)) { - while (maxChunkSize == instance.maxChunkSize) { - maxChunkSize = randomNonNegativeInt(); - } - - if (overlap > maxChunkSize / 2) { - overlap = randomIntBetween(1, maxChunkSize / 2); - } - } else if (valueToMutate.equals(ChunkingSettingsOptions.OVERLAP)) { - while (overlap == instance.overlap) { - overlap = randomIntBetween(1, maxChunkSize / 2); - } - } + var maxChunkSize = randomValueOtherThan(instance.maxChunkSize, () -> randomIntBetween(10, 300)); + var overlap = randomValueOtherThan(instance.overlap, () -> randomIntBetween(1, maxChunkSize / 2)); return new WordBoundaryChunkingSettings(maxChunkSize, overlap); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index ca48d5427d18b..e3df0f0b5a2e1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ 
-605,6 +605,60 @@ public void testExtractRequiredPositiveIntegerLessThanOrEqualToMax_AddsErrorWhen assertThat(validation.validationErrors().get(1), is("[scope] does not contain the required setting [not_key]")); } + public void testExtractRequiredPositiveIntegerBetween_ReturnsValueWhenValueIsBetweenMinAndMax() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue + 2, minValue + 10); + testExtractRequiredPositiveIntegerBetween_Successful(minValue, maxValue, randomIntBetween(minValue + 1, maxValue - 1)); + } + + public void testExtractRequiredPositiveIntegerBetween_ReturnsValueWhenValueIsEqualToMin() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue + 1, minValue + 10); + testExtractRequiredPositiveIntegerBetween_Successful(minValue, maxValue, minValue); + } + + public void testExtractRequiredPositiveIntegerBetween_ReturnsValueWhenValueIsEqualToMax() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue + 1, minValue + 10); + testExtractRequiredPositiveIntegerBetween_Successful(minValue, maxValue, maxValue); + } + + private void testExtractRequiredPositiveIntegerBetween_Successful(int minValue, int maxValue, int actualValue) { + var validation = new ValidationException(); + validation.addValidationError("previous error"); + Map map = modifiableMap(Map.of("key", actualValue)); + var parsedInt = ServiceUtils.extractRequiredPositiveIntegerBetween(map, "key", minValue, maxValue, "scope", validation); + + assertThat(validation.validationErrors(), hasSize(1)); + assertNotNull(parsedInt); + assertThat(parsedInt, is(actualValue)); + assertTrue(map.isEmpty()); + } + + public void testExtractRequiredIntBetween_AddsErrorForValueBelowMin() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue, minValue + 10); + testExtractRequiredIntBetween_Unsuccessful(minValue, maxValue, minValue - 1); + } + + public void 
testExtractRequiredIntBetween_AddsErrorForValueAboveMax() { + var minValue = randomNonNegativeInt(); + var maxValue = randomIntBetween(minValue, minValue + 10); + testExtractRequiredIntBetween_Unsuccessful(minValue, maxValue, maxValue + 1); + } + + private void testExtractRequiredIntBetween_Unsuccessful(int minValue, int maxValue, int actualValue) { + var validation = new ValidationException(); + validation.addValidationError("previous error"); + Map map = modifiableMap(Map.of("key", actualValue)); + var parsedInt = ServiceUtils.extractRequiredPositiveIntegerBetween(map, "key", minValue, maxValue, "scope", validation); + + assertThat(validation.validationErrors(), hasSize(2)); + assertNull(parsedInt); + assertTrue(map.isEmpty()); + assertThat(validation.validationErrors().get(1), containsString("Invalid value")); + } + public void testExtractOptionalEnum_ReturnsNull_WhenFieldDoesNotExist() { var validation = new ValidationException(); Map map = modifiableMap(Map.of("key", "value")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index b82b8a08f2175..5ec66687752a8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -1561,8 +1561,8 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { public void testIsDefaultId() { var service = createService(mock(Client.class)); - assertTrue(service.isDefaultId(".elser-2")); - assertTrue(service.isDefaultId(".multi-e5-small")); + assertTrue(service.isDefaultId(".elser-2-elasticsearch")); + assertTrue(service.isDefaultId(".multilingual-e5-small-elasticsearch")); 
assertFalse(service.isDefaultId("foo")); } diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java similarity index 59% rename from x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java rename to x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java index edecf4eb9669e..f5ac107628d1a 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.logsdb; +import org.elasticsearch.client.Request; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -19,7 +20,7 @@ import java.util.List; import java.util.Map; -public class LogsdbRestIT extends ESRestTestCase { +public class LogsdbWithBasicRestIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() @@ -96,7 +97,7 @@ public void testLogsdbOverrideSyntheticSourceModeInMapping() throws IOException assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); } - public void testLogsdbNoOverrideSyntheticSourceSetting() throws IOException { + public void testLogsdbOverrideSyntheticSourceSetting() throws IOException { final String index = "test-index"; createIndex( index, @@ -104,6 +105,70 @@ public void testLogsdbNoOverrideSyntheticSourceSetting() throws IOException { ); var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); assertEquals("logsdb", settings.get("index.mode")); - 
assertEquals(SourceFieldMapper.Mode.SYNTHETIC.toString(), settings.get("index.mapping.source.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbOverrideNullSyntheticSourceSetting() throws IOException { + final String index = "test-index"; + createIndex(index, Settings.builder().put("index.mode", "logsdb").putNull("index.mapping.source.mode").build()); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbOverrideSyntheticSourceSettingInTemplate() throws IOException { + var request = new Request("POST", "/_index_template/1"); + request.setJsonEntity(""" + { + "index_patterns": ["test-*"], + "template": { + "settings":{ + "index": { + "mode": "logsdb", + "mapping": { + "source": { + "mode": "synthetic" + } + } + } + } + } + } + """); + assertOK(client().performRequest(request)); + + final String index = "test-index"; + createIndex(index); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); + } + + public void testLogsdbOverrideNullInTemplate() throws IOException { + var request = new Request("POST", "/_index_template/1"); + request.setJsonEntity(""" + { + "index_patterns": ["test-*"], + "template": { + "settings":{ + "index": { + "mode": "logsdb", + "mapping": { + "source": { + "mode": null + } + } + } + } + } + } + """); + assertOK(client().performRequest(request)); + + final String index = "test-index"; + createIndex(index); + var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); + assertEquals("logsdb", settings.get("index.mode")); + 
assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsDbSourceModeMigrationIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsDbSourceModeMigrationIT.java new file mode 100644 index 0000000000000..adb23567e3933 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsDbSourceModeMigrationIT.java @@ -0,0 +1,290 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; + +public class LogsDbSourceModeMigrationIT extends LogsIndexModeRestTestIT { 
+ public static final String INDEX_TEMPLATE = """ + { + "index_patterns": ["my-logs-*-*"], + "priority": 100, + "data_stream": {}, + "composed_of": [ + "my-logs-mapping", + "my-logs-original-source", + "my-logs-migrated-source" + ], + "ignore_missing_component_templates": ["my-logs-original-source", "my-logs-migrated-source"] + } + """; + + public static final String MAPPING_COMPONENT_TEMPLATE = """ + { + "template": { + "settings": { + "index": { + "mode": "logsdb" + } + }, + "mappings": { + "properties": { + "@timestamp": { + "type": "date", + "format": "epoch_millis" + }, + "message": { + "type": "text" + }, + "method": { + "type": "keyword" + }, + "hits": { + "type": "long" + } + } + } + } + }"""; + + public static final String STORED_SOURCE_COMPONENT_TEMPLATE = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "stored" + } + } + } + }"""; + + public static final String SYNTHETIC_SOURCE_COMPONENT_TEMPLATE = """ + { + "template": { + "settings": { + "index": { + "mapping.source.mode": "synthetic" + } + } + } + }"""; + + @ClassRule() + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .module("constant-keyword") + .module("data-streams") + .module("mapper-extras") + .module("x-pack-aggregate-metric") + .module("x-pack-stack") + .setting("xpack.security.enabled", "false") + .setting("xpack.otel_data.registry.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("cluster.logsdb.enabled", "true") + .setting("stack.templates.enabled", "false") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Before + public void setup() { + client = client(); + } + + private RestClient client; + + public void testSwitchFromStoredToSyntheticSource() throws IOException { + assertOK(putComponentTemplate(client, "my-logs-mapping", MAPPING_COMPONENT_TEMPLATE)); + 
assertOK(putComponentTemplate(client, "my-logs-original-source", STORED_SOURCE_COMPONENT_TEMPLATE)); + + assertOK(putTemplate(client, "my-logs", INDEX_TEMPLATE)); + assertOK(createDataStream(client, "my-logs-ds-test")); + + var initialSourceMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "my-logs-ds-test", 0), + "index.mapping.source.mode" + ); + assertThat(initialSourceMode, equalTo("stored")); + var initialIndexMode = (String) getSetting(client, getDataStreamBackingIndex(client, "my-logs-ds-test", 0), "index.mode"); + assertThat(initialIndexMode, equalTo("logsdb")); + + var indexedWithStoredSource = new ArrayList(); + var indexedWithSyntheticSource = new ArrayList(); + for (int i = 0; i < 10; i++) { + indexedWithStoredSource.add(generateDoc()); + indexedWithSyntheticSource.add(generateDoc()); + } + + Response storedSourceBulkResponse = bulkIndex(client, "my-logs-ds-test", indexedWithStoredSource, 0); + assertOK(storedSourceBulkResponse); + assertThat(entityAsMap(storedSourceBulkResponse).get("errors"), Matchers.equalTo(false)); + + assertOK(putComponentTemplate(client, "my-logs-migrated-source", SYNTHETIC_SOURCE_COMPONENT_TEMPLATE)); + var rolloverResponse = rolloverDataStream(client, "my-logs-ds-test"); + assertOK(rolloverResponse); + assertThat(entityAsMap(rolloverResponse).get("rolled_over"), is(true)); + + var finalSourceMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "my-logs-ds-test", 1), + "index.mapping.source.mode" + ); + assertThat(finalSourceMode, equalTo("synthetic")); + + Response syntheticSourceBulkResponse = bulkIndex(client, "my-logs-ds-test", indexedWithSyntheticSource, 10); + assertOK(syntheticSourceBulkResponse); + assertThat(entityAsMap(syntheticSourceBulkResponse).get("errors"), Matchers.equalTo(false)); + + var allDocs = Stream.concat(indexedWithStoredSource.stream(), indexedWithSyntheticSource.stream()).toList(); + + var sourceList = search(new 
SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).size(allDocs.size()), "my-logs-ds-test"); + assertThat(sourceList.size(), equalTo(allDocs.size())); + + for (int i = 0; i < sourceList.size(); i++) { + var expected = XContentHelper.convertToMap(BytesReference.bytes(allDocs.get(i)), false, XContentType.JSON).v2(); + assertThat(sourceList.get(i), equalTo(expected)); + } + } + + public void testSwitchFromSyntheticToStoredSource() throws IOException { + assertOK(putComponentTemplate(client, "my-logs-mapping", MAPPING_COMPONENT_TEMPLATE)); + assertOK(putComponentTemplate(client, "my-logs-original-source", SYNTHETIC_SOURCE_COMPONENT_TEMPLATE)); + + assertOK(putTemplate(client, "my-logs", INDEX_TEMPLATE)); + assertOK(createDataStream(client, "my-logs-ds-test")); + + var initialSourceMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "my-logs-ds-test", 0), + "index.mapping.source.mode" + ); + assertThat(initialSourceMode, equalTo("synthetic")); + var initialIndexMode = (String) getSetting(client, getDataStreamBackingIndex(client, "my-logs-ds-test", 0), "index.mode"); + assertThat(initialIndexMode, equalTo("logsdb")); + + var indexedWithSyntheticSource = new ArrayList(); + var indexedWithStoredSource = new ArrayList(); + for (int i = 0; i < 10; i++) { + indexedWithSyntheticSource.add(generateDoc()); + indexedWithStoredSource.add(generateDoc()); + } + + Response syntheticSourceBulkResponse = bulkIndex(client, "my-logs-ds-test", indexedWithSyntheticSource, 0); + assertOK(syntheticSourceBulkResponse); + assertThat(entityAsMap(syntheticSourceBulkResponse).get("errors"), Matchers.equalTo(false)); + + assertOK(putComponentTemplate(client, "my-logs-migrated-source", STORED_SOURCE_COMPONENT_TEMPLATE)); + var rolloverResponse = rolloverDataStream(client, "my-logs-ds-test"); + assertOK(rolloverResponse); + assertThat(entityAsMap(rolloverResponse).get("rolled_over"), is(true)); + + var finalSourceMode = (String) getSetting( + client, + 
getDataStreamBackingIndex(client, "my-logs-ds-test", 1), + "index.mapping.source.mode" + ); + assertThat(finalSourceMode, equalTo("stored")); + + Response storedSourceBulkResponse = bulkIndex(client, "my-logs-ds-test", indexedWithStoredSource, 10); + assertOK(storedSourceBulkResponse); + assertThat(entityAsMap(storedSourceBulkResponse).get("errors"), Matchers.equalTo(false)); + + var allDocs = Stream.concat(indexedWithSyntheticSource.stream(), indexedWithStoredSource.stream()).toList(); + + var sourceList = search(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).size(allDocs.size()), "my-logs-ds-test"); + assertThat(sourceList.size(), equalTo(allDocs.size())); + + for (int i = 0; i < sourceList.size(); i++) { + var expected = XContentHelper.convertToMap(BytesReference.bytes(allDocs.get(i)), false, XContentType.JSON).v2(); + assertThat(sourceList.get(i), equalTo(expected)); + } + } + + private static Response bulkIndex(RestClient client, String dataStreamName, List documents, int startId) + throws IOException { + var sb = new StringBuilder(); + int id = startId; + for (var document : documents) { + sb.append(Strings.format("{ \"create\": { \"_id\" : \"%d\" } }", id)).append("\n"); + sb.append(Strings.toString(document)).append("\n"); + id++; + } + + var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); + bulkRequest.setJsonEntity(sb.toString()); + bulkRequest.addParameter("refresh", "true"); + return client.performRequest(bulkRequest); + } + + @SuppressWarnings("unchecked") + private List> search(SearchSourceBuilder search, String dataStreamName) throws IOException { + var request = new Request("GET", "/" + dataStreamName + "/_search"); + request.setJsonEntity(Strings.toString(search)); + var searchResponse = client.performRequest(request); + assertOK(searchResponse); + + Map searchResponseMap = XContentHelper.convertToMap( + XContentType.JSON.xContent(), + searchResponse.getEntity().getContent(), + false + ); + var hitsMap = (Map) 
searchResponseMap.get("hits"); + + var hitsList = (List>) hitsMap.get("hits"); + assertThat(hitsList.size(), greaterThan(0)); + + return hitsList.stream() + .sorted(Comparator.comparingInt((Map hit) -> Integer.parseInt((String) hit.get("_id")))) + .map(hit -> (Map) hit.get("_source")) + .toList(); + } + + private static XContentBuilder generateDoc() throws IOException { + var doc = XContentFactory.jsonBuilder(); + doc.startObject(); + { + doc.field("@timestamp", Long.toString(randomMillisUpToYear9999())); + doc.field("message", randomAlphaOfLengthBetween(20, 50)); + doc.field("method", randomAlphaOfLength(3)); + doc.field("hits", randomLong()); + } + doc.endObject(); + + return doc; + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java index c5ccee1d36b72..f529b9fa1db96 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.logsdb; -import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; @@ -496,16 +495,6 @@ public void testIgnoreAboveSetting() throws IOException { } } - private static Map getMapping(final RestClient client, final String indexName) throws IOException { - final Request request = new Request("GET", "/" + indexName + "/_mapping"); - - Map mappings = ((Map>) entityAsMap(client.performRequest(request)).get(indexName)).get( - "mappings" - ); - - return mappings; - } - private Function> subObject(String key) { return (mapAsObject) -> (Map) ((Map) mapAsObject).get(key); } diff --git 
a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java index dbee5d1d2de8c..cc7f5bdb33871 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java @@ -98,4 +98,15 @@ protected static Response putClusterSetting(final RestClient client, final Strin request.setJsonEntity("{ \"transient\": { \"" + settingName + "\": " + settingValue + " } }"); return client.performRequest(request); } + + @SuppressWarnings("unchecked") + protected static Map getMapping(final RestClient client, final String indexName) throws IOException { + final Request request = new Request("GET", "/" + indexName + "/_mapping"); + + Map mappings = ((Map>) entityAsMap(client.performRequest(request)).get(indexName)).get( + "mappings" + ); + + return mappings; + } } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index f60c941c75a7c..4625fe91294d7 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -47,6 +47,12 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider this.mapperServiceFactory = mapperServiceFactory; } + @Override + public boolean overrulesTemplateAndRequestSettings() { + // Indicates that the provider value takes precedence over any user setting. 
+ return true; + } + @Override public Settings getAdditionalIndexSettings( String indexName, diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml index 68597afda6c78..bc81d1eb67309 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/50_esql_synthetic_source_disabled_fields.yml @@ -283,7 +283,7 @@ teardown: - match: {values.0.3: "PUT"} - match: {values.0.4: false} - match: {values.0.5: "POINT (-74.006 40.7128)"} - - match: {values.0.6: null} # null is expected, because text fields aren't stored in ignored source + - match: {values.0.6: "Do. Or do not. There is no try."} - match: {values.0.7: 102} - do: @@ -296,10 +296,86 @@ teardown: - match: {columns.0.name: "message"} - match: {columns.0.type: "text"} - # null is expected, because text fields aren't stored in ignored source - - match: {values.0.0: null} - - match: {values.1.0: null} - - match: {values.2.0: null} - - match: {values.3.0: null} - - match: {values.4.0: null} - - match: {values.5.0: null} + - match: {values.0.0: "Do. Or do not. There is no try."} + - match: {values.1.0: "I find your lack of faith disturbing."} + - match: {values.2.0: "Wars not make one great."} + - match: {values.3.0: "No, I am your father."} + - match: {values.4.0: "May the force be with you."} + - match: {values.5.0: "That's no moon. 
It's a space station."} + +--- +"message field with keyword multi-field with ignore_above": + - do: + indices.create: + index: my-index2 + body: + settings: + index: + mode: logsdb + mappings: + properties: + "@timestamp": + type: date + host.name: + type: keyword + store: false + message: + type: text + store: false + fields: + raw: + type: keyword + ignore_above: 3 + + - do: + bulk: + index: my-index2 + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:30:00Z", "host.name": "foo", "message": "No, I am your father." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:31:00Z", "host.name": "bar", "message": "Do. Or do not. There is no try." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foo", "message": "May the force be with you." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:33:00Z", "host.name": "baz", "message": "I find your lack of faith disturbing." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:34:00Z", "host.name": "baz", "message": "Wars not make one great." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:35:00Z", "host.name": "foo", "message": "That's no moon. It's a space station." } + + - do: + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | LIMIT 1' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "host.name"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "message"} + - match: {columns.2.type: "text"} + - match: {columns.3.name: "message.raw"} + - match: {columns.3.type: "keyword"} + + - match: {values.0.0: "2024-02-12T10:31:00.000Z"} + - match: {values.0.1: "bar"} + - match: {values.0.2: "Do. Or do not. There is no try."} + # Note that isn't related to synthetic source. For both stored and synthetic source null is returned: +# - match: {values.0.3: "Do. Or do not. 
There is no try."} + + - do: + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | KEEP message | LIMIT 10' + + - match: {columns.0.name: "message"} + - match: {columns.0.type: "text"} + + - match: {values.0.0: "Do. Or do not. There is no try."} + - match: {values.1.0: "I find your lack of faith disturbing."} + - match: {values.2.0: "Wars not make one great."} + - match: {values.3.0: "No, I am your father."} + - match: {values.4.0: "May the force be with you."} + - match: {values.5.0: "That's no moon. It's a space station."} diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml index 7e305bda4ef4e..6c840a0cf9d3a 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/51_esql_synthetic_source.yml @@ -175,3 +175,80 @@ teardown: - match: {values.3.0: "No, I am your father."} - match: {values.4.0: "May the force be with you."} - match: {values.5.0: "That's no moon. It's a space station."} + +--- +"message field with stored keyword multi-field with ignore_above": + - do: + indices.create: + index: my-index2 + body: + settings: + index: + mode: logsdb + mappings: + properties: + "@timestamp": + type: date + host.name: + type: keyword + store: false + message: + type: text + store: false + fields: + raw: + type: keyword + store: true + + - do: + bulk: + index: my-index2 + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:30:00Z", "host.name": "foo", "message": "No, I am your father." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:31:00Z", "host.name": "bar", "message": "Do. Or do not. There is no try." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foo", "message": "May the force be with you." 
} + - { "index": { } } + - { "@timestamp": "2024-02-12T10:33:00Z", "host.name": "baz", "message": "I find your lack of faith disturbing." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:34:00Z", "host.name": "baz", "message": "Wars not make one great." } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:35:00Z", "host.name": "foo", "message": "That's no moon. It's a space station." } + + - do: + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | LIMIT 1' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "host.name"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "message"} + - match: {columns.2.type: "text"} + - match: {columns.3.name: "message.raw"} + - match: {columns.3.type: "keyword"} + + - match: {values.0.0: "2024-02-12T10:31:00.000Z"} + - match: {values.0.1: "bar"} + - match: {values.0.2: "Do. Or do not. There is no try."} + - match: {values.0.3: "Do. Or do not. There is no try."} + + - do: + esql.query: + body: + query: 'FROM my-index2 | SORT host.name, @timestamp | KEEP message | LIMIT 10' + + - match: {columns.0.name: "message"} + - match: {columns.0.type: "text"} + + - match: {values.0.0: "Do. Or do not. There is no try."} + - match: {values.1.0: "I find your lack of faith disturbing."} + - match: {values.2.0: "Wars not make one great."} + - match: {values.3.0: "No, I am your father."} + - match: {values.4.0: "May the force be with you."} + - match: {values.5.0: "That's no moon. 
It's a space station."} + diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index e8fd0da496bbe..b43d87c17e644 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -339,8 +339,7 @@ protected Object parseSourceValue(Object value) { BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); - var sourceMode = blContext.indexSettings().getIndexMappingSourceMode(); - return new BlockSourceReader.LongsBlockLoader(valueFetcher, lookup, sourceMode); + return new BlockSourceReader.LongsBlockLoader(valueFetcher, lookup); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java index bbd63e0d3bfe9..0dec99a9b9bb9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java @@ -33,6 +33,7 @@ public class AdaptiveAllocationsScaler { private final String deploymentId; private final KalmanFilter1d requestRateEstimator; private final KalmanFilter1d inferenceTimeEstimator; + private final long scaleToZeroAfterNoRequestsSeconds; private double timeWithoutRequestsSeconds; private int numberOfAllocations; @@ -44,10 +45,11 @@ public class AdaptiveAllocationsScaler { private 
Double lastMeasuredRequestRate; private Double lastMeasuredInferenceTime; private Long lastMeasuredQueueSize; - private long scaleToZeroAfterNoRequestsSeconds; AdaptiveAllocationsScaler(String deploymentId, int numberOfAllocations, long scaleToZeroAfterNoRequestsSeconds) { this.deploymentId = deploymentId; + this.scaleToZeroAfterNoRequestsSeconds = scaleToZeroAfterNoRequestsSeconds; + // A smoothing factor of 100 roughly means the last 100 measurements have an effect // on the estimated values. The sampling time is 10 seconds, so approximately the // last 15 minutes are taken into account. @@ -67,7 +69,6 @@ public class AdaptiveAllocationsScaler { lastMeasuredRequestRate = null; lastMeasuredInferenceTime = null; lastMeasuredQueueSize = null; - this.scaleToZeroAfterNoRequestsSeconds = scaleToZeroAfterNoRequestsSeconds; } void setMinMaxNumberOfAllocations(Integer minNumberOfAllocations, Integer maxNumberOfAllocations) { @@ -117,6 +118,10 @@ void process(AdaptiveAllocationsScalerService.Stats stats, double timeIntervalSe dynamicsChanged = false; } + void resetTimeWithoutRequests() { + timeWithoutRequestsSeconds = 0; + } + double getLoadLower() { double requestRateLower = Math.max(0.0, requestRateEstimator.lower()); double inferenceTimeLower = Math.max(0.0, inferenceTimeEstimator.hasValue() ? 
inferenceTimeEstimator.lower() : 1.0); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 770e890512935..16ec3ee9b468c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -188,7 +188,10 @@ Collection observeDouble(Function cluster.nodes(2) .distribution(DistributionType.DEFAULT) .setting("xpack.ml.enabled", "false") .setting("xpack.license.self_generated.type", "trial") @@ -62,8 +61,10 @@ public abstract class SecurityOnTrialLicenseRestTestCase extends ESRestTestCase .user("admin_user", "admin-password", ROOT_USER_ROLE, true) .user("security_test_user", "security-test-password", "security_test_role", false) .user("x_pack_rest_user", "x-pack-test-password", ROOT_USER_ROLE, true) - .user("cat_test_user", "cat-test-password", "cat_test_role", false) - .build(); + .user("cat_test_user", "cat-test-password", "cat_test_role", false); + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().apply(commonTrialSecurityClusterConfig).build(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/rolemapping/RoleMappingRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/rolemapping/RoleMappingRestIT.java new file mode 100644 index 0000000000000..51970af4b88a0 --- /dev/null +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/rolemapping/RoleMappingRestIT.java @@ -0,0 +1,268 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.rolemapping; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; +import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class RoleMappingRestIT extends ESRestTestCase { + private static final String settingsJson = """ + { + "metadata": { + "version": "1", + "compatibility": "8.4.0" + }, + 
"state": { + "role_mappings": { + "role-mapping-1": { + "enabled": true, + "roles": [ "role_1" ], + "rules": { "field": { "username": "no_user" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo": "something", + "_es_reserved_role_mapping_name": "ignored" + } + }, + "role-mapping-2": { + "enabled": true, + "roles": [ "role_2" ], + "rules": { "field": { "username": "no_user" } } + }, + "role-mapping-3": { + "enabled": true, + "roles": [ "role_3" ], + "rules": { "field": { "username": "no_user" } }, + "metadata": { + "_read_only" : { "field": 1 }, + "_es_reserved_role_mapping_name": { "still_ignored": true } + } + } + } + } + }"""; + private static final ExpressionRoleMapping clusterStateMapping1 = new ExpressionRoleMapping( + "role-mapping-1-read-only-operator-mapping", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("no_user"))), + List.of("role_1"), + null, + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", "_read_only", true), + true + ); + private static final ExpressionRoleMapping clusterStateMapping2 = new ExpressionRoleMapping( + "role-mapping-2-read-only-operator-mapping", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("no_user"))), + List.of("role_2"), + null, + Map.of("_read_only", true), + true + ); + private static final ExpressionRoleMapping clusterStateMapping3 = new ExpressionRoleMapping( + "role-mapping-3-read-only-operator-mapping", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("no_user"))), + List.of("role_3"), + null, + Map.of("_read_only", true), + true + ); + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .apply(SecurityOnTrialLicenseRestTestCase.commonTrialSecurityClusterConfig) + .configFile("operator/settings.json", Resource.fromString(settingsJson)) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public 
void testGetRoleMappings() throws IOException { + expectMappings(List.of(clusterStateMapping1, clusterStateMapping2, clusterStateMapping3)); + expectMappings(List.of(clusterStateMapping1), "role-mapping-1"); + expectMappings(List.of(clusterStateMapping1, clusterStateMapping3), "role-mapping-1", "role-mapping-3"); + expectMappings(List.of(clusterStateMapping1), clusterStateMapping1.getName()); + expectMappings(List.of(clusterStateMapping1), clusterStateMapping1.getName(), "role-mapping-1"); + + expect404(() -> getMappings("role-mapping-4")); + expect404(() -> getMappings("role-mapping-4-read-only-operator-mapping")); + + ExpressionRoleMapping nativeMapping1 = expressionRoleMapping("role-mapping-1"); + putMapping(nativeMapping1, createOrUpdateWarning(nativeMapping1.getName())); + + ExpressionRoleMapping nativeMapping4 = expressionRoleMapping("role-mapping-4"); + putMapping(nativeMapping4); + + expectMappings(List.of(clusterStateMapping1, clusterStateMapping2, clusterStateMapping3, nativeMapping1, nativeMapping4)); + expectMappings(List.of(clusterStateMapping1, nativeMapping1), "role-mapping-1"); + expectMappings(List.of(clusterStateMapping1, nativeMapping1), "role-mapping-1", clusterStateMapping1.getName()); + expectMappings(List.of(clusterStateMapping1), clusterStateMapping1.getName()); + expectMappings(List.of(nativeMapping4), "role-mapping-4"); + expectMappings(List.of(nativeMapping4), "role-mapping-4", "role-mapping-4-read-only-operator-mapping"); + } + + public void testPutAndDeleteRoleMappings() throws IOException { + { + var ex = expectThrows( + ResponseException.class, + () -> putMapping(expressionRoleMapping("role-mapping-1-read-only-operator-mapping")) + ); + assertThat( + ex.getMessage(), + containsString( + "Invalid mapping name [role-mapping-1-read-only-operator-mapping]. 
" + + "[-read-only-operator-mapping] is not an allowed suffix" + ) + ); + } + + // Also fails even if a CS role mapping with that name does not exist + { + var ex = expectThrows( + ResponseException.class, + () -> putMapping(expressionRoleMapping("role-mapping-4-read-only-operator-mapping")) + ); + assertThat( + ex.getMessage(), + containsString( + "Invalid mapping name [role-mapping-4-read-only-operator-mapping]. " + + "[-read-only-operator-mapping] is not an allowed suffix" + ) + ); + } + + assertOK(putMapping(expressionRoleMapping("role-mapping-1"), createOrUpdateWarning("role-mapping-1"))); + + assertOK(deleteMapping("role-mapping-1", deletionWarning("role-mapping-1"))); + + // 404 without warnings if no native mapping exists + expect404(() -> deleteMapping("role-mapping-1")); + } + + private static void expect404(ThrowingRunnable clientCall) { + var ex = expectThrows(ResponseException.class, clientCall); + assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + } + + private static Response putMapping(ExpressionRoleMapping roleMapping) throws IOException { + return putMapping(roleMapping, null); + } + + private static Response putMapping(ExpressionRoleMapping roleMapping, @Nullable String warning) throws IOException { + Request request = new Request("PUT", "/_security/role_mapping/" + roleMapping.getName()); + XContentBuilder xContent = roleMapping.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); + request.setJsonEntity(BytesReference.bytes(xContent).utf8ToString()); + if (warning != null) { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> warnings.equals(List.of(warning)) == false).build() + ); + } + return client().performRequest(request); + } + + private static Response deleteMapping(String name) throws IOException { + return deleteMapping(name, null); + } + + private static Response deleteMapping(String name, @Nullable String warning) throws IOException { + Request request = 
new Request("DELETE", "/_security/role_mapping/" + name); + if (warning != null) { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> warnings.equals(List.of(warning)) == false).build() + ); + } + return client().performRequest(request); + } + + private static ExpressionRoleMapping expressionRoleMapping(String name) { + return new ExpressionRoleMapping( + name, + new FieldExpression("username", List.of(new FieldExpression.FieldValue(randomAlphaOfLength(10)))), + List.of(randomAlphaOfLength(5)), + null, + Map.of(), + true + ); + } + + @SuppressWarnings("unchecked") + private static void expectMappings(List expectedMappings, String... requestedMappingNames) throws IOException { + Map map = responseAsMap(getMappings(requestedMappingNames)); + assertThat( + map.keySet(), + containsInAnyOrder(expectedMappings.stream().map(ExpressionRoleMapping::getName).toList().toArray(new String[0])) + ); + List actualMappings = new ArrayList<>(); + for (Map.Entry entry : map.entrySet()) { + XContentParser body = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, (Map) entry.getValue()); + ExpressionRoleMapping actual = ExpressionRoleMapping.parse(entry.getKey(), body); + actualMappings.add(actual); + } + assertThat(actualMappings, containsInAnyOrder(expectedMappings.toArray(new ExpressionRoleMapping[0]))); + } + + private static Response getMappings(String... 
requestedMappingNames) throws IOException { + return client().performRequest(new Request("GET", "/_security/role_mapping/" + String.join(",", requestedMappingNames))); + } + + @Override + protected Settings restAdminSettings() { + String token = basicAuthHeaderValue("admin_user", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("admin_user", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + private static String createOrUpdateWarning(String mappingName) { + return "A read-only role mapping with the same name [" + + mappingName + + "] has been previously defined in a configuration file. " + + "Both role mappings will be used to determine role assignments."; + } + + private static String deletionWarning(String mappingName) { + return "A read-only role mapping with the same name [" + + mappingName + + "] has previously been defined in a configuration file. 
" + + "The native role mapping was deleted, but the read-only mapping will remain active " + + "and will be used to determine role assignments."; + }; +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 3b6ffd0698623..fdd854e7a9673 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -45,6 +46,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -63,7 +65,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -271,21 +272,28 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo assertThat(resolveRolesFuture.get(), 
containsInAnyOrder("kibana_user", "fleet_user")); } - // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. index-based role mappings) - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - assertThat(response.mappings(), emptyArray()); - - // role mappings (with the same names) can also be stored in the "native" store - var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); - putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); + // the role mappings are retrievable by the role mapping action for BWC + assertGetResponseHasMappings(true, "everyone_kibana", "everyone_fleet"); + + // role mappings (with the same names) can be stored in the "native" store + { + PutRoleMappingResponse response = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")) + .actionGet(); + assertTrue(response.isCreated()); + response = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); + assertTrue(response.isCreated()); + } + { + // deleting role mappings that exist in the native store and in cluster-state should result in success + var response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).actionGet(); + assertTrue(response.isFound()); + response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_fleet")).actionGet(); + assertTrue(response.isFound()); + } + } - public void testRoleMappingsApplied() throws Exception { + public void testClusterStateRoleMappingsAddedThenDeleted() throws Exception 
{ ensureGreen(); var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); @@ -294,6 +302,12 @@ public void testRoleMappingsApplied() throws Exception { assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); + { + // Deleting non-existent native role mappings returns not found even if they exist in config file + var response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).get(); + assertFalse(response.isFound()); + } + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); @@ -308,48 +322,96 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - // native role mappings are not affected by the removal of the cluster-state based ones + // cluster-state role mapping was removed and is not returned in the API anymore { var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), - containsInAnyOrder("everyone_kibana", "everyone_fleet") - ); + assertFalse(response.hasMappings()); } - // and roles are resolved based on the native role mappings + // no role mappings means no roles are resolved for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); userRoleMapper.resolveRoles( new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), resolveRolesFuture ); - 
assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); + assertThat(resolveRolesFuture.get(), empty()); } + } - { - var request = new DeleteRoleMappingRequest(); - request.setName("everyone_kibana"); - var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - request = new DeleteRoleMappingRequest(); - request.setName("everyone_fleet"); - response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); + public void testGetRoleMappings() throws Exception { + ensureGreen(); + + final List nativeMappings = List.of("everyone_kibana", "_everyone_kibana", "zzz_mapping", "123_mapping"); + for (var mapping : nativeMappings) { + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest(mapping)).actionGet(); } - // no roles are resolved now, because both native and cluster-state based stores have been cleared - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); + var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var request = new GetRoleMappingsRequest(); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder( + "everyone_kibana", + ExpressionRoleMapping.addReadOnlySuffix("everyone_kibana"), + 
"_everyone_kibana", + ExpressionRoleMapping.addReadOnlySuffix("everyone_fleet"), + "zzz_mapping", + "123_mapping" + ) + ); + + List readOnlyFlags = new ArrayList<>(); + for (ExpressionRoleMapping mapping : response.mappings()) { + boolean isReadOnly = ExpressionRoleMapping.hasReadOnlySuffix(mapping.getName()) + && mapping.getMetadata().get("_read_only") != null; + readOnlyFlags.add(isReadOnly); } + // assert that cluster-state role mappings come last + assertThat(readOnlyFlags, contains(false, false, false, false, true, true)); + + // it's possible to delete overlapping native role mapping + assertTrue(client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).actionGet().isFound()); + + // Fetch a specific file based role + request = new GetRoleMappingsRequest(); + request.setNames(ExpressionRoleMapping.addReadOnlySuffix("everyone_kibana")); + response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder(ExpressionRoleMapping.addReadOnlySuffix("everyone_kibana")) + ); + + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + final ClusterStateResponse clusterStateResponse = clusterAdmin().state( + new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(savedClusterState.v2().get()) + ).get(); + + assertNull( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) + ); + + // Make sure remaining native mappings can still be fetched + request = new GetRoleMappingsRequest(); + response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + 
assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder("_everyone_kibana", "zzz_mapping", "123_mapping") + ); } public static Tuple setupClusterStateListenerForError( @@ -434,11 +496,8 @@ public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - // no native role mappings exist - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); + // even if index is closed, cluster-state role mappings are still returned + assertGetResponseHasMappings(true, "everyone_kibana", "everyone_fleet"); // cluster state settings are also applied var clusterStateResponse = clusterAdmin().state( @@ -477,6 +536,12 @@ public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { } } + private DeleteRoleMappingRequest deleteRequest(String name) { + var request = new DeleteRoleMappingRequest(); + request.setName(name); + return request; + } + private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var json = """ { @@ -495,4 +560,19 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); } } + + private static void assertGetResponseHasMappings(boolean readOnly, String... 
mappings) throws InterruptedException, ExecutionException { + var request = new GetRoleMappingsRequest(); + request.setNames(mappings); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder( + Arrays.stream(mappings) + .map(mapping -> readOnly ? ExpressionRoleMapping.addReadOnlySuffix(mapping) : mapping) + .toArray(String[]::new) + ) + ); + } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java index 6c6582138ce89..97a5f080cee4e 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java @@ -30,6 +30,7 @@ import static org.elasticsearch.integration.RoleMappingFileSettingsIT.setupClusterStateListenerForCleanup; import static org.elasticsearch.integration.RoleMappingFileSettingsIT.writeJSONFile; import static org.elasticsearch.integration.RoleMappingFileSettingsIT.writeJSONFileWithoutVersionIncrement; +import static org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata.METADATA_NAME_FIELD; import static org.hamcrest.Matchers.containsInAnyOrder; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) @@ -123,7 +124,7 @@ public void testReservedStatePersistsOnRestart() throws Exception { new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + Map.of("uuid", 
"b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( @@ -131,7 +132,14 @@ public void testReservedStatePersistsOnRestart() throws Exception { new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + Map.of( + "uuid", + "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", + "_foo", + "something_else", + METADATA_NAME_FIELD, + "everyone_fleet_alone" + ), false ) ); @@ -141,26 +149,29 @@ public void testReservedStatePersistsOnRestart() throws Exception { ensureGreen(); awaitFileSettingsWatcher(); - // assert busy to give mappings time to update after restart; otherwise, the role mapping names might be dummy values - // `name_not_available_after_deserialization` - assertBusy( - () -> assertRoleMappingsInClusterState( - new ExpressionRoleMapping( - "everyone_kibana_alone", - new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), - List.of("kibana_user"), - List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), - true + assertRoleMappingsInClusterState( + new ExpressionRoleMapping( + "everyone_kibana_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("kibana_user"), + List.of(), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), + true + ), + new ExpressionRoleMapping( + "everyone_fleet_alone", + new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), + List.of("fleet_user"), + List.of(), + Map.of( + "uuid", + "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", + "_foo", + "something_else", + METADATA_NAME_FIELD, + "everyone_fleet_alone" ), - new ExpressionRoleMapping( - "everyone_fleet_alone", - new FieldExpression("username", List.of(new 
FieldExpression.FieldValue("*"))), - List.of("fleet_user"), - List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), - false - ) + false ) ); @@ -197,7 +208,7 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( @@ -205,7 +216,14 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + Map.of( + "uuid", + "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", + "_foo", + "something_else", + METADATA_NAME_FIELD, + "everyone_fleet_alone" + ), false ) ); @@ -225,7 +243,7 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( @@ -233,7 +251,14 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else"), + Map.of( + "uuid", + "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", + "_foo", + "something_else", + METADATA_NAME_FIELD, + "everyone_fleet_alone" + ), false ) ); 
@@ -251,7 +276,14 @@ public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Ex new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user", "kibana_admin"), List.of(), - Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something"), + Map.of( + "uuid", + "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo", + "something", + METADATA_NAME_FIELD, + "everyone_kibana_together" + ), true ) ) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 79a00fa1293bd..8f32bcf7ace8a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -899,6 +899,7 @@ Collection createComponents( components.add(nativeUsersStore); components.add(new PluginComponentBinding<>(NativeRoleMappingStore.class, nativeRoleMappingStore)); components.add(new PluginComponentBinding<>(UserRoleMapper.class, userRoleMapper)); + components.add(clusterStateRoleMapper); components.add(reservedRealm); components.add(realms); this.realms.set(realms); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java index 73d1a1abcdb50..837b475dea68f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java @@ -43,7 +43,7 @@ public String name() { @Override public TransformState transform(Object source, TransformState prevState) throws Exception { @SuppressWarnings("unchecked") - Set roleMappings 
= validate((List) source); + Set roleMappings = validateAndTranslate((List) source); RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings); if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromClusterState(prevState.state()))) { return prevState; @@ -71,7 +71,7 @@ public List fromXContent(XContentParser parser) throws IO return result; } - private Set validate(List roleMappings) { + private Set validateAndTranslate(List roleMappings) { var exceptions = new ArrayList(); for (var roleMapping : roleMappings) { // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX @@ -85,6 +85,8 @@ private Set validate(List roleMapp exceptions.forEach(illegalArgumentException::addSuppressed); throw illegalArgumentException; } - return roleMappings.stream().map(PutRoleMappingRequest::getMapping).collect(Collectors.toUnmodifiableSet()); + return roleMappings.stream() + .map(r -> RoleMappingMetadata.copyWithNameInMetadata(r.getMapping())) + .collect(Collectors.toUnmodifiableSet()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index 74129facae70a..b1fdf2e90dd46 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import 
org.elasticsearch.tasks.Task; @@ -16,17 +17,19 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; public class TransportDeleteRoleMappingAction extends HandledTransportAction { - private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportDeleteRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore roleMappingStore + NativeRoleMappingStore roleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super( DeleteRoleMappingAction.NAME, @@ -36,10 +39,24 @@ public TransportDeleteRoleMappingAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.roleMappingStore = roleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) { - roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); + roleMappingStore.deleteRoleMapping(request, listener.safeMap(found -> { + if (found && clusterStateRoleMapper.hasMapping(request.getName())) { + // Allow to delete a mapping with the same name in the native role mapping store as the file_settings namespace, but + // add a warning header to signal to the caller that this could be a problem. + HeaderWarning.addWarning( + "A read-only role mapping with the same name [" + + request.getName() + + "] has previously been defined in a configuration file. 
" + + "The native role mapping was deleted, but the read-only mapping will remain active " + + "and will be used to determine role assignments." + ); + } + return new DeleteRoleMappingResponse(found); + })); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java index ac0d3177cca09..5f16b095db0ef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -17,21 +19,31 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; +import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; +import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; public class TransportGetRoleMappingsAction extends HandledTransportAction { + private static final Logger logger = 
LogManager.getLogger(TransportGetRoleMappingsAction.class); private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportGetRoleMappingsAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore nativeRoleMappingStore + NativeRoleMappingStore nativeRoleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super( GetRoleMappingsAction.NAME, @@ -41,19 +53,84 @@ public TransportGetRoleMappingsAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.roleMappingStore = nativeRoleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override protected void doExecute(Task task, final GetRoleMappingsRequest request, final ActionListener listener) { final Set names; if (request.getNames() == null || request.getNames().length == 0) { - names = null; + names = Set.of(); } else { names = new HashSet<>(Arrays.asList(request.getNames())); } - this.roleMappingStore.getRoleMappings(names, ActionListener.wrap(mappings -> { - ExpressionRoleMapping[] array = mappings.toArray(new ExpressionRoleMapping[mappings.size()]); - listener.onResponse(new GetRoleMappingsResponse(array)); + roleMappingStore.getRoleMappings(names, ActionListener.wrap(nativeRoleMappings -> { + final Collection clusterStateRoleMappings = clusterStateRoleMapper.getMappings( + // if the API was queried with a reserved suffix for any of the names, we need to remove it because role mappings are + // stored without it in cluster-state + removeReadOnlySuffixIfPresent(names) + ); + listener.onResponse(buildResponse(clusterStateRoleMappings, nativeRoleMappings)); }, listener::onFailure)); } + + private GetRoleMappingsResponse buildResponse( + Collection clusterStateMappings, + Collection nativeMappings + ) { + Stream translatedClusterStateMappings = clusterStateMappings.stream().filter(roleMapping -> { + if (RoleMappingMetadata.hasFallbackName(roleMapping)) { + logger.warn( + 
"Role mapping retrieved from cluster-state with an ambiguous name. It will be omitted from the API response." + + "This is likely a transient issue during node start-up." + ); + return false; + } + return true; + }).map(this::translateClusterStateMapping); + return new GetRoleMappingsResponse( + Stream.concat(nativeMappings.stream(), translatedClusterStateMappings).toArray(ExpressionRoleMapping[]::new) + ); + } + + private Set removeReadOnlySuffixIfPresent(Set names) { + return names.stream().map(ExpressionRoleMapping::removeReadOnlySuffixIfPresent).collect(Collectors.toSet()); + } + + /** + * Translator method for ensuring unique API names and marking cluster-state role mappings as read-only. + * Role mappings retrieved from cluster-state are surfaced through both the transport and REST layers, + * along with native role mappings. Unlike native role mappings, cluster-state role mappings are + * read-only and cannot be modified via APIs. It is possible for cluster-state and native role mappings + * to have overlapping names. + * + *

+ * This does the following: + *

+ * + *
    + *
  1. Appends a reserved suffix to cluster-state role mapping names to avoid conflicts with native role mappings.
  2. + *
  3. Marks the metadata of cluster-state role mappings with a reserved read-only flag.
  4. + *
  5. Removes internal metadata flag used in processing (see {@link RoleMappingMetadata#METADATA_NAME_FIELD}).
  6. + *
+ */ + private ExpressionRoleMapping translateClusterStateMapping(ExpressionRoleMapping mapping) { + Map metadata = new HashMap<>(mapping.getMetadata()); + if (metadata.put(ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_METADATA_FLAG, true) != null) { + logger.error( + "Metadata field [{}] is reserved and will be overwritten with an internal system value. " + + "Rename this field in your role mapping configuration.", + ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_METADATA_FLAG + ); + } + metadata.remove(RoleMappingMetadata.METADATA_NAME_FIELD); + return new ExpressionRoleMapping( + ExpressionRoleMapping.addReadOnlySuffix(mapping.getName()), + mapping.getExpression(), + mapping.getRoles(), + mapping.getRoleTemplates(), + metadata, + mapping.isEnabled() + ); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 82a3b4f000064..682ade925d2ec 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -16,24 +17,41 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import static org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping.validateNoReadOnlySuffix; + public class TransportPutRoleMappingAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; + private final ClusterStateRoleMapper clusterStateRoleMapper; @Inject public TransportPutRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - NativeRoleMappingStore roleMappingStore + NativeRoleMappingStore roleMappingStore, + ClusterStateRoleMapper clusterStateRoleMapper ) { super(PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.roleMappingStore = roleMappingStore; + this.clusterStateRoleMapper = clusterStateRoleMapper; } @Override protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener listener) { + validateNoReadOnlySuffix(request.getName()); + if (clusterStateRoleMapper.hasMapping(request.getName())) { + // Allow to define a mapping with the same name in the native role mapping store as the file_settings namespace, but add a + // warning header to signal to the caller that this could be a problem. + HeaderWarning.addWarning( + "A read-only role mapping with the same name [" + + request.getName() + + "] has been previously defined in a configuration file. " + + "Both role mappings will be used to determine role assignments." 
+ ); + } roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java index 5dea6a938263c..99e3311283920 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.script.ScriptService; import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -21,6 +22,7 @@ import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.SecurityExtension.SecurityComponents; @@ -28,8 +30,7 @@ * A role mapper the reads the role mapping rules (i.e. {@link ExpressionRoleMapping}s) from the cluster state * (i.e. {@link RoleMappingMetadata}). This is not enabled by default. 
*/ -public final class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener { - +public class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener { /** * This setting is never registered by the xpack security plugin - in order to disable the * cluster-state based role mapper another plugin must register it as a boolean setting @@ -81,13 +82,26 @@ public void clusterChanged(ClusterChangedEvent event) { } } - private Set getMappings() { + public boolean hasMapping(String name) { + if (enabled == false) { + return false; + } + return false == getMappings(Set.of(name)).isEmpty(); + } + + public Set getMappings() { + return getMappings(null); + } + + public Set getMappings(@Nullable Set names) { if (enabled == false) { return Set.of(); - } else { - final Set mappings = RoleMappingMetadata.getFromClusterState(clusterService.state()).getRoleMappings(); - logger.trace("Retrieved [{}] mapping(s) from cluster state", mappings.size()); + } + final Set mappings = RoleMappingMetadata.getFromClusterState(clusterService.state()).getRoleMappings(); + logger.trace("Retrieved [{}] mapping(s) from cluster state", mappings.size()); + if (names == null || names.isEmpty()) { return mappings; } + return mappings.stream().filter(roleMapping -> names.contains(roleMapping.getName())).collect(Collectors.toSet()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index 6e8698f095d32..010c19e8cc1b1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -9,7 
+9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; @@ -19,21 +18,26 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import org.hamcrest.Matchers; import org.junit.Before; -import java.util.Arrays; +import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; -import static org.hamcrest.Matchers.arrayContaining; +import static org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_METADATA_FLAG; +import static org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_SUFFIX; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anySet; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -42,8 +46,10 @@ public class TransportGetRoleMappingsActionTests extends ESTestCase { private NativeRoleMappingStore store; 
private TransportGetRoleMappingsAction action; - private AtomicReference> namesRef; - private List result; + private AtomicReference> nativeNamesRef; + private AtomicReference> clusterStateNamesRef; + private List nativeMappings; + private Set clusterStateMappings; @SuppressWarnings("unchecked") @Before @@ -58,68 +64,219 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportGetRoleMappingsAction(mock(ActionFilters.class), transportService, store); + ClusterStateRoleMapper clusterStateRoleMapper = mock(); + action = new TransportGetRoleMappingsAction(mock(ActionFilters.class), transportService, store, clusterStateRoleMapper); - namesRef = new AtomicReference<>(null); - result = Collections.emptyList(); + nativeNamesRef = new AtomicReference<>(null); + clusterStateNamesRef = new AtomicReference<>(null); + nativeMappings = Collections.emptyList(); + clusterStateMappings = Collections.emptySet(); + + doAnswer(invocation -> { + Object[] args = invocation.getArguments(); + assert args.length == 1; + clusterStateNamesRef.set((Set) args[0]); + return clusterStateMappings; + }).when(clusterStateRoleMapper).getMappings(anySet()); doAnswer(invocation -> { Object[] args = invocation.getArguments(); assert args.length == 2; - namesRef.set((Set) args[0]); + nativeNamesRef.set((Set) args[0]); ActionListener> listener = (ActionListener>) args[1]; - listener.onResponse(result); + listener.onResponse(nativeMappings); return null; }).when(store).getRoleMappings(nullable(Set.class), any(ActionListener.class)); } - public void testGetSingleRole() throws Exception { - final PlainActionFuture future = new PlainActionFuture<>(); - final GetRoleMappingsRequest request = new GetRoleMappingsRequest(); - request.setNames("everyone"); + public void testGetSingleRoleMappingNativeOnly() throws Exception { + testGetMappings(List.of(mapping("everyone")), Collections.emptySet(), Set.of("everyone"), Set.of("everyone"), "everyone"); + } - final ExpressionRoleMapping 
mapping = mock(ExpressionRoleMapping.class); - result = Collections.singletonList(mapping); - action.doExecute(mock(Task.class), request, future); - assertThat(future.get(), notNullValue()); - assertThat(future.get().mappings(), arrayContaining(mapping)); - assertThat(namesRef.get(), containsInAnyOrder("everyone")); + public void testGetMultipleNamedRoleMappingsNativeOnly() throws Exception { + testGetMappings( + List.of(mapping("admin"), mapping("engineering"), mapping("sales"), mapping("finance")), + Collections.emptySet(), + Set.of("admin", "engineering", "sales", "finance"), + Set.of("admin", "engineering", "sales", "finance"), + "admin", + "engineering", + "sales", + "finance" + ); } - public void testGetMultipleNamedRoles() throws Exception { - final PlainActionFuture future = new PlainActionFuture<>(); - final GetRoleMappingsRequest request = new GetRoleMappingsRequest(); - request.setNames("admin", "engineering", "sales", "finance"); + public void testGetAllRoleMappingsNativeOnly() throws Exception { + testGetMappings( + List.of(mapping("admin"), mapping("engineering"), mapping("sales"), mapping("finance")), + Collections.emptySet(), + Set.of(), + Set.of() + ); + } - final ExpressionRoleMapping mapping1 = mock(ExpressionRoleMapping.class); - final ExpressionRoleMapping mapping2 = mock(ExpressionRoleMapping.class); - final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); - result = Arrays.asList(mapping1, mapping2, mapping3); + public void testGetSingleRoleMappingClusterStateOnly() throws Exception { + testGetMappings(List.of(), Set.of(mapping("everyone")), Set.of("everyone"), Set.of("everyone"), "everyone"); + } - action.doExecute(mock(Task.class), request, future); + public void testGetMultipleNamedRoleMappingsClusterStateOnly() throws Exception { + testGetMappings( + List.of(), + Set.of(mapping("admin"), mapping("engineering"), mapping("sales"), mapping("finance")), + Set.of("admin", "engineering", "sales", "finance"), + Set.of("admin", 
"engineering", "sales", "finance"), + "admin", + "engineering", + "sales", + "finance" + ); + } + + public void testGetAllRoleMappingsClusterStateOnly() throws Exception { + testGetMappings( + List.of(), + Set.of(mapping("admin"), mapping("engineering"), mapping("sales"), mapping("finance")), + Set.of(), + Set.of() + ); + } + + public void testGetSingleRoleMappingBoth() throws Exception { + testGetMappings(List.of(mapping("everyone")), Set.of(mapping("everyone")), Set.of("everyone"), Set.of("everyone"), "everyone"); + } + + public void testGetMultipleNamedRoleMappingsBoth() throws Exception { + testGetMappings( + List.of(mapping("admin"), mapping("engineering")), + Set.of(mapping("sales"), mapping("finance")), + Set.of("admin", "engineering", "sales", "finance"), + Set.of("admin", "engineering", "sales", "finance"), + "admin", + "engineering", + "sales", + "finance" + ); + } + + public void testGetAllRoleMappingsClusterBoth() throws Exception { + testGetMappings(List.of(mapping("admin"), mapping("engineering")), Set.of(mapping("admin"), mapping("sales")), Set.of(), Set.of()); + } + + public void testGetSingleRoleMappingQueryWithReadOnlySuffix() throws Exception { + testGetMappings( + List.of(), + Set.of(mapping("everyone")), + // suffix not stripped for native store query + Set.of("everyone" + READ_ONLY_ROLE_MAPPING_SUFFIX), + // suffix is stripped for cluster state store + Set.of("everyone"), + "everyone" + READ_ONLY_ROLE_MAPPING_SUFFIX + ); + + testGetMappings( + List.of(), + Set.of(mapping("everyoneread-only-operator-mapping")), + Set.of( + "everyoneread-only-operator-mapping", + "everyone-read-only-operator-mapping-", + "everyone-read-only-operator-mapping-more" + ), + // suffix that is similar but not the same is not stripped + Set.of( + "everyoneread-only-operator-mapping", + "everyone-read-only-operator-mapping-", + "everyone-read-only-operator-mapping-more" + ), + "everyoneread-only-operator-mapping", + "everyone-read-only-operator-mapping-", + 
"everyone-read-only-operator-mapping-more" + ); + + testGetMappings( + List.of(mapping("everyone")), + Set.of(mapping("everyone")), + // suffix not stripped for native store query + Set.of("everyone" + READ_ONLY_ROLE_MAPPING_SUFFIX, "everyone"), + // suffix is stripped for cluster state store + Set.of("everyone"), + "everyone" + READ_ONLY_ROLE_MAPPING_SUFFIX, + "everyone" + ); + } + + public void testClusterStateRoleMappingWithFallbackNameOmitted() throws ExecutionException, InterruptedException { + testGetMappings( + List.of(), + Set.of(mapping("name_not_available_after_deserialization")), + Set.of(), + Set.of("name_not_available_after_deserialization"), + Set.of("name_not_available_after_deserialization"), + "name_not_available_after_deserialization" + ); - final GetRoleMappingsResponse response = future.get(); - assertThat(response, notNullValue()); - assertThat(response.mappings(), arrayContainingInAnyOrder(mapping1, mapping2, mapping3)); - assertThat(namesRef.get(), containsInAnyOrder("admin", "engineering", "sales", "finance")); + testGetMappings( + List.of(mapping("name_not_available_after_deserialization")), + Set.of(mapping("name_not_available_after_deserialization")), + Set.of(), + Set.of("name_not_available_after_deserialization"), + Set.of("name_not_available_after_deserialization"), + "name_not_available_after_deserialization" + ); + } + + private void testGetMappings( + List returnedNativeMappings, + Set returnedClusterStateMappings, + Set expectedNativeNames, + Set expectedClusterStateNames, + String... 
names + ) throws InterruptedException, ExecutionException { + testGetMappings( + returnedNativeMappings, + returnedClusterStateMappings, + returnedClusterStateMappings.stream().map(this::expectedClusterStateMapping).collect(Collectors.toSet()), + expectedNativeNames, + expectedClusterStateNames, + names + ); } - public void testGetAllRoles() throws Exception { + private void testGetMappings( + List returnedNativeMappings, + Set returnedClusterStateMappings, + Set expectedClusterStateMappings, + Set expectedNativeNames, + Set expectedClusterStateNames, + String... names + ) throws InterruptedException, ExecutionException { final PlainActionFuture future = new PlainActionFuture<>(); final GetRoleMappingsRequest request = new GetRoleMappingsRequest(); - request.setNames(Strings.EMPTY_ARRAY); - - final ExpressionRoleMapping mapping1 = mock(ExpressionRoleMapping.class); - final ExpressionRoleMapping mapping2 = mock(ExpressionRoleMapping.class); - final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); - result = Arrays.asList(mapping1, mapping2, mapping3); + request.setNames(names); + nativeMappings = returnedNativeMappings; + clusterStateMappings = returnedClusterStateMappings; action.doExecute(mock(Task.class), request, future); + assertThat(future.get(), notNullValue()); + List combined = new ArrayList<>(returnedNativeMappings); + combined.addAll(expectedClusterStateMappings); + ExpressionRoleMapping[] actualMappings = future.get().mappings(); + assertThat(actualMappings, arrayContainingInAnyOrder(combined.toArray(new ExpressionRoleMapping[0]))); + assertThat(nativeNamesRef.get(), containsInAnyOrder(expectedNativeNames.toArray(new String[0]))); + assertThat(clusterStateNamesRef.get(), containsInAnyOrder(expectedClusterStateNames.toArray(new String[0]))); + } - final GetRoleMappingsResponse response = future.get(); - assertThat(response, notNullValue()); - assertThat(response.mappings(), arrayContainingInAnyOrder(mapping1, mapping2, mapping3)); - 
assertThat(namesRef.get(), Matchers.nullValue(Set.class)); + private ExpressionRoleMapping mapping(String name) { + return new ExpressionRoleMapping(name, null, null, null, Map.of(), true); } + private ExpressionRoleMapping expectedClusterStateMapping(ExpressionRoleMapping mapping) { + return new ExpressionRoleMapping( + mapping.getName() + READ_ONLY_ROLE_MAPPING_SUFFIX, + null, + null, + null, + Map.of(READ_ONLY_ROLE_MAPPING_METADATA_FLAG, true), + true + ); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 6f789a10a3a6c..6d1ac864d20fd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.junit.Before; @@ -29,18 +30,21 @@ import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static 
org.mockito.Mockito.when; public class TransportPutRoleMappingActionTests extends ESTestCase { private NativeRoleMappingStore store; private TransportPutRoleMappingAction action; private AtomicReference requestRef; + private ClusterStateRoleMapper clusterStateRoleMapper; @SuppressWarnings("unchecked") @Before @@ -55,7 +59,9 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store); + clusterStateRoleMapper = mock(); + when(clusterStateRoleMapper.hasMapping(any())).thenReturn(false); + action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store, clusterStateRoleMapper); requestRef = new AtomicReference<>(null); @@ -85,6 +91,41 @@ public void testPutValidMapping() throws Exception { assertThat(mapping.getMetadata().get("dumb"), equalTo(true)); } + public void testValidMappingClashingClusterStateMapping() throws Exception { + final FieldExpression expression = new FieldExpression("username", Collections.singletonList(new FieldExpression.FieldValue("*"))); + final PutRoleMappingResponse response = put("anarchy", expression, "superuser", Collections.singletonMap("dumb", true)); + when(clusterStateRoleMapper.hasMapping(any())).thenReturn(true); + + assertThat(response.isCreated(), equalTo(true)); + + final ExpressionRoleMapping mapping = requestRef.get().getMapping(); + assertThat(mapping.getExpression(), is(expression)); + assertThat(mapping.isEnabled(), equalTo(true)); + assertThat(mapping.getName(), equalTo("anarchy")); + assertThat(mapping.getRoles(), iterableWithSize(1)); + assertThat(mapping.getRoles(), contains("superuser")); + assertThat(mapping.getMetadata(), aMapWithSize(1)); + assertThat(mapping.getMetadata().get("dumb"), equalTo(true)); + } + + public void testInvalidSuffix() { + final FieldExpression expression = new FieldExpression("username", Collections.singletonList(new FieldExpression.FieldValue("*"))); + String name = 
ExpressionRoleMapping.addReadOnlySuffix("anarchy"); + final var ex = expectThrows(IllegalArgumentException.class, () -> { + put(name, expression, "superuser", Collections.singletonMap("dumb", true)); + }); + assertThat( + ex.getMessage(), + containsString( + "Invalid mapping name [" + + name + + "]. [" + + ExpressionRoleMapping.READ_ONLY_ROLE_MAPPING_SUFFIX + + "] is not an allowed suffix" + ) + ); + } + private PutRoleMappingResponse put(String name, FieldExpression expression, String role, Map metadata) throws Exception { final PutRoleMappingRequest request = new PutRoleMappingRequest(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java index 8e2f713e6ed3e..d9cbbaafa1779 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java @@ -83,9 +83,10 @@ private void doTestGeometry(LongFunction h3ToGeometry, boolean hasArea visitor.reset(centerChild); reader.visit(visitor); if (hasArea) { - if (h3CrossesDateline && visitor.getLeftX() > visitor.getRightX()) { - // if both polygons crosses the dateline it cannot be inside due to the polygon splitting technique - assertEquals("failing h3: " + h3, GeoRelation.QUERY_CROSSES, visitor.relation()); + if (h3CrossesDateline) { + // if the h3 crosses the dateline, we might get CROSSES due to the polygon splitting technique. 
We can't + // be sure which one is the correct one, so we just check that it is not DISJOINT + assertNotSame("failing h3: " + h3, GeoRelation.QUERY_DISJOINT, visitor.relation()); } else { assertEquals("failing h3: " + h3, GeoRelation.QUERY_INSIDE, visitor.relation()); } diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/settings_endpoints/10_watcher_settings.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/settings_endpoints/10_watcher_settings.yml new file mode 100644 index 0000000000000..f639b4f8f1a77 --- /dev/null +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/settings_endpoints/10_watcher_settings.yml @@ -0,0 +1,104 @@ +--- +setup: + - do: + cluster.health: + wait_for_status: yellow + - do: + watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "payload": { + "send": "yes" + } + } + }, + "condition": { + "always": {} + }, + "actions": { + "test_index": { + "index": { + "index": "test" + } + } + } + } + +--- +"Test update and get watch settings api": + - do: + watcher.get_settings: { } + + - match: { index.auto_expand_replicas: "0-1" } + - match: { index.number_of_replicas: "0" } + + - do: + watcher.update_settings: + body: + index.auto_expand_replicas: "0-all" + + - do: + watcher.get_settings: { } + + - match: { index.auto_expand_replicas: "0-all" } + - is_false: index.routing.allocation.include._tier_preference + + - do: + watcher.update_settings: + body: + index.auto_expand_replicas: null + index.number_of_replicas: 1 + + - do: + watcher.get_settings: { } + + - match: { index.number_of_replicas: "1" } +--- +"Test disallowed setting name throws error": + - requires: + test_runner_features: regex + - do: + watcher.update_settings: + body: + index.disallowed_setting: "some_invalid_value" + catch: bad_request + - match: + error: + 
type: "action_request_validation_exception" + reason: '/illegal settings\: \[index.disallowed_setting\].*/' +--- +"Test allowed prefix setting name": + - do: + watcher.update_settings: + body: + index.routing.allocation.include.role: "watcher" + index.routing.allocation.exclude.role: "noWatcher" + index.routing.allocation.require.role: "mustWatcher" + - do: + watcher.get_settings: { } + - match: { index.routing.allocation.include.role: "watcher" } + - match: { index.routing.allocation.exclude.role: "noWatcher" } + - match: { index.routing.allocation.require.role: "mustWatcher" } +--- +"Test explicitly disallowed prefix setting name throws error": + - requires: + test_runner_features: regex + - do: + watcher.update_settings: + body: + index.routing.allocation.include.disallowed_prefix: "some_invalid_value" + catch: bad_request + - match: + error: + type: "action_request_validation_exception" + reason: '/illegal settings\: \[index.routing.allocation.include.disallowed_prefix\].*/' diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/update_settings/10_update_watcher_settings.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/update_settings/10_update_watcher_settings.yml deleted file mode 100644 index d7478d643a98a..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/update_settings/10_update_watcher_settings.yml +++ /dev/null @@ -1,66 +0,0 @@ ---- -setup: - - do: - cluster.health: - wait_for_status: yellow - ---- -"Test update and get watch settings api": - - do: - watcher.put_watch: - id: "my_watch" - body: > - { - "trigger": { - "schedule": { - "hourly": { - "minute": [ 0, 5 ] - } - } - }, - "input": { - "simple": { - "payload": { - "send": "yes" - } - } - }, - "condition": { - "always": {} - }, - "actions": { - "test_index": { - "index": { - "index": "test" - } - } - } - } - - match: { _id: "my_watch" } - - - do: - 
watcher.get_settings: {} - - - match: { index.auto_expand_replicas: "0-1" } - - match: { index.number_of_replicas: "0" } - - - do: - watcher.update_settings: - body: - index.auto_expand_replicas: "0-all" - - - do: - watcher.get_settings: {} - - - match: { index.auto_expand_replicas: "0-all" } - - - do: - watcher.update_settings: - body: - index.auto_expand_replicas: null - index.number_of_replicas: 1 - - - do: - watcher.get_settings: {} - - - match: { index.number_of_replicas: "1" } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportGetWatcherSettingsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportGetWatcherSettingsAction.java index 29349735afcd2..2962bffd68b66 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportGetWatcherSettingsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportGetWatcherSettingsAction.java @@ -23,8 +23,10 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.transport.actions.put.GetWatcherSettingsAction; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.UpdateWatcherSettingsAction; +import static org.elasticsearch.xpack.core.watcher.transport.actions.put.UpdateWatcherSettingsAction.ALLOWED_SETTINGS_PREFIXES; +import static org.elasticsearch.xpack.core.watcher.transport.actions.put.UpdateWatcherSettingsAction.ALLOWED_SETTING_KEYS; +import static org.elasticsearch.xpack.core.watcher.transport.actions.put.UpdateWatcherSettingsAction.EXPLICITLY_DENIED_SETTINGS; import static org.elasticsearch.xpack.watcher.transport.actions.TransportUpdateWatcherSettingsAction.WATCHER_INDEX_NAME; import static org.elasticsearch.xpack.watcher.transport.actions.TransportUpdateWatcherSettingsAction.WATCHER_INDEX_REQUEST; @@ 
-73,11 +75,14 @@ protected void masterOperation( */ private static Settings filterSettableSettings(Settings settings) { Settings.Builder builder = Settings.builder(); - for (String settingName : UpdateWatcherSettingsAction.ALLOWED_SETTING_KEYS) { - if (settings.hasValue(settingName)) { - builder.put(settingName, settings.get(settingName)); - } - } + settings.keySet() + .stream() + .filter( + setting -> (ALLOWED_SETTING_KEYS.contains(setting) + || ALLOWED_SETTINGS_PREFIXES.stream().anyMatch(prefix -> setting.startsWith(prefix + "."))) + && EXPLICITLY_DENIED_SETTINGS.contains(setting) == false + ) + .forEach(setting -> builder.put(setting, settings.get(setting))); return builder.build(); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java index ced131640f0ee..cc8d0edf37014 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java @@ -17,6 +17,8 @@ import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.core.watcher.watch.Watch; +import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; +import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleRegistry; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger; @@ -32,6 +34,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import 
java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; @@ -67,7 +70,11 @@ public synchronized void start(Collection jobs) { Map startingSchedules = Maps.newMapWithExpectedSize(jobs.size()); for (Watch job : jobs) { if (job.trigger() instanceof ScheduleTrigger trigger) { - startingSchedules.put(job.id(), new ActiveSchedule(job.id(), trigger.getSchedule(), startTime)); + if (trigger.getSchedule() instanceof IntervalSchedule) { + startingSchedules.put(job.id(), new ActiveSchedule(job.id(), trigger.getSchedule(), calculateLastStartTime(job))); + } else { + startingSchedules.put(job.id(), new ActiveSchedule(job.id(), trigger.getSchedule(), startTime)); + } } } // why are we calling putAll() here instead of assigning a brand @@ -108,10 +115,39 @@ public void add(Watch watch) { // watcher indexing listener // this also means that updating an existing watch would not retrigger the schedule time, if it remains the same schedule if (currentSchedule == null || currentSchedule.schedule.equals(trigger.getSchedule()) == false) { - schedules.put(watch.id(), new ActiveSchedule(watch.id(), trigger.getSchedule(), clock.millis())); + if (trigger.getSchedule() instanceof IntervalSchedule) { + schedules.put(watch.id(), new ActiveSchedule(watch.id(), trigger.getSchedule(), calculateLastStartTime(watch))); + } else { + schedules.put(watch.id(), new ActiveSchedule(watch.id(), trigger.getSchedule(), clock.millis())); + } + } } + /** + * Attempts to calculate the epoch millis of the last time the watch was checked, If the watch has never been checked, the timestamp of + * the last state change is used. If the watch has never been checked and has never been in an active state, the current time is used. 
+ * @param job the watch to calculate the last start time for + * @return the epoch millis of the last time the watch was checked or now + */ + private long calculateLastStartTime(Watch job) { + var lastChecked = Optional.ofNullable(job) + .map(Watch::status) + .map(WatchStatus::lastChecked) + .map(ZonedDateTime::toInstant) + .map(Instant::toEpochMilli); + + return lastChecked.orElseGet( + () -> Optional.ofNullable(job) + .map(Watch::status) + .map(WatchStatus::state) + .map(WatchStatus.State::getTimestamp) + .map(ZonedDateTime::toInstant) + .map(Instant::toEpochMilli) + .orElse(clock.millis()) + ); + } + @Override public boolean remove(String jobId) { logger.debug("Removing watch [{}] from engine (engine is running: {})", jobId, isRunning.get()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java index 8b7cfa75f9229..9a12b8f394eb2 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; +import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput; import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule; @@ -283,6 +284,244 @@ public void testAddOnlyWithNewSchedule() { assertThat(engine.getSchedules().get("_id"), not(is(activeSchedule))); } + /** + * This test verifies that a watch with a valid 
lastCheckedTime executes before the interval time to ensure the job resumes waiting + * from the same point it left off before the reallocation / restart + */ + public void testWatchWithLastCheckedTimeExecutesBeforeInitialInterval() throws Exception { + final var firstLatch = new CountDownLatch(1); + final var secondLatch = new CountDownLatch(1); + + Watch watch = new Watch( + "watch", + new ScheduleTrigger(interval("1s")), + new ExecutableNoneInput(), + InternalAlwaysCondition.INSTANCE, + null, + null, + Collections.emptyList(), + null, + new WatchStatus(-1L, null, null, clock.instant().minusMillis(500).atZone(ZoneOffset.UTC), null, null, null), + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); + + var watches = Collections.singletonList(watch); + + var runCount = new AtomicInteger(0); + + engine.register(events -> { + for (TriggerEvent ignored : events) { + if (runCount.get() == 0) { + logger.info("job first fire"); + firstLatch.countDown(); + } else { + logger.info("job second fire"); + secondLatch.countDown(); + } + runCount.incrementAndGet(); + } + }); + + engine.start(watches); + advanceClockIfNeeded(clock.instant().plusMillis(510).atZone(ZoneOffset.UTC)); + if (firstLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + if (secondLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + assertThat(runCount.get(), is(2)); + + engine.stop(); + } + + /** + * This test verifies that a watch without a lastCheckedTime but with a valid activationTime executes before the interval time to + * ensure the job resumes waiting from the same point it left off before the reallocation / restart + */ + public void testWatchWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval() throws Exception { + final var firstLatch = new 
CountDownLatch(1); + final var secondLatch = new CountDownLatch(1); + + Watch watch = new Watch( + "watch", + new ScheduleTrigger(interval("1s")), + new ExecutableNoneInput(), + InternalAlwaysCondition.INSTANCE, + null, + null, + Collections.emptyList(), + null, + new WatchStatus( + -1L, + new WatchStatus.State(true, clock.instant().minusMillis(500).atZone(ZoneOffset.UTC)), + null, + null, + null, + null, + null + ), + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); + + var watches = Collections.singletonList(watch); + + var runCount = new AtomicInteger(0); + + engine.register(events -> { + for (TriggerEvent ignored : events) { + if (runCount.get() == 0) { + logger.info("job first fire"); + firstLatch.countDown(); + } else { + logger.info("job second fire"); + secondLatch.countDown(); + } + runCount.incrementAndGet(); + } + }); + + engine.start(watches); + advanceClockIfNeeded(clock.instant().plusMillis(510).atZone(ZoneOffset.UTC)); + if (firstLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + if (secondLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + assertThat(runCount.get(), is(2)); + + engine.stop(); + } + + /** + * This test verifies that a watch added after service start with a lastCheckedTime executes before the interval time to ensure the job + * resumes waiting from the same point it left off before the reallocation / restart + */ + public void testAddWithLastCheckedTimeExecutesBeforeInitialInterval() throws Exception { + final var firstLatch = new CountDownLatch(1); + final var secondLatch = new CountDownLatch(1); + + Watch watch = new Watch( + "watch", + new ScheduleTrigger(interval("1s")), + new ExecutableNoneInput(), + InternalAlwaysCondition.INSTANCE, + null, + null, + Collections.emptyList(), + null, + new 
WatchStatus(-1L, null, null, clock.instant().minusMillis(500).atZone(ZoneOffset.UTC), null, null, null), + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); + + var runCount = new AtomicInteger(0); + + engine.register(events -> { + for (TriggerEvent ignored : events) { + if (runCount.get() == 0) { + logger.info("job first fire"); + firstLatch.countDown(); + } else { + logger.info("job second fire"); + secondLatch.countDown(); + } + runCount.incrementAndGet(); + } + }); + + engine.start(Collections.emptyList()); + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + engine.add(watch); + + advanceClockIfNeeded(clock.instant().plusMillis(510).atZone(ZoneOffset.UTC)); + if (firstLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + if (secondLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + assertThat(runCount.get(), is(2)); + + engine.stop(); + } + + /** + * This test verifies that a watch added after service start without a lastCheckedTime but with a valid activationTime executes before + * the interval time to ensure the job resumes waiting from the same point it left off before the reallocation / restart + */ + public void testAddWithNoLastCheckedTimeButHasActivationTimeExecutesBeforeInitialInterval() throws Exception { + final var firstLatch = new CountDownLatch(1); + final var secondLatch = new CountDownLatch(1); + + Watch watch = new Watch( + "watch", + new ScheduleTrigger(interval("1s")), + new ExecutableNoneInput(), + InternalAlwaysCondition.INSTANCE, + null, + null, + Collections.emptyList(), + null, + new WatchStatus( + -1L, + new WatchStatus.State(true, clock.instant().minusMillis(500).atZone(ZoneOffset.UTC)), + null, + null, + null, + null, + null + ), + SequenceNumbers.UNASSIGNED_SEQ_NO, + 
SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); + + var runCount = new AtomicInteger(0); + + engine.register(events -> { + for (TriggerEvent ignored : events) { + if (runCount.get() == 0) { + logger.info("job first fire"); + firstLatch.countDown(); + } else { + logger.info("job second fire"); + secondLatch.countDown(); + } + runCount.incrementAndGet(); + } + }); + + engine.start(Collections.emptyList()); + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + engine.add(watch); + + advanceClockIfNeeded(clock.instant().plusMillis(510).atZone(ZoneOffset.UTC)); + if (firstLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + advanceClockIfNeeded(clock.instant().plusMillis(1100).atZone(ZoneOffset.UTC)); + if (secondLatch.await(3, TimeUnit.SECONDS) == false) { + fail("waiting too long for all watches to be triggered"); + } + + assertThat(runCount.get(), is(2)); + + engine.stop(); + } + private Watch createWatch(String name, Schedule schedule) { return new Watch( name,