diff --git a/CHANGELOG.md b/CHANGELOG.md index 8d552b5eed83b..cbe2613f1e46a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Increase segrep pressure checkpoint default limit to 30 ([#16577](https://github.com/opensearch-project/OpenSearch/pull/16577/files)) - Add dynamic setting allowing size > 0 requests to be cached in the request cache ([#16483](https://github.com/opensearch-project/OpenSearch/pull/16483)) - Make IndexStoreListener a pluggable interface ([#16583](https://github.com/opensearch-project/OpenSearch/pull/16583)) +- Support for keyword fields in star-tree index ([#16233](https://github.com/opensearch-project/OpenSearch/pull/16233)) - Add a flag in QueryShardContext to differentiate inner hit query ([#16600](https://github.com/opensearch-project/OpenSearch/pull/16600)) ### Dependencies diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java index 5840884f5422a..c91c4d7bbb63b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java @@ -56,7 +56,7 @@ public class StarTreeMapperIT extends OpenSearchIntegTestCase { .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)) .build(); - private static XContentBuilder createMinimalTestMapping(boolean invalidDim, boolean invalidMetric, boolean keywordDim) { + private static XContentBuilder createMinimalTestMapping(boolean invalidDim, boolean invalidMetric, boolean ipdim) { try { return jsonBuilder().startObject() .startObject("composite") @@ -68,12 +68,15 @@ private static XContentBuilder createMinimalTestMapping(boolean invalidDim, bool .endObject() .startArray("ordered_dimensions") .startObject() - 
.field("name", getDim(invalidDim, keywordDim)) + .field("name", getDim(invalidDim, ipdim)) + .endObject() + .startObject() + .field("name", "keyword_dv") .endObject() .endArray() .startArray("metrics") .startObject() - .field("name", getDim(invalidMetric, false)) + .field("name", getMetric(invalidMetric, false)) .endObject() .endArray() .endObject() @@ -99,6 +102,10 @@ private static XContentBuilder createMinimalTestMapping(boolean invalidDim, bool .field("type", "keyword") .field("doc_values", false) .endObject() + .startObject("ip") + .field("type", "ip") + .field("doc_values", false) + .endObject() .endObject() .endObject(); } catch (IOException e) { @@ -356,10 +363,19 @@ private XContentBuilder getMappingWithDuplicateFields(boolean isDuplicateDim, bo } private static String getDim(boolean hasDocValues, boolean isKeyword) { + if (hasDocValues) { + return random().nextBoolean() ? "numeric" : "keyword"; + } else if (isKeyword) { + return "ip"; + } + return "numeric_dv"; + } + + private static String getMetric(boolean hasDocValues, boolean isKeyword) { if (hasDocValues) { return "numeric"; } else if (isKeyword) { - return "keyword"; + return "ip"; } return "numeric_dv"; } @@ -398,6 +414,7 @@ public void testValidCompositeIndex() { assertEquals(expectedTimeUnits.get(i).shortName(), dateDim.getSortedCalendarIntervals().get(i).shortName()); } assertEquals("numeric_dv", starTreeFieldType.getDimensions().get(1).getField()); + assertEquals("keyword_dv", starTreeFieldType.getDimensions().get(2).getField()); assertEquals("numeric_dv", starTreeFieldType.getMetrics().get(0).getField()); List expectedMetrics = Arrays.asList(MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.AVG); assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); @@ -665,10 +682,7 @@ public void testInvalidDimCompositeIndex() { IllegalArgumentException.class, () -> prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createMinimalTestMapping(true, false, false)).get() ); 
- assertEquals( - "Aggregations not supported for the dimension field [numeric] with field type [integer] as part of star tree field", - ex.getMessage() - ); + assertTrue(ex.getMessage().startsWith("Aggregations not supported for the dimension field ")); } public void testMaxDimsCompositeIndex() { @@ -734,7 +748,7 @@ public void testUnsupportedDim() { () -> prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createMinimalTestMapping(false, false, true)).get() ); assertEquals( - "Failed to parse mapping [_doc]: unsupported field type associated with dimension [keyword] as part of star tree field [startree-1]", + "Failed to parse mapping [_doc]: unsupported field type associated with dimension [ip] as part of star tree field [startree-1]", ex.getMessage() ); } diff --git a/server/src/main/java/org/apache/lucene/index/DocValuesWriterWrapper.java b/server/src/main/java/org/apache/lucene/index/DocValuesWriterWrapper.java new file mode 100644 index 0000000000000..5329bad776e43 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/index/DocValuesWriterWrapper.java @@ -0,0 +1,18 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.apache.lucene.index; + +import org.apache.lucene.search.DocIdSetIterator; + +/** + * Base wrapper class for DocValuesWriter. 
+ */ +public interface DocValuesWriterWrapper { + T getDocValues(); +} diff --git a/server/src/main/java/org/apache/lucene/index/SortedNumericDocValuesWriterWrapper.java b/server/src/main/java/org/apache/lucene/index/SortedNumericDocValuesWriterWrapper.java index f7759fcced284..582e4c3f87f98 100644 --- a/server/src/main/java/org/apache/lucene/index/SortedNumericDocValuesWriterWrapper.java +++ b/server/src/main/java/org/apache/lucene/index/SortedNumericDocValuesWriterWrapper.java @@ -18,9 +18,9 @@ * * @opensearch.experimental */ -public class SortedNumericDocValuesWriterWrapper { +public class SortedNumericDocValuesWriterWrapper implements DocValuesWriterWrapper { - private final SortedNumericDocValuesWriter sortedNumericDocValuesWriter; + private final SortedNumericDocValuesWriter sortedNumericDocValuesWriterDelegate; /** * Sole constructor. Constructs a new {@link SortedNumericDocValuesWriterWrapper} instance. @@ -29,7 +29,7 @@ public class SortedNumericDocValuesWriterWrapper { * @param counter a counter for tracking memory usage */ public SortedNumericDocValuesWriterWrapper(FieldInfo fieldInfo, Counter counter) { - sortedNumericDocValuesWriter = new SortedNumericDocValuesWriter(fieldInfo, counter); + sortedNumericDocValuesWriterDelegate = new SortedNumericDocValuesWriter(fieldInfo, counter); } /** @@ -39,7 +39,7 @@ public SortedNumericDocValuesWriterWrapper(FieldInfo fieldInfo, Counter counter) * @param value the value to add */ public void addValue(int docID, long value) { - sortedNumericDocValuesWriter.addValue(docID, value); + sortedNumericDocValuesWriterDelegate.addValue(docID, value); } /** @@ -47,7 +47,8 @@ public void addValue(int docID, long value) { * * @return the {@link SortedNumericDocValues} instance */ + @Override public SortedNumericDocValues getDocValues() { - return sortedNumericDocValuesWriter.getDocValues(); + return sortedNumericDocValuesWriterDelegate.getDocValues(); } } diff --git 
a/server/src/main/java/org/apache/lucene/index/SortedSetDocValuesWriterWrapper.java b/server/src/main/java/org/apache/lucene/index/SortedSetDocValuesWriterWrapper.java new file mode 100644 index 0000000000000..95aa242535e48 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/index/SortedSetDocValuesWriterWrapper.java @@ -0,0 +1,58 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.apache.lucene.index; + +import org.apache.lucene.util.ByteBlockPool; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.Counter; + +/** + * A wrapper class for writing sorted set doc values. + *

+ * This class provides a convenient way to add sorted set doc values to a field + * and retrieve the corresponding {@link SortedSetDocValues} instance. + * + * @opensearch.experimental + */ +public class SortedSetDocValuesWriterWrapper implements DocValuesWriterWrapper { + + private final SortedSetDocValuesWriter sortedSetDocValuesWriterDelegate; + + /** + * Sole constructor. Constructs a new {@link SortedSetDocValuesWriterWrapper} instance. + * + * @param fieldInfo the field information for the field being written + * @param counter a counter for tracking memory usage + * @param byteBlockPool a byte block pool for allocating byte blocks + * @see SortedSetDocValuesWriter + */ + public SortedSetDocValuesWriterWrapper(FieldInfo fieldInfo, Counter counter, ByteBlockPool byteBlockPool) { + sortedSetDocValuesWriterDelegate = new SortedSetDocValuesWriter(fieldInfo, counter, byteBlockPool); + } + + /** + * Adds a bytes ref value to the sorted set doc values for the specified document. + * + * @param docID the document ID + * @param value the value to add + */ + public void addValue(int docID, BytesRef value) { + sortedSetDocValuesWriterDelegate.addValue(docID, value); + } + + /** + * Returns the {@link SortedSetDocValues} instance containing the sorted numeric doc values + * + * @return the {@link SortedSetDocValues} instance + */ + @Override + public SortedSetDocValues getDocValues() { + return sortedSetDocValuesWriterDelegate.getDocValues(); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java index 637d3250fda3f..38d3f4867e89b 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java @@ -14,7 +14,7 @@ import 
org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexFileNames; @@ -40,6 +40,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -111,7 +112,7 @@ public Composite912DocValuesReader(DocValuesProducer producer, SegmentReadState readState.segmentInfo.getId(), readState.segmentSuffix ); - + Map dimensionFieldTypeMap = new HashMap<>(); while (true) { // validate magic marker @@ -155,13 +156,16 @@ public Composite912DocValuesReader(DocValuesProducer producer, SegmentReadState compositeIndexInputMap.put(compositeFieldName, starTreeIndexInput); compositeIndexMetadataMap.put(compositeFieldName, starTreeMetadata); - List dimensionFields = starTreeMetadata.getDimensionFields(); - + Map dimensionFieldToDocValuesMap = starTreeMetadata.getDimensionFields(); // generating star tree unique fields (fully qualified name for dimension and metrics) - for (String dimensions : dimensionFields) { - fields.add(fullyQualifiedFieldNameForStarTreeDimensionsDocValues(compositeFieldName, dimensions)); + for (Map.Entry dimensionEntry : dimensionFieldToDocValuesMap.entrySet()) { + String dimName = fullyQualifiedFieldNameForStarTreeDimensionsDocValues( + compositeFieldName, + dimensionEntry.getKey() + ); + fields.add(dimName); + dimensionFieldTypeMap.put(dimName, dimensionEntry.getValue()); } - // adding metric fields for (Metric metric : starTreeMetadata.getMetrics()) { for (MetricStat metricStat : metric.getBaseMetrics()) { @@ -184,7 +188,7 @@ public Composite912DocValuesReader(DocValuesProducer producer, SegmentReadState // populates the dummy list of field infos to fetch doc id 
set iterators for respective fields. // the dummy field info is used to fetch the doc id set iterators for respective fields based on field name - FieldInfos fieldInfos = new FieldInfos(getFieldInfoList(fields)); + FieldInfos fieldInfos = new FieldInfos(getFieldInfoList(fields, dimensionFieldTypeMap)); this.readState = new SegmentReadState( readState.directory, readState.segmentInfo, @@ -291,17 +295,4 @@ public CompositeIndexValues getCompositeIndexValues(CompositeIndexFieldInfo comp } - /** - * Returns the sorted numeric doc values for the given sorted numeric field. - * If the sorted numeric field is null, it returns an empty doc id set iterator. - *

- * Sorted numeric field can be null for cases where the segment doesn't hold a particular value. - * - * @param sortedNumeric the sorted numeric doc values for a field - * @return empty sorted numeric values if the field is not present, else sortedNumeric - */ - public static SortedNumericDocValues getSortedNumericDocValues(SortedNumericDocValues sortedNumeric) { - return sortedNumeric == null ? DocValues.emptySortedNumeric() : sortedNumeric; - } - } diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java index dd35091dece2f..904d6a7aba5c6 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.IndexOutput; import org.opensearch.common.annotation.ExperimentalApi; @@ -29,12 +30,12 @@ import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; import org.opensearch.index.codec.composite.LuceneDocValuesConsumerFactory; -import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; import org.opensearch.index.compositeindex.datacube.startree.builder.StarTreesBuilder; import org.opensearch.index.compositeindex.datacube.startree.index.CompositeIndexValues; import org.opensearch.index.compositeindex.datacube.startree.index.StarTreeValues; import org.opensearch.index.mapper.CompositeMappedFieldType; import org.opensearch.index.mapper.DocCountFieldMapper; 
+import org.opensearch.index.mapper.KeywordFieldMapper; import org.opensearch.index.mapper.MapperService; import java.io.IOException; @@ -71,6 +72,7 @@ public class Composite912DocValuesWriter extends DocValuesConsumer { private final AtomicInteger fieldNumberAcrossCompositeFields; private final Map fieldProducerMap = new HashMap<>(); + private final Map fieldDocIdSetIteratorMap = new HashMap<>(); public Composite912DocValuesWriter(DocValuesConsumer delegate, SegmentWriteState segmentWriteState, MapperService mapperService) throws IOException { @@ -82,14 +84,7 @@ public Composite912DocValuesWriter(DocValuesConsumer delegate, SegmentWriteState this.compositeMappedFieldTypes = mapperService.getCompositeFieldTypes(); compositeFieldSet = new HashSet<>(); segmentFieldSet = new HashSet<>(); - // TODO : add integ test for this - for (FieldInfo fi : this.state.fieldInfos) { - if (DocValuesType.SORTED_NUMERIC.equals(fi.getDocValuesType())) { - segmentFieldSet.add(fi.name); - } else if (fi.name.equals(DocCountFieldMapper.NAME)) { - segmentFieldSet.add(fi.name); - } - } + addStarTreeSupportedFieldsFromSegment(); for (CompositeMappedFieldType type : compositeMappedFieldTypes) { compositeFieldSet.addAll(type.fields()); } @@ -148,6 +143,17 @@ public Composite912DocValuesWriter(DocValuesConsumer delegate, SegmentWriteState segmentHasCompositeFields = Collections.disjoint(segmentFieldSet, compositeFieldSet) == false; } + private void addStarTreeSupportedFieldsFromSegment() { + // TODO : add integ test for this + for (FieldInfo fi : this.state.fieldInfos) { + if (DocValuesType.SORTED_NUMERIC.equals(fi.getDocValuesType()) + || DocValuesType.SORTED_SET.equals(fi.getDocValuesType()) + || fi.name.equals(DocCountFieldMapper.NAME)) { + segmentFieldSet.add(fi.name); + } + } + } + @Override public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { delegate.addNumericField(field, valuesProducer); @@ -179,6 +185,15 @@ public void 
addSortedNumericField(FieldInfo field, DocValuesProducer valuesProdu @Override public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { delegate.addSortedSetField(field, valuesProducer); + // Perform this only during flush flow + if (mergeState.get() == null && segmentHasCompositeFields) { + createCompositeIndicesIfPossible(valuesProducer, field); + } + if (mergeState.get() != null) { + if (compositeFieldSet.contains(field.name)) { + fieldDocIdSetIteratorMap.put(field.name, valuesProducer.getSortedSet(field)); + } + } } @Override @@ -231,6 +246,7 @@ private void createCompositeIndicesIfPossible(DocValuesProducer valuesProducer, * Add empty doc values for fields not present in segment */ private void addDocValuesForEmptyField(String compositeField) { + // special case for doc count if (compositeField.equals(DocCountFieldMapper.NAME)) { fieldProducerMap.put(compositeField, new EmptyDocValuesProducer() { @Override @@ -239,16 +255,31 @@ public NumericDocValues getNumeric(FieldInfo field) { } }); } else { - fieldProducerMap.put(compositeField, new EmptyDocValuesProducer() { - @Override - public SortedNumericDocValues getSortedNumeric(FieldInfo field) { - return DocValues.emptySortedNumeric(); - } - }); + if (isSortedSetField(compositeField)) { + fieldProducerMap.put(compositeField, new EmptyDocValuesProducer() { + @Override + public SortedSetDocValues getSortedSet(FieldInfo field) { + return DocValues.emptySortedSet(); + } + }); + } + // TODO : change this logic to evaluate for sortedNumericField specifically + else { + fieldProducerMap.put(compositeField, new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) { + return DocValues.emptySortedNumeric(); + } + }); + } } compositeFieldSet.remove(compositeField); } + private boolean isSortedSetField(String field) { + return mapperService.fieldType(field) instanceof KeywordFieldMapper.KeywordFieldType; + } + @Override public 
void merge(MergeState mergeState) throws IOException { this.mergeState.compareAndSet(null, mergeState); @@ -272,7 +303,6 @@ private void mergeCompositeFields(MergeState mergeState) throws IOException { */ private void mergeStarTreeFields(MergeState mergeState) throws IOException { Map> starTreeSubsPerField = new HashMap<>(); - StarTreeField starTreeField = null; for (int i = 0; i < mergeState.docValuesProducers.length; i++) { CompositeIndexReader reader = null; if (mergeState.docValuesProducers[i] == null) { diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DateDimension.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DateDimension.java index 8feb9ccd27dbd..88a67e1134067 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DateDimension.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DateDimension.java @@ -99,6 +99,11 @@ public List getSubDimensionNames() { return fields; } + @Override + public DocValuesType getDocValuesType() { + return DocValuesType.SORTED_NUMERIC; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject("date_dimension"); @@ -170,8 +175,4 @@ public int compare(DateTimeUnitRounding unit1, DateTimeUnitRounding unit2) { public static List getSortedDateTimeUnits(List dateTimeUnits) { return dateTimeUnits.stream().sorted(new DateTimeUnitComparator()).collect(Collectors.toList()); } - - public DocValuesType getDocValuesType() { - return DocValuesType.SORTED_NUMERIC; - } } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java index 7e72a3f0d9de6..e834706e2fa9d 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java +++ 
b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java @@ -24,6 +24,7 @@ import java.util.stream.Collectors; import static org.opensearch.index.compositeindex.datacube.DateDimension.CALENDAR_INTERVALS; +import static org.opensearch.index.compositeindex.datacube.KeywordDimension.KEYWORD; /** * Dimension factory class mainly used to parse and create dimension from the mappings @@ -43,6 +44,8 @@ public static Dimension parseAndCreateDimension( return parseAndCreateDateDimension(name, dimensionMap, c); case NumericDimension.NUMERIC: return new NumericDimension(name); + case KEYWORD: + return new KeywordDimension(name); default: throw new IllegalArgumentException( String.format(Locale.ROOT, "unsupported field type associated with dimension [%s] as part of star tree field", name) @@ -56,16 +59,23 @@ public static Dimension parseAndCreateDimension( Map dimensionMap, Mapper.TypeParser.ParserContext c ) { - if (builder.getSupportedDataCubeDimensionType().isPresent() - && builder.getSupportedDataCubeDimensionType().get().equals(DimensionType.DATE)) { - return parseAndCreateDateDimension(name, dimensionMap, c); - } else if (builder.getSupportedDataCubeDimensionType().isPresent() - && builder.getSupportedDataCubeDimensionType().get().equals(DimensionType.NUMERIC)) { + if (builder.getSupportedDataCubeDimensionType().isEmpty()) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "unsupported field type associated with star tree dimension [%s]", name) + ); + } + switch (builder.getSupportedDataCubeDimensionType().get()) { + case DATE: + return parseAndCreateDateDimension(name, dimensionMap, c); + case NUMERIC: return new NumericDimension(name); - } - throw new IllegalArgumentException( - String.format(Locale.ROOT, "unsupported field type associated with star tree dimension [%s]", name) - ); + case KEYWORD: + return new KeywordDimension(name); + default: + throw new IllegalArgumentException( + String.format(Locale.ROOT, 
"unsupported field type associated with star tree dimension [%s]", name) + ); + } } private static DateDimension parseAndCreateDateDimension( diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java index 4b9faea331752..d327f8ca1fa1e 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java @@ -27,5 +27,11 @@ public enum DimensionType { * Represents a date dimension type. * This is used for dimensions that contain date or timestamp values. */ - DATE + DATE, + + /** + * Represents a keyword dimension type. + * This is used for dimensions that contain keyword ordinals. + */ + KEYWORD } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java new file mode 100644 index 0000000000000..58e248fd548d6 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.compositeindex.datacube; + +import org.apache.lucene.index.DocValuesType; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.mapper.CompositeDataCubeFieldType; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.function.Consumer; + +/** + * Composite index keyword dimension class + * + * @opensearch.experimental + */ +@ExperimentalApi +public class KeywordDimension implements Dimension { + public static final String KEYWORD = "keyword"; + private final String field; + + public KeywordDimension(String field) { + this.field = field; + } + + @Override + public String getField() { + return field; + } + + @Override + public int getNumSubDimensions() { + return 1; + } + + @Override + public void setDimensionValues(Long value, Consumer dimSetter) { + // This will set the keyword dimension value's ordinal + dimSetter.accept(value); + } + + @Override + public List getSubDimensionNames() { + return List.of(field); + } + + @Override + public DocValuesType getDocValuesType() { + return DocValuesType.SORTED_SET; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CompositeDataCubeFieldType.NAME, field); + builder.field(CompositeDataCubeFieldType.TYPE, KEYWORD); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + KeywordDimension dimension = (KeywordDimension) o; + return Objects.equals(field, dimension.getField()); + } + + @Override + public int hashCode() { + return Objects.hash(field); + } +} diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/NumericDimension.java 
b/server/src/main/java/org/opensearch/index/compositeindex/datacube/NumericDimension.java index f1d1b15337f4a..fe9e3d17c0047 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/NumericDimension.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/NumericDimension.java @@ -51,6 +51,11 @@ public List getSubDimensionNames() { return List.of(field); } + @Override + public DocValuesType getDocValuesType() { + return DocValuesType.SORTED_NUMERIC; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -72,9 +77,4 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(field); } - - @Override - public DocValuesType getDocValuesType() { - return DocValuesType.SORTED_NUMERIC; - } } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/ReadDimension.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/ReadDimension.java index 0e2ec086abc0a..384553a8f7e06 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/ReadDimension.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/ReadDimension.java @@ -25,9 +25,16 @@ public class ReadDimension implements Dimension { public static final String READ = "read"; private final String field; + private final DocValuesType docValuesType; public ReadDimension(String field) { this.field = field; + this.docValuesType = DocValuesType.SORTED_NUMERIC; + } + + public ReadDimension(String field, DocValuesType docValuesType) { + this.field = field; + this.docValuesType = docValuesType; } public String getField() { @@ -49,6 +56,11 @@ public List getSubDimensionNames() { return List.of(field); } + @Override + public DocValuesType getDocValuesType() { + return docValuesType; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); 
@@ -70,9 +82,4 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(field); } - - @Override - public DocValuesType getDocValuesType() { - return DocValuesType.SORTED_NUMERIC; - } } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeField.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeField.java index 833bf63c04a18..37b59fc1f59c8 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeField.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeField.java @@ -8,6 +8,7 @@ package org.opensearch.index.compositeindex.datacube.startree; +import org.apache.lucene.index.DocValuesType; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -33,6 +34,7 @@ public class StarTreeField implements ToXContent { private final List metrics; private final StarTreeFieldConfiguration starTreeConfig; private final List dimensionNames; + private final List dimensionDocValueTypes; private final List metricNames; public StarTreeField(String name, List dimensions, List metrics, StarTreeFieldConfiguration starTreeConfig) { @@ -41,8 +43,12 @@ public StarTreeField(String name, List dimensions, List metri this.metrics = metrics; this.starTreeConfig = starTreeConfig; dimensionNames = new ArrayList<>(); + dimensionDocValueTypes = new ArrayList<>(); for (Dimension dimension : dimensions) { - dimensionNames.addAll(dimension.getSubDimensionNames()); + for (String dimensionName : dimension.getSubDimensionNames()) { + dimensionNames.add(dimensionName); + dimensionDocValueTypes.add(dimension.getDocValuesType()); + } } metricNames = new ArrayList<>(); for (Metric metric : metrics) { @@ -64,6 +70,10 @@ public List getDimensionNames() { return dimensionNames; } + public List getDimensionDocValueTypes() { + 
return dimensionDocValueTypes; + } + public List getMetricNames() { return metricNames; } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java index 3054e8e66b601..cf36f2d7d4126 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java @@ -13,15 +13,23 @@ import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.DocValuesWriterWrapper; import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.OrdinalMap; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValuesWriterWrapper; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.SortedSetDocValuesWriterWrapper; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.ByteBlockPool; import org.apache.lucene.util.Counter; +import org.apache.lucene.util.LongValues; import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.packed.PackedInts; import org.opensearch.index.compositeindex.datacube.Dimension; import org.opensearch.index.compositeindex.datacube.Metric; import org.opensearch.index.compositeindex.datacube.MetricStat; @@ -36,6 +44,7 @@ import org.opensearch.index.compositeindex.datacube.startree.node.StarTreeNodeType; import org.opensearch.index.compositeindex.datacube.startree.utils.SequentialDocValuesIterator; import 
org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedNumericStarTreeValuesIterator; +import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedSetStarTreeValuesIterator; import org.opensearch.index.mapper.DocCountFieldMapper; import org.opensearch.index.mapper.FieldMapper; import org.opensearch.index.mapper.FieldValueConverter; @@ -45,6 +54,8 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -86,12 +97,20 @@ public abstract class BaseStarTreeBuilder implements StarTreeBuilder { protected final int maxLeafDocuments; List dimensionsSplitOrder = new ArrayList<>(); protected final InMemoryTreeNode rootNode = getNewNode(); - protected final StarTreeField starTreeField; private final SegmentWriteState writeState; private final IndexOutput metaOut; private final IndexOutput dataOut; + private final Counter bytesUsed = Counter.newCounter(); + private Map flushSortedSetDocValuesMap = new HashMap<>(); + // Maintains list of sortedSetDocValues for each star tree dimension field across segments during merge + private Map> mergeSortedSetDimensionsMap = new HashMap<>(); + // Maintains ordinalMap for each star tree dimension field during merge + private Map mergeSortedSetDimensionsOrdinalMap = new HashMap<>(); + + // This should be true for merge flows + protected boolean isMerge = false; /** * Reads all the configuration related to dimensions and metrics, builds a star-tree based on the different construction parameters. 
@@ -233,11 +252,23 @@ public void build( String dimension = dimensionsSplitOrder.get(i).getField(); FieldInfo dimensionFieldInfo = writeState.fieldInfos.fieldInfo(dimension); if (dimensionFieldInfo == null) { - dimensionFieldInfo = getFieldInfo(dimension, DocValuesType.SORTED_NUMERIC); + dimensionFieldInfo = getFieldInfo(dimension, dimensionsSplitOrder.get(i).getDocValuesType()); } - dimensionReaders[i] = new SequentialDocValuesIterator( - new SortedNumericStarTreeValuesIterator(fieldProducerMap.get(dimensionFieldInfo.name).getSortedNumeric(dimensionFieldInfo)) + dimensionReaders[i] = getSequentialDocValuesIterator( + dimensionFieldInfo, + fieldProducerMap, + dimensionsSplitOrder.get(i).getDocValuesType() ); + + if (dimensionsSplitOrder.get(i).getDocValuesType().equals(DocValuesType.SORTED_SET)) { + // This is needed as we need to write the ordinals and also the bytesRef associated with it + // as part of star tree doc values file formats + flushSortedSetDocValuesMap.put( + dimensionsSplitOrder.get(i).getField(), + fieldProducerMap.get(dimensionFieldInfo.name).getSortedSet(dimensionFieldInfo) + ); + } + } Iterator starTreeDocumentIterator = sortAndAggregateSegmentDocuments(dimensionReaders, metricReaders); logger.debug("Sorting and aggregating star-tree in ms : {}", (System.currentTimeMillis() - startTime)); @@ -245,6 +276,72 @@ public void build( logger.debug("Finished Building star-tree in ms : {}", (System.currentTimeMillis() - startTime)); } + /** + * Returns the sequential doc values iterator for the given field based on associated docValuesType + */ + private SequentialDocValuesIterator getSequentialDocValuesIterator( + FieldInfo fieldInfo, + Map fieldProducerMap, + DocValuesType type + ) throws IOException { + switch (type) { + case SORTED_NUMERIC: + return new SequentialDocValuesIterator( + new SortedNumericStarTreeValuesIterator(fieldProducerMap.get(fieldInfo.name).getSortedNumeric(fieldInfo)) + ); + case SORTED_SET: + return new 
SequentialDocValuesIterator( + new SortedSetStarTreeValuesIterator(fieldProducerMap.get(fieldInfo.name).getSortedSet(fieldInfo)) + ); + default: + throw new IllegalArgumentException("Unsupported type: " + type); + } + } + + /** + * Returns the ordinal map per field based on given star-tree values across different segments + */ + protected Map getOrdinalMaps(List starTreeValuesSubs) throws IOException { + long curr = System.currentTimeMillis(); + Map> dimensionToIterators = new HashMap<>(); + // Group iterators by dimension + for (StarTreeValues starTree : starTreeValuesSubs) { + for (String dimName : starTree.getStarTreeField().getDimensionNames()) { + if (starTree.getDimensionValuesIterator(dimName) instanceof SortedSetStarTreeValuesIterator) { + dimensionToIterators.computeIfAbsent(dimName, k -> new ArrayList<>()) + .add((SortedSetStarTreeValuesIterator) starTree.getDimensionValuesIterator(dimName)); + } + } + } + + if (dimensionToIterators.isEmpty()) return Collections.emptyMap(); + this.mergeSortedSetDimensionsMap = dimensionToIterators; + Map dimensionToOrdinalMap = new HashMap<>(); + for (Map.Entry> entry : dimensionToIterators.entrySet()) { + String dimName = entry.getKey(); + List iterators = entry.getValue(); + + // step 1: iterate through each sub and mark terms still in use + TermsEnum[] liveTerms = new TermsEnum[iterators.size()]; + long[] weights = new long[liveTerms.length]; + + for (int sub = 0; sub < liveTerms.length; sub++) { + SortedSetStarTreeValuesIterator dv = iterators.get(sub); + liveTerms[sub] = dv.termsEnum(); + weights[sub] = dv.getValueCount(); + } + + // step 2: create ordinal map for this dimension + OrdinalMap map = OrdinalMap.build(null, liveTerms, weights, PackedInts.COMPACT); + dimensionToOrdinalMap.put(dimName, map); + + logger.debug("Ordinal map for dimension {} - Size in bytes: {}", dimName, map.ramBytesUsed()); + } + this.mergeSortedSetDimensionsOrdinalMap = dimensionToOrdinalMap; + logger.debug("Total time to build ordinal 
maps: {} ms", System.currentTimeMillis() - curr); + return dimensionToOrdinalMap; + } + /** * Builds the star tree using sorted and aggregated star-tree Documents * @@ -295,6 +392,9 @@ void appendDocumentsToStarTree(Iterator starTreeDocumentIterat } } + /** + * Writes star tree structure to file format + */ private void serializeStarTree(int numSegmentStarTreeDocuments, int numStarTreeDocs) throws IOException { // serialize the star tree data long dataFilePointer = dataOut.getFilePointer(); @@ -314,10 +414,13 @@ private void serializeStarTree(int numSegmentStarTreeDocuments, int numStarTreeD ); } + /** + * Creates the star-tree docValues indices in disk + */ private void createSortedDocValuesIndices(DocValuesConsumer docValuesConsumer, AtomicInteger fieldNumberAcrossStarTrees) throws IOException { - List dimensionWriters = new ArrayList<>(); - List metricWriters = new ArrayList<>(); + List> dimensionWriters = new ArrayList<>(); + List> metricWriters = new ArrayList<>(); FieldInfo[] dimensionFieldInfoList = new FieldInfo[numDimensions]; FieldInfo[] metricFieldInfoList = new FieldInfo[metricAggregatorInfos.size()]; int dimIndex = 0; @@ -325,16 +428,21 @@ private void createSortedDocValuesIndices(DocValuesConsumer docValuesConsumer, A for (String name : dim.getSubDimensionNames()) { final FieldInfo fi = getFieldInfo( fullyQualifiedFieldNameForStarTreeDimensionsDocValues(starTreeField.getName(), name), - DocValuesType.SORTED_NUMERIC, + dim.getDocValuesType(), fieldNumberAcrossStarTrees.getAndIncrement() ); dimensionFieldInfoList[dimIndex] = fi; - dimensionWriters.add(new SortedNumericDocValuesWriterWrapper(fi, Counter.newCounter())); + if (dim.getDocValuesType().equals(DocValuesType.SORTED_SET)) { + ByteBlockPool.DirectTrackingAllocator byteBlockAllocator = new ByteBlockPool.DirectTrackingAllocator(bytesUsed); + ByteBlockPool docValuesBytePool = new ByteBlockPool(byteBlockAllocator); + dimensionWriters.add(new SortedSetDocValuesWriterWrapper(fi, bytesUsed, 
docValuesBytePool)); + } else { + dimensionWriters.add(new SortedNumericDocValuesWriterWrapper(fi, bytesUsed)); + } dimIndex++; } } for (int i = 0; i < metricAggregatorInfos.size(); i++) { - final FieldInfo fi = getFieldInfo( fullyQualifiedFieldNameForStarTreeMetricsDocValues( starTreeField.getName(), @@ -344,16 +452,18 @@ private void createSortedDocValuesIndices(DocValuesConsumer docValuesConsumer, A DocValuesType.SORTED_NUMERIC, fieldNumberAcrossStarTrees.getAndIncrement() ); - metricFieldInfoList[i] = fi; - metricWriters.add(new SortedNumericDocValuesWriterWrapper(fi, Counter.newCounter())); + metricWriters.add(new SortedNumericDocValuesWriterWrapper(fi, bytesUsed)); } - for (int docId = 0; docId < numStarTreeDocs; docId++) { StarTreeDocument starTreeDocument = getStarTreeDocument(docId); - for (int i = 0; i < starTreeDocument.dimensions.length; i++) { - if (starTreeDocument.dimensions[i] != null) { - dimensionWriters.get(i).addValue(docId, starTreeDocument.dimensions[i]); + int idx = 0; + for (Dimension dim : dimensionsSplitOrder) { + for (String name : dim.getSubDimensionNames()) { + if (starTreeDocument.dimensions[idx] != null) { + indexDocValue(dimensionWriters.get(idx), docId, starTreeDocument.dimensions[idx], dim.getField()); + } + idx++; } } @@ -362,11 +472,17 @@ private void createSortedDocValuesIndices(DocValuesConsumer docValuesConsumer, A FieldValueConverter aggregatedValueType = metricAggregatorInfos.get(i).getValueAggregators().getAggregatedValueType(); if (aggregatedValueType.equals(LONG)) { if (starTreeDocument.metrics[i] != null) { - metricWriters.get(i).addValue(docId, (long) starTreeDocument.metrics[i]); + ((SortedNumericDocValuesWriterWrapper) (metricWriters.get(i))).addValue( + docId, + (long) starTreeDocument.metrics[i] + ); } } else if (aggregatedValueType.equals(DOUBLE)) { if (starTreeDocument.metrics[i] != null) { - metricWriters.get(i).addValue(docId, NumericUtils.doubleToSortableLong((Double) starTreeDocument.metrics[i])); + 
((SortedNumericDocValuesWriterWrapper) (metricWriters.get(i))).addValue( + docId, + NumericUtils.doubleToSortableLong((Double) starTreeDocument.metrics[i]) + ); } } else { throw new IllegalStateException("Unknown metric doc value type"); @@ -376,26 +492,68 @@ private void createSortedDocValuesIndices(DocValuesConsumer docValuesConsumer, A } } } - addStarTreeDocValueFields(docValuesConsumer, dimensionWriters, dimensionFieldInfoList, numDimensions); addStarTreeDocValueFields(docValuesConsumer, metricWriters, metricFieldInfoList, metricAggregatorInfos.size()); } + /** + * Adds startree field to respective field writers + */ + private void indexDocValue(DocValuesWriterWrapper dvWriter, int docId, long value, String field) throws IOException { + if (dvWriter instanceof SortedSetDocValuesWriterWrapper) { + // TODO : cache lookupOrd to make it faster + if (isMerge) { + OrdinalMap map = mergeSortedSetDimensionsOrdinalMap.get(field); + int segmentNumber = map.getFirstSegmentNumber(value); + long segmentOrd = map.getFirstSegmentOrd(value); + ((SortedSetDocValuesWriterWrapper) dvWriter).addValue( + docId, + mergeSortedSetDimensionsMap.get(field).get(segmentNumber).lookupOrd(segmentOrd) + ); + } else { + ((SortedSetDocValuesWriterWrapper) dvWriter).addValue(docId, flushSortedSetDocValuesMap.get(field).lookupOrd(value)); + } + } else if (dvWriter instanceof SortedNumericDocValuesWriterWrapper) { + ((SortedNumericDocValuesWriterWrapper) dvWriter).addValue(docId, value); + } + } + + @SuppressWarnings("unchecked") private void addStarTreeDocValueFields( DocValuesConsumer docValuesConsumer, - List docValuesWriters, + List> docValuesWriters, FieldInfo[] fieldInfoList, int fieldCount ) throws IOException { for (int i = 0; i < fieldCount; i++) { final int writerIndex = i; - DocValuesProducer docValuesProducer = new EmptyDocValuesProducer() { - @Override - public SortedNumericDocValues getSortedNumeric(FieldInfo field) { - return docValuesWriters.get(writerIndex).getDocValues(); - } - 
}; - docValuesConsumer.addSortedNumericField(fieldInfoList[i], docValuesProducer); + DocValuesProducer docValuesProducer; + switch (fieldInfoList[i].getDocValuesType()) { + case SORTED_NUMERIC: + docValuesProducer = new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) { + DocValuesWriterWrapper wrapper = (DocValuesWriterWrapper< + SortedNumericDocValues>) docValuesWriters.get(writerIndex); + return wrapper.getDocValues(); + } + }; + docValuesConsumer.addSortedNumericField(fieldInfoList[i], docValuesProducer); + break; + case SORTED_SET: + docValuesProducer = new EmptyDocValuesProducer() { + @Override + public SortedSetDocValues getSortedSet(FieldInfo field) { + DocValuesWriterWrapper wrapper = (DocValuesWriterWrapper< + SortedSetDocValues>) docValuesWriters.get(writerIndex); + return wrapper.getDocValues(); + } + }; + docValuesConsumer.addSortedSetField(fieldInfoList[i], docValuesProducer); + break; + default: + throw new IllegalStateException("Unsupported doc values type"); + } } } @@ -405,13 +563,14 @@ public SortedNumericDocValues getSortedNumeric(FieldInfo field) { protected StarTreeDocument getStarTreeDocument( int currentDocId, SequentialDocValuesIterator[] dimensionReaders, - List metricReaders + List metricReaders, + Map longValues ) throws IOException { Long[] dims = new Long[numDimensions]; int i = 0; for (SequentialDocValuesIterator dimensionValueIterator : dimensionReaders) { dimensionValueIterator.nextEntry(currentDocId); - Long val = dimensionValueIterator.value(currentDocId); + Long val = dimensionValueIterator.value(currentDocId, longValues.get(starTreeField.getDimensionNames().get(i))); dims[i] = val; i++; } @@ -431,7 +590,7 @@ protected StarTreeDocument getStarTreeDocument( /** * Sets dimensions / metric readers nnd numSegmentDocs */ - protected void setReadersAndNumSegmentDocs( + protected void setReadersAndNumSegmentDocsDuringMerge( SequentialDocValuesIterator[] dimensionReaders, List 
metricReaders, AtomicInteger numSegmentDocs, @@ -452,7 +611,6 @@ protected void setReadersAndNumSegmentDocs( metricReaders.add(new SequentialDocValuesIterator(starTreeValues.getMetricValuesIterator(metricFullName))); } } - numSegmentDocs.set( Integer.parseInt(starTreeValues.getAttributes().getOrDefault(SEGMENT_DOCS_COUNT, String.valueOf(DocIdSetIterator.NO_MORE_DOCS))) ); @@ -669,6 +827,14 @@ private static Long getLong(Object metric) { return metricValue; } + /** + * Sets the sortedSetDocValuesMap. + * This is needed as we need to write the ordinals and also the bytesRef associated with it + */ + void setFlushSortedSetDocValuesMap(Map flushSortedSetDocValuesMap) { + this.flushSortedSetDocValuesMap = flushSortedSetDocValuesMap; + } + /** * Merges a star-tree document into an aggregated star-tree document. * A new aggregated star-tree document is created if the aggregated document is null. @@ -799,7 +965,6 @@ private void constructStarTree(InMemoryTreeNode node, int startDocId, int endDoc constructStarTree(child, child.getStartDocId(), child.getEndDocId()); } } - } /** @@ -837,7 +1002,6 @@ private void addChildNode(InMemoryTreeNode node, int endDocId, int dimensionId, childNodeDimensionValue = nodeDimensionValue; childNodeType = StarTreeNodeType.DEFAULT.getValue(); } - InMemoryTreeNode lastNode = getNewNode(dimensionId, nodeStartDocId, endDocId, childNodeType, childNodeDimensionValue); node.addChildNode(lastNode, nodeDimensionValue); } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/OffHeapStarTreeBuilder.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/OffHeapStarTreeBuilder.java index 09d92e3da29c3..63659ef684744 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/OffHeapStarTreeBuilder.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/OffHeapStarTreeBuilder.java @@ -11,8 +11,10 @@ import 
org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.index.OrdinalMap; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.LongValues; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.util.io.IOUtils; import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; @@ -28,7 +30,9 @@ import java.util.Arrays; import java.util.Collections; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; @@ -177,19 +181,27 @@ private Object[] getStarTreeMetricFieldValuesFromSegment(int currentDocId, List< Iterator mergeStarTrees(List starTreeValuesSubs) throws IOException { int numDocs = 0; int[] docIds; + this.isMerge = true; + Map ordinalMaps = getOrdinalMaps(starTreeValuesSubs); try { + int seg = 0; for (StarTreeValues starTreeValues : starTreeValuesSubs) { SequentialDocValuesIterator[] dimensionReaders = new SequentialDocValuesIterator[numDimensions]; List metricReaders = new ArrayList<>(); AtomicInteger numSegmentDocs = new AtomicInteger(); - setReadersAndNumSegmentDocs(dimensionReaders, metricReaders, numSegmentDocs, starTreeValues); + setReadersAndNumSegmentDocsDuringMerge(dimensionReaders, metricReaders, numSegmentDocs, starTreeValues); int currentDocId = 0; + Map longValuesMap = new LinkedHashMap<>(); + for (Map.Entry entry : ordinalMaps.entrySet()) { + longValuesMap.put(entry.getKey(), entry.getValue().getGlobalOrds(seg)); + } while (currentDocId < numSegmentDocs.get()) { - StarTreeDocument starTreeDocument = getStarTreeDocument(currentDocId, dimensionReaders, metricReaders); + StarTreeDocument starTreeDocument = getStarTreeDocument(currentDocId, dimensionReaders, metricReaders, longValuesMap); 
segmentDocumentFileManager.writeStarTreeDocument(starTreeDocument, true); numDocs++; currentDocId++; } + seg++; } docIds = new int[numDocs]; for (int i = 0; i < numDocs; i++) { diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/OnHeapStarTreeBuilder.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/OnHeapStarTreeBuilder.java index 07142fc5c8be7..c91f4c5db98bb 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/OnHeapStarTreeBuilder.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/OnHeapStarTreeBuilder.java @@ -8,8 +8,10 @@ package org.opensearch.index.compositeindex.datacube.startree.builder; import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.index.OrdinalMap; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.LongValues; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; @@ -21,7 +23,9 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; @@ -113,6 +117,7 @@ public void build( */ @Override Iterator mergeStarTrees(List starTreeValuesSubs) throws IOException { + this.isMerge = true; return sortAndAggregateStarTreeDocuments(getSegmentsStarTreeDocuments(starTreeValuesSubs), true); } @@ -125,17 +130,23 @@ Iterator mergeStarTrees(List starTreeValuesSub */ StarTreeDocument[] getSegmentsStarTreeDocuments(List starTreeValuesSubs) throws IOException { List starTreeDocuments = new ArrayList<>(); + Map ordinalMaps = getOrdinalMaps(starTreeValuesSubs); + int seg = 0; 
for (StarTreeValues starTreeValues : starTreeValuesSubs) { - SequentialDocValuesIterator[] dimensionReaders = new SequentialDocValuesIterator[numDimensions]; List metricReaders = new ArrayList<>(); AtomicInteger numSegmentDocs = new AtomicInteger(); - setReadersAndNumSegmentDocs(dimensionReaders, metricReaders, numSegmentDocs, starTreeValues); + setReadersAndNumSegmentDocsDuringMerge(dimensionReaders, metricReaders, numSegmentDocs, starTreeValues); int currentDocId = 0; + Map longValuesMap = new LinkedHashMap<>(); + for (Map.Entry entry : ordinalMaps.entrySet()) { + longValuesMap.put(entry.getKey(), entry.getValue().getGlobalOrds(seg)); + } while (currentDocId < numSegmentDocs.get()) { - starTreeDocuments.add(getStarTreeDocument(currentDocId, dimensionReaders, metricReaders)); + starTreeDocuments.add(getStarTreeDocument(currentDocId, dimensionReaders, metricReaders, longValuesMap)); currentDocId++; } + seg++; } StarTreeDocument[] starTreeDocumentsArr = new StarTreeDocument[starTreeDocuments.size()]; return starTreeDocuments.toArray(starTreeDocumentsArr); diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilder.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilder.java index 23415ddf29132..038164c9c842d 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilder.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilder.java @@ -47,6 +47,7 @@ void build( * @param starTreeValuesSubs contains the star tree values from multiple segments * @param fieldNumberAcrossStarTrees maintains the unique field number across the fields in the star tree * @param starTreeDocValuesConsumer consumer of star-tree doc values + * * @throws IOException when we are unable to build star-tree */ void build( diff --git 
a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeDocsFileManager.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeDocsFileManager.java index 7e920b912731d..98c3e5c6d71e6 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeDocsFileManager.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeDocsFileManager.java @@ -14,6 +14,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.util.io.IOUtils; import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; @@ -45,7 +46,9 @@ *

The set of 'star-tree.documents' files is maintained, and a tracker array is used to keep track of the start document ID for each file. * Once the number of files reaches a set threshold, the files are merged. * + * @opensearch.experimental */ +@ExperimentalApi public class StarTreeDocsFileManager extends AbstractDocumentsFileManager implements Closeable { private static final Logger logger = LogManager.getLogger(StarTreeDocsFileManager.class); private static final String STAR_TREE_DOC_FILE_NAME = "star-tree.documents"; diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreesBuilder.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreesBuilder.java index bc598c9aeab7c..3d1a780c1c7ef 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreesBuilder.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreesBuilder.java @@ -106,10 +106,10 @@ public void close() throws IOException { /** * Merges star tree fields from multiple segments * - * @param metaOut an IndexInput for star-tree metadata - * @param dataOut an IndexInput for star-tree data - * @param starTreeValuesSubsPerField starTreeValuesSubs per field - * @param starTreeDocValuesConsumer a consumer to write star-tree doc values + * @param metaOut an IndexInput for star-tree metadata + * @param dataOut an IndexInput for star-tree data + * @param starTreeValuesSubsPerField starTreeValuesSubs per field + * @param starTreeDocValuesConsumer a consumer to write star-tree doc values */ public void buildDuringMerge( IndexOutput metaOut, diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadata.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadata.java index 7352c215ee390..57e47b1a5b9d9 100644 --- 
a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadata.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadata.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValuesType; import org.apache.lucene.store.IndexInput; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.index.compositeindex.CompositeIndexMetadata; @@ -62,9 +63,10 @@ public class StarTreeMetadata extends CompositeIndexMetadata { private final String starTreeFieldType; /** - * List of dimension fields used in the star-tree. + * Map of dimension fields to their associated DocValuesType.Insertion order needs to be maintained + * as it dictates dimensionSplitOrder */ - private final List dimensionFields; + LinkedHashMap dimensionFieldsToDocValuesMap; /** * List of metrics, containing field names and associated metric statistics. @@ -128,7 +130,7 @@ public StarTreeMetadata( this.starTreeFieldType = this.getCompositeFieldType().getName(); this.version = version; this.numberOfNodes = readNumberOfNodes(); - this.dimensionFields = readStarTreeDimensions(); + this.dimensionFieldsToDocValuesMap = readStarTreeDimensions(); this.metrics = readMetricEntries(); this.segmentAggregatedDocCount = readSegmentAggregatedDocCount(); this.starTreeDocCount = readStarTreeDocCount(); @@ -151,7 +153,7 @@ public StarTreeMetadata( * @param compositeFieldName name of the composite field. Here, name of the star-tree field. * @param compositeFieldType type of the composite field. Here, STAR_TREE field. * @param version The version of the star tree stored in the segments. 
- * @param dimensionFields list of dimension fields + * @param dimensionFieldsToDocValuesMap map of dimensionFields to docValues * @param metrics list of metric entries * @param segmentAggregatedDocCount segment aggregated doc count * @param starTreeDocCount the total number of star tree documents for the segment @@ -167,7 +169,7 @@ public StarTreeMetadata( IndexInput meta, Integer version, Integer numberOfNodes, - List dimensionFields, + LinkedHashMap dimensionFieldsToDocValuesMap, List metrics, Integer segmentAggregatedDocCount, Integer starTreeDocCount, @@ -183,7 +185,7 @@ public StarTreeMetadata( this.starTreeFieldType = compositeFieldType.getName(); this.version = version; this.numberOfNodes = numberOfNodes; - this.dimensionFields = dimensionFields; + this.dimensionFieldsToDocValuesMap = dimensionFieldsToDocValuesMap; this.metrics = metrics; this.segmentAggregatedDocCount = segmentAggregatedDocCount; this.starTreeDocCount = starTreeDocCount; @@ -202,15 +204,14 @@ private int readDimensionsCount() throws IOException { return meta.readVInt(); } - private List readStarTreeDimensions() throws IOException { + private LinkedHashMap readStarTreeDimensions() throws IOException { int dimensionCount = readDimensionsCount(); - List dimensionFields = new ArrayList<>(); + LinkedHashMap dimensionFieldsToDocValuesMap = new LinkedHashMap<>(); for (int i = 0; i < dimensionCount; i++) { - dimensionFields.add(meta.readString()); + dimensionFieldsToDocValuesMap.put(meta.readString(), getDocValuesType(meta, meta.readByte())); } - - return dimensionFields; + return dimensionFieldsToDocValuesMap; } private int readMetricsCount() throws IOException { @@ -314,8 +315,8 @@ public String getStarTreeFieldType() { * * @return star-tree dimension field numbers */ - public List getDimensionFields() { - return dimensionFields; + public Map getDimensionFields() { + return dimensionFieldsToDocValuesMap; } /** @@ -405,4 +406,23 @@ public int getVersion() { public int getNumberOfNodes() { return 
numberOfNodes; } + + private static DocValuesType getDocValuesType(IndexInput input, byte b) throws IOException { + switch (b) { + case 0: + return DocValuesType.NONE; + case 1: + return DocValuesType.NUMERIC; + case 2: + return DocValuesType.BINARY; + case 3: + return DocValuesType.SORTED; + case 4: + return DocValuesType.SORTED_SET; + case 5: + return DocValuesType.SORTED_NUMERIC; + default: + throw new CorruptIndexException("invalid docvalues byte: " + b, input); + } + } } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataWriter.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataWriter.java index 42e6f3c59866a..569692ce18893 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataWriter.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataWriter.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.DocValuesType; import org.apache.lucene.store.IndexOutput; import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; import org.opensearch.index.compositeindex.datacube.startree.aggregators.MetricAggregatorInfo; @@ -130,8 +131,9 @@ private static void writeMeta( metaOut.writeVInt(starTreeField.getDimensionNames().size()); // dimensions - for (String dim : starTreeField.getDimensionNames()) { - metaOut.writeString(dim); + for (int i = 0; i < starTreeField.getDimensionNames().size(); i++) { + metaOut.writeString(starTreeField.getDimensionNames().get(i)); + metaOut.writeByte(docValuesByte(starTreeField.getDimensionDocValueTypes().get(i))); } // number of metrics @@ -171,4 +173,24 @@ private static void writeMeta( metaOut.writeVLong(dataFileLength); } + + private static byte docValuesByte(DocValuesType type) { 
+ switch (type) { + case NONE: + return 0; + case NUMERIC: + return 1; + case BINARY: + return 2; + case SORTED: + return 3; + case SORTED_SET: + return 4; + case SORTED_NUMERIC: + return 5; + default: + // BUG + throw new AssertionError("unhandled DocValuesType: " + type); + } + } } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/index/StarTreeValues.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/index/StarTreeValues.java index 003ebeafeae45..6a13e6e789f3a 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/index/StarTreeValues.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/index/StarTreeValues.java @@ -9,9 +9,12 @@ package org.opensearch.index.compositeindex.datacube.startree.index; import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.store.IndexInput; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.index.compositeindex.CompositeIndexMetadata; @@ -25,6 +28,7 @@ import org.opensearch.index.compositeindex.datacube.startree.node.StarTreeFactory; import org.opensearch.index.compositeindex.datacube.startree.node.StarTreeNode; import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedNumericStarTreeValuesIterator; +import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedSetStarTreeValuesIterator; import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.StarTreeValuesIterator; import java.io.IOException; @@ -35,7 +39,6 @@ import java.util.Set; import java.util.function.Supplier; -import static 
org.opensearch.index.codec.composite.composite912.Composite912DocValuesReader.getSortedNumericDocValues; import static org.opensearch.index.compositeindex.CompositeIndexConstants.SEGMENT_DOCS_COUNT; import static org.opensearch.index.compositeindex.CompositeIndexConstants.STAR_TREE_DOCS_COUNT; import static org.opensearch.index.compositeindex.datacube.startree.utils.StarTreeUtils.fullyQualifiedFieldNameForStarTreeDimensionsDocValues; @@ -128,8 +131,15 @@ public StarTreeValues( // build dimensions List readDimensions = new ArrayList<>(); - for (String dimension : starTreeMetadata.getDimensionFields()) { - readDimensions.add(new ReadDimension(dimension)); + for (String dimension : starTreeMetadata.getDimensionFields().keySet()) { + readDimensions.add( + new ReadDimension( + dimension, + readState.fieldInfos.fieldInfo( + fullyQualifiedFieldNameForStarTreeDimensionsDocValues(starTreeMetadata.getCompositeFieldName(), dimension) + ).getDocValuesType() + ) + ); } // star-tree field @@ -151,19 +161,25 @@ public StarTreeValues( metricValuesIteratorMap = new LinkedHashMap<>(); // get doc id set iterators for dimensions - for (String dimension : starTreeMetadata.getDimensionFields()) { + for (String dimension : starTreeMetadata.getDimensionFields().keySet()) { dimensionValuesIteratorMap.put(dimension, () -> { try { - SortedNumericDocValues dimensionSortedNumericDocValues = null; + FieldInfo dimensionfieldInfo = null; if (readState != null) { - FieldInfo dimensionfieldInfo = readState.fieldInfos.fieldInfo( + dimensionfieldInfo = readState.fieldInfos.fieldInfo( fullyQualifiedFieldNameForStarTreeDimensionsDocValues(starTreeField.getName(), dimension) ); - if (dimensionfieldInfo != null) { - dimensionSortedNumericDocValues = compositeDocValuesProducer.getSortedNumeric(dimensionfieldInfo); - } } - return new SortedNumericStarTreeValuesIterator(getSortedNumericDocValues(dimensionSortedNumericDocValues)); + assert dimensionfieldInfo != null; + if 
(dimensionfieldInfo.getDocValuesType().equals(DocValuesType.SORTED_SET)) { + SortedSetDocValues dimensionSortedSetDocValues = compositeDocValuesProducer.getSortedSet(dimensionfieldInfo); + return new SortedSetStarTreeValuesIterator(getSortedSetDocValues(dimensionSortedSetDocValues)); + } else { + SortedNumericDocValues dimensionSortedNumericDocValues = compositeDocValuesProducer.getSortedNumeric( + dimensionfieldInfo + ); + return new SortedNumericStarTreeValuesIterator(getSortedNumericDocValues(dimensionSortedNumericDocValues)); + } } catch (IOException e) { throw new RuntimeException("Error loading dimension StarTreeValuesIterator", e); } @@ -272,4 +288,30 @@ public StarTreeValuesIterator getMetricValuesIterator(String fullyQualifiedMetri public int getStarTreeDocumentCount() { return starTreeMetadata.getStarTreeDocCount(); } + + /** + * Returns the sorted numeric doc values for the given sorted numeric field. + * If the sorted numeric field is null, it returns an empty doc id set iterator. + *

+ * Sorted numeric field can be null for cases where the segment doesn't hold a particular value. + * + * @param sortedNumeric the sorted numeric doc values for a field + * @return empty sorted numeric values if the field is not present, else sortedNumeric + */ + static SortedNumericDocValues getSortedNumericDocValues(SortedNumericDocValues sortedNumeric) { + return sortedNumeric == null ? DocValues.emptySortedNumeric() : sortedNumeric; + } + + /** + * Returns the sortedSet doc values for the given sortedSet field. + * If the sortedSet field is null, it returns an empty doc id set iterator. + *

+ * SortedSet field can be null for cases where the segment doesn't hold a particular value. + * + * @param sortedSetDv the sortedSet doc values for a field + * @return empty sortedSet values if the field is not present, else sortedSetDv + */ + static SortedSetDocValues getSortedSetDocValues(SortedSetDocValues sortedSetDv) { + return sortedSetDv == null ? DocValues.emptySortedSet() : sortedSetDv; + } } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/SequentialDocValuesIterator.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/SequentialDocValuesIterator.java index 9029a451ca4d9..c4d3526648cac 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/SequentialDocValuesIterator.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/SequentialDocValuesIterator.java @@ -9,8 +9,10 @@ package org.opensearch.index.compositeindex.datacube.startree.utils; +import org.apache.lucene.util.LongValues; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedNumericStarTreeValuesIterator; +import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedSetStarTreeValuesIterator; import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.StarTreeValuesIterator; import java.io.IOException; @@ -81,6 +83,33 @@ public Long value(int currentEntryId) throws IOException { } return ((SortedNumericStarTreeValuesIterator) starTreeValuesIterator).nextValue(); + } else if (starTreeValuesIterator instanceof SortedSetStarTreeValuesIterator) { + if (currentEntryId < 0) { + throw new IllegalStateException("invalid entry id to fetch the next value"); + } + if (currentEntryId == StarTreeValuesIterator.NO_MORE_ENTRIES) { + throw new IllegalStateException("StarTreeValuesIterator is already exhausted"); + } + if (entryId == 
StarTreeValuesIterator.NO_MORE_ENTRIES || entryId != currentEntryId) { + return null; + } + return ((SortedSetStarTreeValuesIterator) starTreeValuesIterator).nextOrd(); + } else { + throw new IllegalStateException("Unsupported Iterator requested for SequentialDocValuesIterator"); + } + } + + public Long value(int currentEntryId, LongValues globalOrdinalLongValues) throws IOException { + if (starTreeValuesIterator instanceof SortedNumericStarTreeValuesIterator) { + return value(currentEntryId); + } else if (starTreeValuesIterator instanceof SortedSetStarTreeValuesIterator) { + assert globalOrdinalLongValues != null; + Long val = value(currentEntryId); + // convert local ordinal to global ordinal + if (val != null) { + val = globalOrdinalLongValues.get(val); + } + return val; } else { throw new IllegalStateException("Unsupported Iterator requested for SequentialDocValuesIterator"); } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtils.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtils.java index 2aae0d4ca7e29..240a727678d6f 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtils.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtils.java @@ -16,6 +16,7 @@ import java.util.Collections; import java.util.List; +import java.util.Map; /** * Util class for building star tree @@ -67,14 +68,17 @@ public static String fullyQualifiedFieldNameForStarTreeMetricsDocValues(String s * @param fields field names * @return field infos */ - public static FieldInfo[] getFieldInfoList(List fields) { + public static FieldInfo[] getFieldInfoList(List fields, Map dimDocValuesTypeMap) { FieldInfo[] fieldInfoList = new FieldInfo[fields.size()]; - // field number is not really used. 
We depend on unique field names to get the desired iterator int fieldNumber = 0; - for (String fieldName : fields) { - fieldInfoList[fieldNumber] = getFieldInfo(fieldName, DocValuesType.SORTED_NUMERIC, fieldNumber); + fieldInfoList[fieldNumber] = getFieldInfo( + fieldName, + // default is sortedNumeric since all metrics right now are sorted numeric + dimDocValuesTypeMap.getOrDefault(fieldName, DocValuesType.SORTED_NUMERIC), + fieldNumber + ); fieldNumber++; } return fieldInfoList; diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/iterator/SortedSetStarTreeValuesIterator.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/iterator/SortedSetStarTreeValuesIterator.java new file mode 100644 index 0000000000000..0cddffe5877e9 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/iterator/SortedSetStarTreeValuesIterator.java @@ -0,0 +1,59 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.compositeindex.datacube.startree.utils.iterator; + +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.opensearch.common.annotation.ExperimentalApi; + +import java.io.IOException; + +/** + * Wrapper iterator class for StarTree index to traverse through SortedSetDocValues + * + * @opensearch.experimental + */ +@ExperimentalApi +public class SortedSetStarTreeValuesIterator extends StarTreeValuesIterator { + + public SortedSetStarTreeValuesIterator(DocIdSetIterator docIdSetIterator) { + super(docIdSetIterator); + } + + public long nextOrd() throws IOException { + return ((SortedSetDocValues) docIdSetIterator).nextOrd(); + } + + public int docValueCount() { + return ((SortedSetDocValues) docIdSetIterator).docValueCount(); + } + + public BytesRef lookupOrd(long ord) throws IOException { + return ((SortedSetDocValues) docIdSetIterator).lookupOrd(ord); + } + + public long getValueCount() { + return ((SortedSetDocValues) docIdSetIterator).getValueCount(); + } + + public long lookupTerm(BytesRef key) throws IOException { + return ((SortedSetDocValues) docIdSetIterator).lookupTerm(key); + } + + public TermsEnum termsEnum() throws IOException { + return ((SortedSetDocValues) docIdSetIterator).termsEnum(); + } + + public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { + return ((SortedSetDocValues) docIdSetIterator).intersect(automaton); + } +} diff --git a/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java index 54a1aead5fcc7..df14a5811f6a0 100644 --- a/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java @@ -59,6 
+59,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.analysis.IndexAnalyzers; import org.opensearch.index.analysis.NamedAnalyzer; +import org.opensearch.index.compositeindex.datacube.DimensionType; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.opensearch.index.query.QueryShardContext; @@ -73,6 +74,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.function.Supplier; import static org.opensearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; @@ -254,6 +256,11 @@ public KeywordFieldMapper build(BuilderContext context) { this ); } + + @Override + public Optional getSupportedDataCubeDimensionType() { + return Optional.of(DimensionType.KEYWORD); + } } public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, c.getIndexAnalyzers())); diff --git a/server/src/test/java/org/opensearch/index/codec/composite/SortedSetDocValuesWriterWrapperTests.java b/server/src/test/java/org/opensearch/index/codec/composite/SortedSetDocValuesWriterWrapperTests.java new file mode 100644 index 0000000000000..b0fdd712beafb --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/composite/SortedSetDocValuesWriterWrapperTests.java @@ -0,0 +1,98 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec.composite; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.SortedSetDocValuesWriterWrapper; +import org.apache.lucene.index.VectorEncoding; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.util.ByteBlockPool; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.Counter; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.util.Collections; + +public class SortedSetDocValuesWriterWrapperTests extends OpenSearchTestCase { + + private SortedSetDocValuesWriterWrapper wrapper; + private FieldInfo fieldInfo; + private Counter counter; + + @Override + public void setUp() throws Exception { + super.setUp(); + fieldInfo = new FieldInfo( + "field", + 1, + false, + false, + true, + IndexOptions.NONE, + DocValuesType.NONE, + -1, + Collections.emptyMap(), + 0, + 0, + 0, + 0, + VectorEncoding.FLOAT32, + VectorSimilarityFunction.EUCLIDEAN, + false, + false + ); + counter = Counter.newCounter(); + ByteBlockPool.DirectTrackingAllocator byteBlockAllocator = new ByteBlockPool.DirectTrackingAllocator(counter); + ByteBlockPool docValuesBytePool = new ByteBlockPool(byteBlockAllocator); + wrapper = new SortedSetDocValuesWriterWrapper(fieldInfo, counter, docValuesBytePool); + } + + public void testAddValue() throws IOException { + wrapper.addValue(0, new BytesRef("text1")); + wrapper.addValue(1, new BytesRef("text2")); + wrapper.addValue(2, new BytesRef("text3")); + + SortedSetDocValues docValues = wrapper.getDocValues(); + assertNotNull(docValues); + + assertEquals(0, docValues.nextDoc()); + assertEquals(0, docValues.nextOrd()); + assertEquals(1, docValues.nextDoc()); + assertEquals(1, docValues.nextOrd()); + assertEquals(2, docValues.nextDoc()); + assertEquals(2, 
docValues.nextOrd()); + } + + public void testGetDocValues() { + SortedSetDocValues docValues = wrapper.getDocValues(); + assertNotNull(docValues); + } + + public void testMultipleValues() throws IOException { + wrapper.addValue(0, new BytesRef("text1")); + wrapper.addValue(0, new BytesRef("text2")); + wrapper.addValue(1, new BytesRef("text3")); + + SortedSetDocValues docValues = wrapper.getDocValues(); + assertNotNull(docValues); + + assertEquals(0, docValues.nextDoc()); + assertEquals(0, docValues.nextOrd()); + assertEquals(1, docValues.nextOrd()); + assertEquals(-1, docValues.nextOrd()); + + assertEquals(1, docValues.nextDoc()); + assertEquals(2, docValues.nextOrd()); + assertEquals(-1, docValues.nextOrd()); + } +} diff --git a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/AbstractStarTreeDVFormatTests.java b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/AbstractStarTreeDVFormatTests.java new file mode 100644 index 0000000000000..4dfd8c08575f2 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/AbstractStarTreeDVFormatTests.java @@ -0,0 +1,126 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec.composite912.datacube.startree; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.opensearch.Version; +import org.opensearch.cluster.ClusterModule; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.CheckedConsumer; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.MapperTestUtils; +import org.opensearch.index.codec.composite.composite912.Composite912Codec; +import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; +import org.opensearch.index.compositeindex.datacube.startree.StarTreeIndexSettings; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.indices.IndicesModule; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import static org.opensearch.common.util.FeatureFlags.STAR_TREE_INDEX; + +/** + * Abstract star tree doc values Lucene tests + */ +@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose") +public abstract class AbstractStarTreeDVFormatTests extends BaseDocValuesFormatTestCase { + MapperService 
mapperService = null; + StarTreeFieldConfiguration.StarTreeBuildMode buildMode; + + public AbstractStarTreeDVFormatTests(StarTreeFieldConfiguration.StarTreeBuildMode buildMode) { + this.buildMode = buildMode; + } + + @ParametersFactory + public static Collection parameters() { + List parameters = new ArrayList<>(); + parameters.add(new Object[] { StarTreeFieldConfiguration.StarTreeBuildMode.ON_HEAP }); + parameters.add(new Object[] { StarTreeFieldConfiguration.StarTreeBuildMode.OFF_HEAP }); + return parameters; + } + + @BeforeClass + public static void createMapper() throws Exception { + FeatureFlags.initializeFeatureFlags(Settings.builder().put(STAR_TREE_INDEX, "true").build()); + } + + @AfterClass + public static void clearMapper() { + FeatureFlags.initializeFeatureFlags(Settings.EMPTY); + } + + @After + public void teardown() throws IOException { + mapperService.close(); + } + + @Override + protected Codec getCodec() { + final Logger testLogger = LogManager.getLogger(StarTreeDocValuesFormatTests.class); + + try { + mapperService = createMapperService(getMapping()); + } catch (IOException e) { + throw new RuntimeException(e); + } + Codec codec = new Composite912Codec(Lucene912Codec.Mode.BEST_SPEED, mapperService, testLogger); + return codec; + } + + public static MapperService createMapperService(XContentBuilder builder) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .put(StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING.getKey(), true) + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)) + .build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).putMapping(builder.toString()).build(); + IndicesModule indicesModule = new IndicesModule(Collections.emptyList()); + MapperService mapperService 
= MapperTestUtils.newMapperServiceWithHelperAnalyzer( + new NamedXContentRegistry(ClusterModule.getNamedXWriteables()), + createTempDir(), + settings, + indicesModule, + "test" + ); + mapperService.merge(indexMetadata, MapperService.MergeReason.INDEX_TEMPLATE); + return mapperService; + } + + abstract XContentBuilder getMapping() throws IOException; + + public static XContentBuilder topMapping(CheckedConsumer buildFields) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc"); + buildFields.accept(builder); + return builder.endObject().endObject(); + } + +} diff --git a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeDocValuesFormatTests.java b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeDocValuesFormatTests.java index f081cadc1362c..03798c6e4ce55 100644 --- a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeDocValuesFormatTests.java +++ b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeDocValuesFormatTests.java @@ -8,14 +8,9 @@ package org.opensearch.index.codec.composite912.datacube.startree; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; @@ -24,48 +19,25 @@ import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.Term; import org.apache.lucene.store.Directory; -import 
org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.TestUtil; -import org.opensearch.Version; -import org.opensearch.cluster.ClusterModule; -import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.CheckedConsumer; import org.opensearch.common.lucene.Lucene; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.core.common.unit.ByteSizeUnit; -import org.opensearch.core.common.unit.ByteSizeValue; -import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.IndexSettings; -import org.opensearch.index.MapperTestUtils; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; -import org.opensearch.index.codec.composite.composite912.Composite912Codec; import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; -import org.opensearch.index.compositeindex.datacube.startree.StarTreeIndexSettings; import org.opensearch.index.compositeindex.datacube.startree.StarTreeTestUtils; import org.opensearch.index.compositeindex.datacube.startree.index.StarTreeValues; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.NumberFieldMapper; -import org.opensearch.indices.IndicesModule; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import static 
org.opensearch.common.util.FeatureFlags.STAR_TREE_INDEX; import static org.opensearch.index.compositeindex.CompositeIndexConstants.STAR_TREE_DOCS_COUNT; import static org.opensearch.index.compositeindex.datacube.startree.StarTreeTestUtils.assertStarTreeDocuments; @@ -73,48 +45,10 @@ * Star tree doc values Lucene tests */ @LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose") -public class StarTreeDocValuesFormatTests extends BaseDocValuesFormatTestCase { - MapperService mapperService = null; - StarTreeFieldConfiguration.StarTreeBuildMode buildMode; +public class StarTreeDocValuesFormatTests extends AbstractStarTreeDVFormatTests { public StarTreeDocValuesFormatTests(StarTreeFieldConfiguration.StarTreeBuildMode buildMode) { - this.buildMode = buildMode; - } - - @ParametersFactory - public static Collection parameters() { - List parameters = new ArrayList<>(); - parameters.add(new Object[] { StarTreeFieldConfiguration.StarTreeBuildMode.ON_HEAP }); - parameters.add(new Object[] { StarTreeFieldConfiguration.StarTreeBuildMode.OFF_HEAP }); - return parameters; - } - - @BeforeClass - public static void createMapper() throws Exception { - FeatureFlags.initializeFeatureFlags(Settings.builder().put(STAR_TREE_INDEX, "true").build()); - } - - @AfterClass - public static void clearMapper() { - FeatureFlags.initializeFeatureFlags(Settings.EMPTY); - } - - @After - public void teardown() throws IOException { - mapperService.close(); - } - - @Override - protected Codec getCodec() { - final Logger testLogger = LogManager.getLogger(StarTreeDocValuesFormatTests.class); - - try { - mapperService = createMapperService(getExpandedMapping()); - } catch (IOException e) { - throw new RuntimeException(e); - } - Codec codec = new Composite912Codec(Lucene912Codec.Mode.BEST_SPEED, mapperService, testLogger); - return codec; + super(buildMode); } public void testStarTreeDocValues() throws IOException { @@ -124,24 +58,24 @@ public void testStarTreeDocValues() throws IOException { 
RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); Document doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 1)); - doc.add(new SortedNumericDocValuesField("dv", 1)); - doc.add(new SortedNumericDocValuesField("field", -1)); + doc.add(new SortedNumericDocValuesField("dv1", 1)); + doc.add(new SortedNumericDocValuesField("field1", -1)); iw.addDocument(doc); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 1)); - doc.add(new SortedNumericDocValuesField("dv", 1)); - doc.add(new SortedNumericDocValuesField("field", -1)); + doc.add(new SortedNumericDocValuesField("dv1", 1)); + doc.add(new SortedNumericDocValuesField("field1", -1)); iw.addDocument(doc); doc = new Document(); iw.forceMerge(1); doc.add(new SortedNumericDocValuesField("sndv", 2)); - doc.add(new SortedNumericDocValuesField("dv", 2)); - doc.add(new SortedNumericDocValuesField("field", -2)); + doc.add(new SortedNumericDocValuesField("dv1", 2)); + doc.add(new SortedNumericDocValuesField("field1", -2)); iw.addDocument(doc); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 2)); - doc.add(new SortedNumericDocValuesField("dv", 2)); - doc.add(new SortedNumericDocValuesField("field", -2)); + doc.add(new SortedNumericDocValuesField("dv1", 2)); + doc.add(new SortedNumericDocValuesField("field1", -2)); iw.addDocument(doc); iw.forceMerge(1); iw.close(); @@ -217,8 +151,9 @@ public void testStarTreeDocValuesWithDeletions() throws IOException { Directory directory = newDirectory(); IndexWriterConfig conf = newIndexWriterConfig(null); conf.setMergePolicy(newLogMergePolicy()); + conf.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD); + conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE); RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); - int iterations = 3; Map map = new HashMap<>(); List allIds = new ArrayList<>(); @@ -239,17 +174,25 @@ public void testStarTreeDocValuesWithDeletions() throws IOException { doc.add(new 
SortedNumericDocValuesField("dv", dvValue)); map.put(sndvValue + "-" + dvValue, fieldValue + map.getOrDefault(sndvValue + "-" + dvValue, 0)); + doc.add(new NumericDocValuesField("field-ndv", fieldValue)); + iw.addDocument(doc); } iw.flush(); } iw.commit(); - // Delete random number of documents + // Update random number of documents int docsToDelete = random().nextInt(9); // Delete up to 9 documents for (int i = 0; i < docsToDelete; i++) { if (!allIds.isEmpty()) { String idToDelete = allIds.remove(random().nextInt(allIds.size() - 1)); - iw.deleteDocuments(new Term("_id", idToDelete)); + Document doc = new Document(); + doc.add(new NumericDocValuesField("field-ndv", 1L)); + iw.w.softUpdateDocuments( + new Term("_id", idToDelete), + List.of(doc), + new NumericDocValuesField(Lucene.SOFT_DELETES_FIELD, 1) + ); allIds.remove(idToDelete); } } @@ -307,6 +250,11 @@ public void testStarTreeDocValuesWithDeletions() throws IOException { directory.close(); } + @Override + protected XContentBuilder getMapping() throws IOException { + return getExpandedMapping(); + } + public static XContentBuilder getExpandedMapping() throws IOException { return topMapping(b -> { b.startObject("composite"); @@ -319,12 +267,12 @@ public static XContentBuilder getExpandedMapping() throws IOException { b.field("name", "sndv"); b.endObject(); b.startObject(); - b.field("name", "dv"); + b.field("name", "dv1"); b.endObject(); b.endArray(); b.startArray("metrics"); b.startObject(); - b.field("name", "field"); + b.field("name", "field1"); b.startArray("stats"); b.value("sum"); b.value("value_count"); @@ -351,40 +299,13 @@ public static XContentBuilder getExpandedMapping() throws IOException { b.startObject("sndv"); b.field("type", "integer"); b.endObject(); - b.startObject("dv"); + b.startObject("dv1"); b.field("type", "integer"); b.endObject(); - b.startObject("field"); + b.startObject("field1"); b.field("type", "integer"); b.endObject(); b.endObject(); }); } - - public static XContentBuilder 
topMapping(CheckedConsumer buildFields) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc"); - buildFields.accept(builder); - return builder.endObject().endObject(); - } - - public static MapperService createMapperService(XContentBuilder builder) throws IOException { - Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - .put(StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING.getKey(), true) - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)) - .build(); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).putMapping(builder.toString()).build(); - IndicesModule indicesModule = new IndicesModule(Collections.emptyList()); - MapperService mapperService = MapperTestUtils.newMapperServiceWithHelperAnalyzer( - new NamedXContentRegistry(ClusterModule.getNamedXWriteables()), - createTempDir(), - settings, - indicesModule, - "test" - ); - mapperService.merge(indexMetadata, MapperService.MergeReason.INDEX_TEMPLATE); - return mapperService; - } } diff --git a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java new file mode 100644 index 0000000000000..402ed1dbee98a --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java @@ -0,0 +1,572 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec.composite912.datacube.startree; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SegmentReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TestUtil; +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.lucene.Lucene; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; +import org.opensearch.index.codec.composite.CompositeIndexReader; +import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; +import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; +import org.opensearch.index.compositeindex.datacube.startree.StarTreeTestUtils; +import org.opensearch.index.compositeindex.datacube.startree.index.StarTreeValues; +import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedSetStarTreeValuesIterator; +import org.opensearch.index.mapper.NumberFieldMapper; + +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.opensearch.index.compositeindex.CompositeIndexConstants.STAR_TREE_DOCS_COUNT; +import static org.opensearch.index.compositeindex.datacube.startree.StarTreeTestUtils.assertStarTreeDocuments; + +/** + * Star tree doc 
values Lucene tests + */ +@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose") +public class StarTreeKeywordDocValuesFormatTests extends AbstractStarTreeDVFormatTests { + + public StarTreeKeywordDocValuesFormatTests(StarTreeFieldConfiguration.StarTreeBuildMode buildMode) { + super(buildMode); + } + + public void testStarTreeKeywordDocValues() throws IOException { + Directory directory = newDirectory(); + IndexWriterConfig conf = newIndexWriterConfig(null); + conf.setMergePolicy(newLogMergePolicy()); + RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); + Document doc = new Document(); + doc.add(new StringField("_id", "1", Field.Store.NO)); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + iw.addDocument(doc); + doc = new Document(); + doc.add(new StringField("_id", "2", Field.Store.NO)); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + iw.addDocument(doc); + iw.flush(); + iw.deleteDocuments(new Term("_id", "2")); + iw.flush(); + doc = new Document(); + doc.add(new StringField("_id", "3", Field.Store.NO)); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + iw.addDocument(doc); + doc = new Document(); + doc.add(new StringField("_id", "4", Field.Store.NO)); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + iw.addDocument(doc); + iw.flush(); + iw.deleteDocuments(new Term("_id", "4")); + iw.flush(); + 
iw.forceMerge(1); + + iw.close(); + + DirectoryReader ir = maybeWrapWithMergingReader(DirectoryReader.open(directory)); + TestUtil.checkReader(ir); + assertEquals(1, ir.leaves().size()); + + // Star tree documents + /** + keyword1 keyword2 | [ sum, value_count, min, max[sndv]] , doc_count + [0, 0] | [3.0, 2.0, 1.0, 2.0, 2.0] + [1, 1] | [3.0, 2.0, 1.0, 2.0, 2.0] + [null, 0] | [3.0, 2.0, 1.0, 2.0, 2.0] + [null, 1] | [3.0, 2.0, 1.0, 2.0, 2.0] + [null, null] | [6.0, 4.0, 1.0, 2.0, 4.0] + */ + StarTreeDocument[] expectedStarTreeDocuments = new StarTreeDocument[5]; + expectedStarTreeDocuments[0] = new StarTreeDocument(new Long[] { 0L, 0L }, new Double[] { 3.0, 2.0, 1.0, 2.0, 2.0 }); + expectedStarTreeDocuments[1] = new StarTreeDocument(new Long[] { 1L, 1L }, new Double[] { 3.0, 2.0, 1.0, 2.0, 2.0 }); + expectedStarTreeDocuments[2] = new StarTreeDocument(new Long[] { null, 0L }, new Double[] { 3.0, 2.0, 1.0, 2.0, 2.0 }); + expectedStarTreeDocuments[3] = new StarTreeDocument(new Long[] { null, 1L }, new Double[] { 3.0, 2.0, 1.0, 2.0, 2.0 }); + expectedStarTreeDocuments[4] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 6.0, 4.0, 1.0, 2.0, 4.0 }); + + for (LeafReaderContext context : ir.leaves()) { + SegmentReader reader = Lucene.segmentReader(context.reader()); + CompositeIndexReader starTreeDocValuesReader = (CompositeIndexReader) reader.getDocValuesReader(); + List compositeIndexFields = starTreeDocValuesReader.getCompositeIndexFields(); + + for (CompositeIndexFieldInfo compositeIndexFieldInfo : compositeIndexFields) { + StarTreeValues starTreeValues = (StarTreeValues) starTreeDocValuesReader.getCompositeIndexValues(compositeIndexFieldInfo); + StarTreeDocument[] starTreeDocuments = StarTreeTestUtils.getSegmentsStarTreeDocuments( + List.of(starTreeValues), + List.of( + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG + 
), + Integer.parseInt(starTreeValues.getAttributes().get(STAR_TREE_DOCS_COUNT)) + ); + assertStarTreeDocuments(starTreeDocuments, expectedStarTreeDocuments); + } + } + ir.close(); + directory.close(); + } + + public void testStarTreeKeywordDocValuesWithDeletions() throws IOException { + Directory directory = newDirectory(); + IndexWriterConfig conf = newIndexWriterConfig(null); + conf.setMergePolicy(newLogMergePolicy()); + RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); + + int iterations = 3; + Set allIds = new HashSet<>(); + Map documents = new HashMap<>(); + Map map = new HashMap<>(); + for (int iter = 0; iter < iterations; iter++) { + // Add 10 documents + for (int i = 0; i < 10; i++) { + String id = String.valueOf(random().nextInt() + 1); + allIds.add(id); + Document doc = new Document(); + doc.add(new StringField("_id", id, Field.Store.YES)); + int sndvValue = random().nextInt(5) + 1; + doc.add(new SortedNumericDocValuesField("sndv", sndvValue)); + + String keyword1Value = "text" + random().nextInt(3); + + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef(keyword1Value))); + String keyword2Value = "text" + random().nextInt(3); + + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef(keyword2Value))); + map.put(keyword1Value + "-" + keyword2Value, sndvValue + map.getOrDefault(keyword1Value + "-" + keyword2Value, 0)); + iw.addDocument(doc); + documents.put(id, doc); + } + + iw.flush(); + + // Update random number of documents + int docsToDelete = random().nextInt(5); // Delete up to 5 documents + for (int i = 0; i < docsToDelete; i++) { + if (!allIds.isEmpty()) { + String idToDelete = allIds.iterator().next(); + Document doc = new Document(); + doc.add(new NumericDocValuesField("field-ndv", 1L)); + iw.w.softUpdateDocuments( + new Term("_id", idToDelete), + List.of(doc), + new NumericDocValuesField(Lucene.SOFT_DELETES_FIELD, 1) + ); + allIds.remove(idToDelete); + documents.remove(idToDelete); + } + } + + iw.flush(); + 
} + + iw.forceMerge(1); + iw.close(); + + DirectoryReader ir = maybeWrapWithMergingReader(DirectoryReader.open(directory)); + TestUtil.checkReader(ir); + assertEquals(1, ir.leaves().size()); + + // Assert star tree documents + for (LeafReaderContext context : ir.leaves()) { + SegmentReader reader = Lucene.segmentReader(context.reader()); + CompositeIndexReader starTreeDocValuesReader = (CompositeIndexReader) reader.getDocValuesReader(); + List compositeIndexFields = starTreeDocValuesReader.getCompositeIndexFields(); + + for (CompositeIndexFieldInfo compositeIndexFieldInfo : compositeIndexFields) { + StarTreeValues starTreeValues = (StarTreeValues) starTreeDocValuesReader.getCompositeIndexValues(compositeIndexFieldInfo); + StarTreeDocument[] actualStarTreeDocuments = StarTreeTestUtils.getSegmentsStarTreeDocuments( + List.of(starTreeValues), + List.of( + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG + ), + Integer.parseInt(starTreeValues.getAttributes().get(STAR_TREE_DOCS_COUNT)) + ); + SortedSetStarTreeValuesIterator k1 = (SortedSetStarTreeValuesIterator) starTreeValues.getDimensionValuesIterator( + "keyword1" + ); + SortedSetStarTreeValuesIterator k2 = (SortedSetStarTreeValuesIterator) starTreeValues.getDimensionValuesIterator( + "keyword2" + ); + for (StarTreeDocument starDoc : actualStarTreeDocuments) { + String keyword1 = null; + if (starDoc.dimensions[0] != null) { + keyword1 = k1.lookupOrd(starDoc.dimensions[0]).utf8ToString(); + } + + String keyword2 = null; + if (starDoc.dimensions[1] != null) { + keyword2 = k2.lookupOrd(starDoc.dimensions[1]).utf8ToString(); + } + double metric = (double) starDoc.metrics[0]; + if (map.containsKey(keyword1 + "-" + keyword2)) { + assertEquals((int) map.get(keyword1 + "-" + keyword2), (int) metric); + } + } + } + } + + ir.close(); + directory.close(); + } + + public void 
testStarKeywordDocValuesWithMissingDocs() throws IOException { + Directory directory = newDirectory(); + IndexWriterConfig conf = newIndexWriterConfig(null); + conf.setMergePolicy(newLogMergePolicy()); + RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); + Document doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + iw.addDocument(doc); + iw.forceMerge(1); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + iw.addDocument(doc); + iw.forceMerge(1); + iw.close(); + + DirectoryReader ir = maybeWrapWithMergingReader(DirectoryReader.open(directory)); + TestUtil.checkReader(ir); + assertEquals(1, ir.leaves().size()); + + // Star tree documents + /** + * keyword1 keyword2 | [ sum, value_count, min, max[sndv]] , doc_count + [0, 0] | [2.0, 1.0, 2.0, 2.0, 1.0] + [1, 1] | [2.0, 1.0, 2.0, 2.0, 1.0] + [null, 0] | [1.0, 1.0, 1.0, 1.0, 1.0] + [null, 1] | [1.0, 1.0, 1.0, 1.0, 1.0] + [null, 0] | [3.0, 2.0, 1.0, 2.0, 2.0] + [null, 1] | [3.0, 2.0, 1.0, 2.0, 2.0] + [null, null] | [6.0, 4.0, 1.0, 2.0, 4.0] + [null, null] | [2.0, 2.0, 1.0, 1.0, 2.0] + */ + StarTreeDocument[] expectedStarTreeDocuments = new StarTreeDocument[8]; + expectedStarTreeDocuments[0] = new StarTreeDocument(new Long[] { 0L, 0L }, new Double[] { 2.0, 1.0, 2.0, 2.0, 1.0 }); + 
expectedStarTreeDocuments[1] = new StarTreeDocument(new Long[] { 1L, 1L }, new Double[] { 2.0, 1.0, 2.0, 2.0, 1.0 }); + expectedStarTreeDocuments[2] = new StarTreeDocument(new Long[] { null, 0L }, new Double[] { 1.0, 1.0, 1.0, 1.0, 1.0 }); + expectedStarTreeDocuments[3] = new StarTreeDocument(new Long[] { null, 1L }, new Double[] { 1.0, 1.0, 1.0, 1.0, 1.0 }); + expectedStarTreeDocuments[4] = new StarTreeDocument(new Long[] { null, 0L }, new Double[] { 3.0, 2.0, 1.0, 2.0, 2.0 }); + expectedStarTreeDocuments[5] = new StarTreeDocument(new Long[] { null, 1L }, new Double[] { 3.0, 2.0, 1.0, 2.0, 2.0 }); + expectedStarTreeDocuments[6] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 6.0, 4.0, 1.0, 2.0, 4.0 }); + expectedStarTreeDocuments[7] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 2.0, 2.0, 1.0, 1.0, 2.0 }); + + for (LeafReaderContext context : ir.leaves()) { + SegmentReader reader = Lucene.segmentReader(context.reader()); + CompositeIndexReader starTreeDocValuesReader = (CompositeIndexReader) reader.getDocValuesReader(); + List compositeIndexFields = starTreeDocValuesReader.getCompositeIndexFields(); + + for (CompositeIndexFieldInfo compositeIndexFieldInfo : compositeIndexFields) { + StarTreeValues starTreeValues = (StarTreeValues) starTreeDocValuesReader.getCompositeIndexValues(compositeIndexFieldInfo); + StarTreeDocument[] starTreeDocuments = StarTreeTestUtils.getSegmentsStarTreeDocuments( + List.of(starTreeValues), + List.of( + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG + ), + Integer.parseInt(starTreeValues.getAttributes().get(STAR_TREE_DOCS_COUNT)) + ); + assertStarTreeDocuments(starTreeDocuments, expectedStarTreeDocuments); + } + } + ir.close(); + directory.close(); + } + + public void testStarKeywordDocValuesWithMissingDocsInSegment() throws IOException { + Directory directory = 
newDirectory(); + IndexWriterConfig conf = newIndexWriterConfig(null); + conf.setMergePolicy(newLogMergePolicy()); + RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); + Document doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + iw.addDocument(doc); + iw.forceMerge(1); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); + doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + iw.addDocument(doc); + iw.forceMerge(1); + iw.close(); + + DirectoryReader ir = maybeWrapWithMergingReader(DirectoryReader.open(directory)); + TestUtil.checkReader(ir); + assertEquals(1, ir.leaves().size()); + + // Star tree documents + /** + * keyword1 keyword2 | [ sum, value_count, min, max[sndv]] , doc_count + [0, 0] | [2.0, 1.0, 2.0, 2.0, 1.0] + [1, 1] | [2.0, 1.0, 2.0, 2.0, 1.0] + [null, null] | [2.0, 2.0, 1.0, 1.0, 2.0] // This is for missing doc + [null, 0] | [2.0, 1.0, 2.0, 2.0, 1.0] + [null, 1] | [2.0, 1.0, 2.0, 2.0, 1.0] + [null, null] | [2.0, 2.0, 1.0, 1.0, 2.0] + [null, null] | [6.0, 4.0, 1.0, 2.0, 4.0] // This is star document + */ + StarTreeDocument[] expectedStarTreeDocuments = new StarTreeDocument[7]; + expectedStarTreeDocuments[0] = new StarTreeDocument(new Long[] { 0L, 0L }, new Double[] { 2.0, 1.0, 2.0, 2.0, 1.0 }); + expectedStarTreeDocuments[1] = new StarTreeDocument(new Long[] { 1L, 1L }, new Double[] { 2.0, 1.0, 2.0, 2.0, 1.0 }); + expectedStarTreeDocuments[2] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 2.0, 2.0, 1.0, 1.0, 2.0 }); 
+ expectedStarTreeDocuments[3] = new StarTreeDocument(new Long[] { null, 0L }, new Double[] { 2.0, 1.0, 2.0, 2.0, 1.0 }); + expectedStarTreeDocuments[4] = new StarTreeDocument(new Long[] { null, 1L }, new Double[] { 2.0, 1.0, 2.0, 2.0, 1.0 }); + expectedStarTreeDocuments[5] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 2.0, 2.0, 1.0, 1.0, 2.0 }); + expectedStarTreeDocuments[6] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 6.0, 4.0, 1.0, 2.0, 4.0 }); + + for (LeafReaderContext context : ir.leaves()) { + SegmentReader reader = Lucene.segmentReader(context.reader()); + CompositeIndexReader starTreeDocValuesReader = (CompositeIndexReader) reader.getDocValuesReader(); + List compositeIndexFields = starTreeDocValuesReader.getCompositeIndexFields(); + + for (CompositeIndexFieldInfo compositeIndexFieldInfo : compositeIndexFields) { + StarTreeValues starTreeValues = (StarTreeValues) starTreeDocValuesReader.getCompositeIndexValues(compositeIndexFieldInfo); + StarTreeDocument[] starTreeDocuments = StarTreeTestUtils.getSegmentsStarTreeDocuments( + List.of(starTreeValues), + List.of( + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG + ), + Integer.parseInt(starTreeValues.getAttributes().get(STAR_TREE_DOCS_COUNT)) + ); + assertStarTreeDocuments(starTreeDocuments, expectedStarTreeDocuments); + } + } + ir.close(); + directory.close(); + } + + public void testStarKeywordDocValuesWithMissingDocsInAllSegments() throws IOException { + Directory directory = newDirectory(); + IndexWriterConfig conf = newIndexWriterConfig(null); + conf.setMergePolicy(newLogMergePolicy()); + RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); + Document doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + iw.addDocument(doc); + doc = new Document(); + doc.add(new 
SortedNumericDocValuesField("sndv", 1)); + iw.addDocument(doc); + iw.forceMerge(1); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + iw.addDocument(doc); + iw.forceMerge(1); + iw.close(); + + DirectoryReader ir = maybeWrapWithMergingReader(DirectoryReader.open(directory)); + TestUtil.checkReader(ir); + assertEquals(1, ir.leaves().size()); + + // Star tree documents + /** + * keyword1 keyword2 | [ sum, value_count, min, max[sndv]] , doc_count + [null, null] | [6.0, 4.0, 1.0, 2.0, 4.0] + + */ + StarTreeDocument[] expectedStarTreeDocuments = new StarTreeDocument[1]; + expectedStarTreeDocuments[0] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 6.0, 4.0, 1.0, 2.0, 4.0 }); + + for (LeafReaderContext context : ir.leaves()) { + SegmentReader reader = Lucene.segmentReader(context.reader()); + CompositeIndexReader starTreeDocValuesReader = (CompositeIndexReader) reader.getDocValuesReader(); + List compositeIndexFields = starTreeDocValuesReader.getCompositeIndexFields(); + + for (CompositeIndexFieldInfo compositeIndexFieldInfo : compositeIndexFields) { + StarTreeValues starTreeValues = (StarTreeValues) starTreeDocValuesReader.getCompositeIndexValues(compositeIndexFieldInfo); + StarTreeDocument[] starTreeDocuments = StarTreeTestUtils.getSegmentsStarTreeDocuments( + List.of(starTreeValues), + List.of( + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG + ), + Integer.parseInt(starTreeValues.getAttributes().get(STAR_TREE_DOCS_COUNT)) + ); + assertStarTreeDocuments(starTreeDocuments, expectedStarTreeDocuments); + } + } + ir.close(); + directory.close(); + } + + public void testStarKeywordDocValuesWithMissingDocsInMixedSegments() throws IOException { + Directory directory = newDirectory(); + 
IndexWriterConfig conf = newIndexWriterConfig(null); + conf.setMergePolicy(newLogMergePolicy()); + RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); + Document doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 1)); + iw.addDocument(doc); + iw.forceMerge(1); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("sndv", 2)); + iw.addDocument(doc); + iw.forceMerge(1); + iw.close(); + + DirectoryReader ir = maybeWrapWithMergingReader(DirectoryReader.open(directory)); + TestUtil.checkReader(ir); + assertEquals(1, ir.leaves().size()); + + // Star tree documents + /** + * keyword1 keyword2 | [ sum, value_count, min, max[sndv]] , doc_count + [0, null] | [2.0, 1.0, 2.0, 2.0, 1.0] + [null, null] | [4.0, 3.0, 1.0, 2.0, 3.0] + [null, null] | [6.0, 4.0, 1.0, 2.0, 4.0] + Row 1: the doc with keyword1=text1 (sndv=2). + Row 2: the three docs missing keyword1 (sndv 1, 1, 2). + Row 3: the star (root) document over all four docs. + keyword2 is never indexed in this test, so the second + dimension is always null. + */ + StarTreeDocument[] expectedStarTreeDocuments = new StarTreeDocument[3]; + expectedStarTreeDocuments[0] = new StarTreeDocument(new Long[] { 0L, null }, new Double[] { 2.0, 1.0, 2.0, 2.0, 1.0 }); + expectedStarTreeDocuments[1] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 4.0, 3.0, 1.0, 2.0, 3.0 }); + expectedStarTreeDocuments[2] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 6.0, 4.0, 1.0, 2.0, 4.0 }); + + for (LeafReaderContext context : ir.leaves()) { + SegmentReader reader = Lucene.segmentReader(context.reader()); + CompositeIndexReader starTreeDocValuesReader = (CompositeIndexReader) reader.getDocValuesReader(); + List compositeIndexFields = 
starTreeDocValuesReader.getCompositeIndexFields(); + + for (CompositeIndexFieldInfo compositeIndexFieldInfo : compositeIndexFields) { + StarTreeValues starTreeValues = (StarTreeValues) starTreeDocValuesReader.getCompositeIndexValues(compositeIndexFieldInfo); + StarTreeDocument[] starTreeDocuments = StarTreeTestUtils.getSegmentsStarTreeDocuments( + List.of(starTreeValues), + List.of( + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.DOUBLE, + NumberFieldMapper.NumberType.LONG + ), + Integer.parseInt(starTreeValues.getAttributes().get(STAR_TREE_DOCS_COUNT)) + ); + assertStarTreeDocuments(starTreeDocuments, expectedStarTreeDocuments); + } + } + ir.close(); + directory.close(); + } + + @Override + protected XContentBuilder getMapping() throws IOException { + return topMapping(b -> { + b.startObject("composite"); + b.startObject("startree"); + b.field("type", "star_tree"); + b.startObject("config"); + b.field("max_leaf_docs", 1); + b.startArray("ordered_dimensions"); + b.startObject(); + b.field("name", "keyword1"); + b.endObject(); + b.startObject(); + b.field("name", "keyword2"); + b.endObject(); + b.endArray(); + b.startArray("metrics"); + b.startObject(); + b.field("name", "sndv"); + b.startArray("stats"); + b.value("sum"); + b.value("value_count"); + b.value("avg"); + b.value("min"); + b.value("max"); + b.endArray(); + b.endObject(); + b.endArray(); + b.endObject(); + b.endObject(); + b.endObject(); + b.startObject("properties"); + b.startObject("sndv"); + b.field("type", "integer"); + b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); + b.startObject("keyword2"); + b.field("type", "keyword"); + b.endObject(); + b.endObject(); + }); + } +} diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeTestUtils.java 
b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeTestUtils.java index dc8b3320f3de2..44e40f1db4cc8 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeTestUtils.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeTestUtils.java @@ -99,6 +99,7 @@ public static StarTreeDocument getStarTreeDocument( ) throws IOException { Long[] dims = new Long[dimensionReaders.length]; int i = 0; + for (SequentialDocValuesIterator dimensionDocValueIterator : dimensionReaders) { dimensionDocValueIterator.nextEntry(currentDocId); Long val = dimensionDocValueIterator.value(currentDocId); @@ -117,6 +118,9 @@ public static StarTreeDocument getStarTreeDocument( public static Double toAggregatorValueType(Long value, FieldValueConverter fieldValueConverter) { try { + if (value == null) { + return 0.0; + } return fieldValueConverter.toDoubleValue(value); } catch (Exception e) { throw new IllegalStateException("Cannot convert " + value + " to sortable aggregation type", e); diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java index 012d04c575f55..ac729f6392f63 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilderTests.java @@ -201,7 +201,7 @@ public List getStarTreeDocuments() { @Override public Long getDimensionValue(int docId, int dimensionId) throws IOException { - return 0l; + return 0L; } @Override diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java 
b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java index cc6c1758697dd..077bf0422ab50 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/BuilderTestsUtils.java @@ -18,11 +18,13 @@ import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.VectorEncoding; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.Version; import org.opensearch.index.codec.composite.LuceneDocValuesProducerFactory; @@ -150,6 +152,65 @@ public long cost() { }; } + public static SortedSetDocValues getSortedSetMock(List dimList, List docsWithField) { + return getSortedSetMock(dimList, docsWithField, 1); + } + + public static SortedSetDocValues getSortedSetMock(List dimList, List docsWithField, int valueCount) { + return new SortedSetDocValues() { + int index = -1; + + @Override + public long nextOrd() throws IOException { + return dimList.get(index); + } + + @Override + public int docValueCount() { + return 1; + } + + @Override + public BytesRef lookupOrd(long l) throws IOException { + return new BytesRef("dummy" + l); + } + + @Override + public long getValueCount() { + return valueCount; + } + + @Override + public boolean advanceExact(int target) { + return false; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() { + if (index == docsWithField.size() - 1) { + return NO_MORE_DOCS; + } + index++; + return 
docsWithField.get(index); + } + + @Override + public int advance(int target) { + return 0; + } + + @Override + public long cost() { + return 0; + } + }; + } + public static void validateStarTree( InMemoryTreeNode root, int totalDimensions, @@ -386,7 +447,7 @@ public static void validateStarTreeFileFormats( public static SegmentReadState getReadState( int numDocs, - List dimensionFields, + Map dimensionFields, List metrics, StarTreeField compositeField, SegmentWriteState writeState, @@ -401,7 +462,7 @@ public static SegmentReadState getReadState( FieldInfo[] fields = new FieldInfo[dimensionFields.size() + numMetrics]; int i = 0; - for (String dimension : dimensionFields) { + for (String dimension : dimensionFields.keySet()) { fields[i] = new FieldInfo( fullyQualifiedFieldNameForStarTreeDimensionsDocValues(compositeField.getName(), dimension), i, @@ -409,7 +470,7 @@ public static SegmentReadState getReadState( false, true, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, - DocValuesType.SORTED_NUMERIC, + dimensionFields.get(dimension), -1, Collections.emptyMap(), 0, diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java index c4d6fe6f19812..95adae9335740 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuildMetricTests.java @@ -52,6 +52,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -591,13 +592,19 @@ public void test_build_multipleStarTrees() throws IOException { metaOut.close(); dataOut.close(); + LinkedHashMap fieldsMap = new LinkedHashMap<>(); + 
fieldsMap.put("field1", DocValuesType.SORTED_NUMERIC); + fieldsMap.put("field3", DocValuesType.SORTED_NUMERIC); + fieldsMap.put("field5", DocValuesType.SORTED_NUMERIC); + fieldsMap.put("field8", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = new StarTreeMetadata( "test", STAR_TREE, mock(IndexInput.class), VERSION_CURRENT, builder.numStarTreeNodes, - List.of("field1", "field3", "field5", "field8"), + fieldsMap, List.of( new Metric("field2", List.of(MetricStat.SUM)), new Metric("field4", List.of(MetricStat.SUM)), @@ -614,13 +621,18 @@ public void test_build_multipleStarTrees() throws IOException { 330 ); + LinkedHashMap fieldsMap1 = new LinkedHashMap<>(); + fieldsMap1.put("fieldC", DocValuesType.SORTED_NUMERIC); + fieldsMap1.put("fieldB", DocValuesType.SORTED_NUMERIC); + fieldsMap1.put("fieldL", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata2 = new StarTreeMetadata( "test", STAR_TREE, mock(IndexInput.class), VERSION_CURRENT, builder.numStarTreeNodes, - List.of("fieldC", "fieldB", "fieldL"), + fieldsMap1, List.of(new Metric("fieldI", List.of(MetricStat.SUM))), 7, 27, @@ -631,9 +643,8 @@ public void test_build_multipleStarTrees() throws IOException { 1287 ); - List totalDimensionFields = new ArrayList<>(); - totalDimensionFields.addAll(starTreeMetadata.getDimensionFields()); - totalDimensionFields.addAll(starTreeMetadata2.getDimensionFields()); + LinkedHashMap totalDimensionFields = new LinkedHashMap<>(starTreeMetadata.getDimensionFields()); + totalDimensionFields.putAll(starTreeMetadata2.getDimensionFields()); List metrics = new ArrayList<>(); metrics.addAll(starTreeMetadata.getMetrics()); diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java index a92ac39cb7020..440268f1f803c 100644 --- 
a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java @@ -9,15 +9,18 @@ package org.opensearch.index.compositeindex.datacube.startree.builder; import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.IndexInput; import org.opensearch.index.codec.composite.LuceneDocValuesConsumerFactory; import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat; import org.opensearch.index.compositeindex.datacube.Dimension; +import org.opensearch.index.compositeindex.datacube.KeywordDimension; import org.opensearch.index.compositeindex.datacube.Metric; import org.opensearch.index.compositeindex.datacube.MetricStat; import org.opensearch.index.compositeindex.datacube.NumericDimension; @@ -27,17 +30,20 @@ import org.opensearch.index.compositeindex.datacube.startree.fileformats.meta.StarTreeMetadata; import org.opensearch.index.compositeindex.datacube.startree.utils.SequentialDocValuesIterator; import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedNumericStarTreeValuesIterator; +import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedSetStarTreeValuesIterator; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import static 
org.opensearch.index.compositeindex.datacube.startree.builder.BuilderTestsUtils.getSortedNumericMock; +import static org.opensearch.index.compositeindex.datacube.startree.builder.BuilderTestsUtils.getSortedSetMock; import static org.opensearch.index.compositeindex.datacube.startree.builder.BuilderTestsUtils.validateStarTree; import static org.opensearch.index.compositeindex.datacube.startree.fileformats.StarTreeWriter.VERSION_CURRENT; import static org.opensearch.index.mapper.CompositeMappedFieldType.CompositeFieldType.STAR_TREE; @@ -124,14 +130,16 @@ public void testFlushFlow() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - + LinkedHashMap docValues = new LinkedHashMap<>(); + docValues.put("field1", DocValuesType.SORTED_NUMERIC); + docValues.put("field3", DocValuesType.SORTED_NUMERIC); StarTreeMetadata starTreeMetadata = new StarTreeMetadata( "sf", STAR_TREE, mock(IndexInput.class), VERSION_CURRENT, builder.numStarTreeNodes, - List.of("field1", "field3"), + docValues, List.of(new Metric("field2", List.of(MetricStat.SUM, MetricStat.VALUE_COUNT, MetricStat.AVG))), 6, builder.numStarTreeDocs, @@ -222,13 +230,16 @@ public void testFlushFlowDimsReverse() throws IOException { dataOut.close(); docValuesConsumer.close(); + LinkedHashMap docValues = new LinkedHashMap<>(); + docValues.put("field1", DocValuesType.SORTED_NUMERIC); + docValues.put("field3", DocValuesType.SORTED_NUMERIC); StarTreeMetadata starTreeMetadata = new StarTreeMetadata( "sf", STAR_TREE, mock(IndexInput.class), VERSION_CURRENT, builder.numStarTreeNodes, - List.of("field1", "field3"), + docValues, List.of(new Metric("field2", List.of(MetricStat.SUM, MetricStat.VALUE_COUNT, MetricStat.AVG))), 6, builder.numStarTreeDocs, @@ -322,7 +333,10 @@ public void testFlushFlowBuild() throws IOException { dataOut.close(); docValuesConsumer.close(); - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 100, 1, 6699); + LinkedHashMap map = new 
LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 100, 1, 6699); validateStarTreeFileFormats( builder.getRootNode(), @@ -396,6 +410,115 @@ public void testFlushFlowWithTimestamps() throws IOException { validateStarTree(builder.getRootNode(), 3, 10, builder.getStarTreeDocuments()); } + public void testFlushFlowForKeywords() throws IOException { + List dimList = List.of(0L, 1L, 2L, 3L, 4L, 5L); + List docsWithField = List.of(0, 1, 2, 3, 4, 5); + List dimList2 = List.of(0L, 1L, 2L, 3L, 4L, 5L); + List docsWithField2 = List.of(0, 1, 2, 3, 4, 5); + + List metricsList = List.of( + getLongFromDouble(0.0), + getLongFromDouble(10.0), + getLongFromDouble(20.0), + getLongFromDouble(30.0), + getLongFromDouble(40.0), + getLongFromDouble(50.0) + ); + List metricsWithField = List.of(0, 1, 2, 3, 4, 5); + + compositeField = getStarTreeFieldWithKeywordField(); + SortedSetStarTreeValuesIterator d1sndv = new SortedSetStarTreeValuesIterator(getSortedSetMock(dimList, docsWithField)); + SortedSetStarTreeValuesIterator d2sndv = new SortedSetStarTreeValuesIterator(getSortedSetMock(dimList2, docsWithField2)); + SortedNumericStarTreeValuesIterator m1sndv = new SortedNumericStarTreeValuesIterator( + getSortedNumericMock(metricsList, metricsWithField) + ); + SortedNumericStarTreeValuesIterator m2sndv = new SortedNumericStarTreeValuesIterator( + getSortedNumericMock(metricsList, metricsWithField) + ); + + writeState = getWriteState(6, writeState.segmentInfo.getId()); + builder = getStarTreeBuilder(metaOut, dataOut, compositeField, writeState, mapperService); + SequentialDocValuesIterator[] dimDvs = { new SequentialDocValuesIterator(d1sndv), new SequentialDocValuesIterator(d2sndv) }; + Iterator starTreeDocumentIterator = builder.sortAndAggregateSegmentDocuments( + dimDvs, + List.of(new SequentialDocValuesIterator(m1sndv), new SequentialDocValuesIterator(m2sndv)) + 
); + /** + * Asserting following dim / metrics [ dim1, dim2 / Sum [metric], count [metric] ] + [0, 0] | [0.0, 1] + [1, 1] | [10.0, 1] + [2, 2] | [20.0, 1] + [3, 3] | [30.0, 1] + [4, 4] | [40.0, 1] + [5, 5] | [50.0, 1] + */ + + SegmentWriteState w = getWriteState(DocIdSetIterator.NO_MORE_DOCS, writeState.segmentInfo.getId()); + this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec( + w, + Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC, + Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION, + Composite912DocValuesFormat.META_DOC_VALUES_CODEC, + Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION + ); + Map dv = new LinkedHashMap<>(); + dv.put("field1", getSortedSetMock(dimList, docsWithField)); + dv.put("field3", getSortedSetMock(dimList2, docsWithField2)); + builder.setFlushSortedSetDocValuesMap(dv); + builder.build(starTreeDocumentIterator, new AtomicInteger(), docValuesConsumer); + + List starTreeDocuments = builder.getStarTreeDocuments(); + int count = 0; + for (StarTreeDocument starTreeDocument : starTreeDocuments) { + count++; + if (starTreeDocument.dimensions[1] != null) { + assertEquals( + starTreeDocument.dimensions[0] == null + ? 
starTreeDocument.dimensions[1] * 1 * 10.0 + : starTreeDocument.dimensions[0] * 10, + starTreeDocument.metrics[0] + ); + assertEquals(1L, starTreeDocument.metrics[1]); + } else { + assertEquals(150D, starTreeDocument.metrics[0]); + assertEquals(6L, starTreeDocument.metrics[1]); + } + } + assertEquals(13, count); + validateStarTree(builder.getRootNode(), 2, 1000, builder.getStarTreeDocuments()); + + metaOut.close(); + dataOut.close(); + docValuesConsumer.close(); + LinkedHashMap docValues = new LinkedHashMap<>(); + docValues.put("field1", DocValuesType.SORTED_SET); + docValues.put("field3", DocValuesType.SORTED_SET); + StarTreeMetadata starTreeMetadata = new StarTreeMetadata( + "sf", + STAR_TREE, + mock(IndexInput.class), + VERSION_CURRENT, + builder.numStarTreeNodes, + docValues, + List.of(new Metric("field2", List.of(MetricStat.SUM, MetricStat.VALUE_COUNT, MetricStat.AVG))), + 6, + builder.numStarTreeDocs, + 1000, + Set.of(), + getBuildMode(), + 0, + 264 + ); + + validateStarTreeFileFormats( + builder.getRootNode(), + builder.getStarTreeDocuments().size(), + starTreeMetadata, + builder.getStarTreeDocuments() + ); + + } + private StarTreeField getStarTreeFieldWithMultipleMetrics() { Dimension d1 = new NumericDimension("field1"); Dimension d2 = new NumericDimension("field3"); @@ -408,6 +531,18 @@ private StarTreeField getStarTreeFieldWithMultipleMetrics() { return new StarTreeField("sf", dims, metrics, c); } + private StarTreeField getStarTreeFieldWithKeywordField() { + Dimension d1 = new KeywordDimension("field1"); + Dimension d2 = new KeywordDimension("field3"); + Metric m1 = new Metric("field2", List.of(MetricStat.SUM)); + Metric m2 = new Metric("field2", List.of(MetricStat.VALUE_COUNT)); + Metric m3 = new Metric("field2", List.of(MetricStat.AVG)); + List dims = List.of(d1, d2); + List metrics = List.of(m1, m2, m3); + StarTreeFieldConfiguration c = new StarTreeFieldConfiguration(1000, new HashSet<>(), getBuildMode()); + return new StarTreeField("sf", dims, 
metrics, c); + } + private static DocValuesProducer getDocValuesProducer(SortedNumericDocValues sndv) { return new EmptyDocValuesProducer() { @Override diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java index 00e53534a7606..be16961e781db 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java @@ -9,8 +9,10 @@ package org.opensearch.index.compositeindex.datacube.startree.builder; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.opensearch.common.settings.Settings; import org.opensearch.index.codec.composite.LuceneDocValuesConsumerFactory; @@ -26,6 +28,7 @@ import org.opensearch.index.compositeindex.datacube.startree.fileformats.meta.StarTreeMetadata; import org.opensearch.index.compositeindex.datacube.startree.index.StarTreeValues; import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedNumericStarTreeValuesIterator; +import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.SortedSetStarTreeValuesIterator; import org.opensearch.index.compositeindex.datacube.startree.utils.iterator.StarTreeValuesIterator; import org.opensearch.index.mapper.ContentPath; import org.opensearch.index.mapper.DocumentMapper; @@ -49,6 +52,7 @@ import static org.opensearch.index.compositeindex.CompositeIndexConstants.SEGMENT_DOCS_COUNT; import static 
org.opensearch.index.compositeindex.datacube.startree.builder.BuilderTestsUtils.getSortedNumericMock; +import static org.opensearch.index.compositeindex.datacube.startree.builder.BuilderTestsUtils.getSortedSetMock; import static org.opensearch.index.compositeindex.datacube.startree.builder.BuilderTestsUtils.traverseStarTree; import static org.opensearch.index.compositeindex.datacube.startree.builder.BuilderTestsUtils.validateStarTree; import static org.opensearch.index.compositeindex.datacube.startree.utils.StarTreeUtils.fullyQualifiedFieldNameForStarTreeMetricsDocValues; @@ -348,8 +352,10 @@ public void testMergeFlowWithSum() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 6, 1000, 264); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 6, 1000, 264); validateStarTreeFileFormats( builder.getRootNode(), @@ -421,8 +427,10 @@ public void testMergeFlowWithCount() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 6, 1000, 264); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 6, 1000, 264); validateStarTreeFileFormats( builder.getRootNode(), @@ -568,8 +576,10 @@ public void testMergeFlowWithMissingDocs() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 10, 1000, 363); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", 
DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 10, 1000, 363); validateStarTreeFileFormats( builder.getRootNode(), @@ -656,8 +666,10 @@ public void testMergeFlowWithMissingDocsWithZero() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 6, 1000, 231); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 6, 1000, 231); validateStarTreeFileFormats( builder.getRootNode(), @@ -747,8 +759,10 @@ public void testMergeFlowWithMissingDocsWithZeroComplexCase() throws IOException metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 7, 1000, 231); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 7, 1000, 231); validateStarTreeFileFormats( builder.getRootNode(), @@ -834,8 +848,10 @@ public void testMergeFlowWithMissingDocsInSecondDim() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 10, 1000, 363); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 10, 1000, 363); validateStarTreeFileFormats( builder.getRootNode(), @@ -919,8 +935,10 @@ public void testMergeFlowWithDocsMissingAtTheEnd() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = 
getStarTreeMetadata(List.of("field1", "field3"), 10, 1000, 363); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 10, 1000, 363); validateStarTreeFileFormats( builder.getRootNode(), @@ -992,8 +1010,10 @@ public void testMergeFlowWithEmptyFieldsInOneSegment() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 6, 1000, 264); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 6, 1000, 264); validateStarTreeFileFormats( builder.getRootNode(), @@ -1391,8 +1411,10 @@ public void testMergeFlowWithDifferentDocsFromSegments() throws IOException { metaOut.close(); dataOut.close(); docValuesConsumer.close(); - - StarTreeMetadata starTreeMetadata = getStarTreeMetadata(List.of("field1", "field3"), 9, 1000, 330); + LinkedHashMap map = new LinkedHashMap<>(); + map.put("field1", DocValuesType.SORTED_NUMERIC); + map.put("field3", DocValuesType.SORTED_NUMERIC); + StarTreeMetadata starTreeMetadata = getStarTreeMetadata(map, 9, 1000, 330); validateStarTreeFileFormats( builder.getRootNode(), @@ -1776,6 +1798,110 @@ public void testMergeFlowWithTimestamps() throws IOException { ); } + public void testMergeFlowWithKeywords() throws IOException { + List dimList = List.of(0L, 1L, 2L, 3L, 4L, 5L, 6L); + List docsWithField = List.of(0, 1, 2, 3, 4, 5, 6); + List dimList2 = List.of(0L, 1L, 2L, 3L, 4L, 5L, -1L); + List docsWithField2 = List.of(0, 1, 2, 3, 4, 5, 6); + List metricsList1 = List.of( + getLongFromDouble(0.0), + getLongFromDouble(10.0), + getLongFromDouble(20.0), + getLongFromDouble(30.0), + getLongFromDouble(40.0), + getLongFromDouble(50.0), + 
getLongFromDouble(60.0) + ); + List metricsWithField1 = List.of(0, 1, 2, 3, 4, 5, 6); + List metricsList = List.of(0L, 1L, 2L, 3L, 4L, 5L, 6L); + List metricsWithField = List.of(0, 1, 2, 3, 4, 5, 6); + + List dimList3 = List.of(0L, 1L, 2L, 3L, -1L); + List docsWithField3 = List.of(0, 1, 2, 3, 4); + List dimList4 = List.of(0L, 1L, 2L, 3L, -1L); + List docsWithField4 = List.of(0, 1, 2, 3, 4); + List metricsList21 = List.of( + getLongFromDouble(0.0), + getLongFromDouble(10.0), + getLongFromDouble(20.0), + getLongFromDouble(30.0), + getLongFromDouble(40.0) + ); + List metricsWithField21 = List.of(0, 1, 2, 3, 4); + List metricsList2 = List.of(0L, 1L, 2L, 3L, 4L); + List metricsWithField2 = List.of(0, 1, 2, 3, 4); + + compositeField = getStarTreeFieldWithKeywords(); + StarTreeValues starTreeValues = getStarTreeValuesWithKeywords( + getSortedSetMock(dimList, docsWithField), + getSortedSetMock(dimList2, docsWithField2), + getSortedNumericMock(metricsList, metricsWithField), + getSortedNumericMock(metricsList1, metricsWithField1), + compositeField, + "6" + ); + + StarTreeValues starTreeValues2 = getStarTreeValuesWithKeywords( + getSortedSetMock(dimList3, docsWithField3), + getSortedSetMock(dimList4, docsWithField4), + getSortedNumericMock(metricsList2, metricsWithField2), + getSortedNumericMock(metricsList21, metricsWithField21), + compositeField, + "4" + ); + this.docValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec( + writeState, + Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC, + Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION, + Composite912DocValuesFormat.META_DOC_VALUES_CODEC, + Composite912DocValuesFormat.META_DOC_VALUES_EXTENSION + ); + builder = getStarTreeBuilder(metaOut, dataOut, compositeField, getWriteState(4, writeState.segmentInfo.getId()), mapperService); + // Initialize the mock MergeState within the method + + Iterator starTreeDocumentIterator = builder.mergeStarTrees(List.of(starTreeValues, starTreeValues2)); + 
/** + [0, 0] | [5, 50.0] + [1, 1] | [7, 70.0] + [2, 2] | [9, 90.0] + [3, 3] | [11, 110.0] + [4, 4] | [4, 40.0] + [5, 5] | [5, 50.0] + */ + int count = 0; + builder.appendDocumentsToStarTree(starTreeDocumentIterator); + for (StarTreeDocument starTreeDocument : builder.getStarTreeDocuments()) { + count++; + if (count <= 4) { + assertEquals(starTreeDocument.dimensions[0] * 2, (long) starTreeDocument.metrics[0], 0); + assertEquals(starTreeDocument.dimensions[0] * 20.0, (double) starTreeDocument.metrics[1], 0); + } else { + assertEquals(starTreeDocument.dimensions[0], (long) starTreeDocument.metrics[0], 0); + assertEquals(starTreeDocument.dimensions[0] * 10.0, (double) starTreeDocument.metrics[1], 0); + } + } + assertEquals(6, count); + builder.build(starTreeDocumentIterator, new AtomicInteger(), docValuesConsumer); + validateStarTree(builder.getRootNode(), 4, 10, builder.getStarTreeDocuments()); + metaOut.close(); + dataOut.close(); + docValuesConsumer.close(); + + StarTreeMetadata starTreeMetadata = getStarTreeMetadata( + getStarTreeDimensionNames(compositeField.getDimensionsOrder()), + 6, + compositeField.getStarTreeConfig().maxLeafDocs(), + 264 + ); + + validateStarTreeFileFormats( + builder.getRootNode(), + builder.getStarTreeDocuments().size(), + starTreeMetadata, + builder.getStarTreeDocuments() + ); + } + private StarTreeValues getStarTreeValuesWithDates( SortedNumericDocValues dimList, SortedNumericDocValues dimList2, @@ -1857,6 +1983,93 @@ private StarTreeValues getStarTreeValues( return starTreeValues; } + private StarTreeValues getStarTreeValuesWithKeywords( + SortedSetDocValues dimList, + SortedSetDocValues dimList2, + SortedNumericDocValues metricsList, + SortedNumericDocValues metricsList1, + StarTreeField sf, + String number + ) { + SortedSetDocValues d1sndv = dimList; + SortedSetDocValues d2sndv = dimList2; + SortedNumericDocValues m1sndv = metricsList; + Map> dimDocIdSetIterators = Map.of( + "field1", + () -> new 
SortedSetStarTreeValuesIterator(d1sndv), + "field3", + () -> new SortedSetStarTreeValuesIterator(d2sndv) + ); + + Map> metricDocIdSetIterators = new LinkedHashMap<>(); + metricDocIdSetIterators.put( + fullyQualifiedFieldNameForStarTreeMetricsDocValues( + sf.getName(), + "field2", + sf.getMetrics().get(0).getMetrics().get(0).getTypeName() + ), + () -> new SortedNumericStarTreeValuesIterator(metricsList) + ); + metricDocIdSetIterators.put( + fullyQualifiedFieldNameForStarTreeMetricsDocValues( + sf.getName(), + "field2", + sf.getMetrics().get(0).getMetrics().get(1).getTypeName() + ), + () -> new SortedNumericStarTreeValuesIterator(metricsList1) + ); + StarTreeValues starTreeValues = new StarTreeValues( + sf, + null, + dimDocIdSetIterators, + metricDocIdSetIterators, + Map.of(CompositeIndexConstants.SEGMENT_DOCS_COUNT, number), + null + ); + return starTreeValues; + } + + private StarTreeValues getStarTreeValuesWithKeywords( + SortedSetDocValues dimList, + SortedSetDocValues dimList2, + SortedSetDocValues dimList4, + SortedSetDocValues dimList3, + SortedNumericDocValues metricsList, + SortedNumericDocValues metricsList1, + StarTreeField sf, + String number + ) { + Map> dimDocIdSetIterators = Map.of( + "field1_minute", + () -> new SortedSetStarTreeValuesIterator(dimList), + "field1_half-hour", + () -> new SortedSetStarTreeValuesIterator(dimList4), + "field1_hour", + () -> new SortedSetStarTreeValuesIterator(dimList2), + "field3", + () -> new SortedSetStarTreeValuesIterator(dimList3) + ); + Map> metricDocIdSetIterators = new LinkedHashMap<>(); + + metricDocIdSetIterators.put( + fullyQualifiedFieldNameForStarTreeMetricsDocValues( + sf.getName(), + "field2", + sf.getMetrics().get(0).getMetrics().get(0).getTypeName() + ), + () -> new SortedNumericStarTreeValuesIterator(metricsList) + ); + metricDocIdSetIterators.put( + fullyQualifiedFieldNameForStarTreeMetricsDocValues( + sf.getName(), + "field2", + sf.getMetrics().get(0).getMetrics().get(1).getTypeName() + ), + () -> new 
SortedNumericStarTreeValuesIterator(metricsList1) + ); + return new StarTreeValues(sf, null, dimDocIdSetIterators, metricDocIdSetIterators, Map.of(SEGMENT_DOCS_COUNT, number), null); + } + private StarTreeValues getStarTreeValues( List dimList1, List docsWithField1, diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java index 6733cac12f657..9c9beaea4f52c 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorEncoding; @@ -31,6 +32,7 @@ import org.opensearch.index.compositeindex.datacube.DataCubeDateTimeUnit; import org.opensearch.index.compositeindex.datacube.DateDimension; import org.opensearch.index.compositeindex.datacube.Dimension; +import org.opensearch.index.compositeindex.datacube.KeywordDimension; import org.opensearch.index.compositeindex.datacube.Metric; import org.opensearch.index.compositeindex.datacube.MetricStat; import org.opensearch.index.compositeindex.datacube.NumericDimension; @@ -60,6 +62,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -87,6 +90,7 @@ public abstract class StarTreeBuilderTestCase extends OpenSearchTestCase { protected String dataFileName; protected String metaFileName; protected List dimensionsOrder; + 
protected MergeState mergeState; public StarTreeBuilderTestCase(StarTreeFieldConfiguration.StarTreeBuildMode buildMode) { this.buildMode = buildMode; @@ -155,6 +159,8 @@ public void setup() throws IOException { } writeState = getWriteState(5, UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8)); + mergeState = new MergeState(null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, false); + dataFileName = IndexFileNames.segmentFileName( writeState.segmentInfo.name, writeState.segmentSuffix, @@ -240,7 +246,7 @@ SegmentWriteState getWriteState(int numDocs, byte[] id) { return BuilderTestsUtils.getWriteState(numDocs, id, fieldsInfo, directory); } - SegmentReadState getReadState(int numDocs, List dimensionFields, List metrics) { + SegmentReadState getReadState(int numDocs, Map dimensionFields, List metrics) { return BuilderTestsUtils.getReadState(numDocs, dimensionFields, metrics, compositeField, writeState, directory); } @@ -248,10 +254,12 @@ protected Map getAttributes(int numSegmentDocs) { return Map.of(CompositeIndexConstants.SEGMENT_DOCS_COUNT, String.valueOf(numSegmentDocs)); } - protected List getStarTreeDimensionNames(List dimensionsOrder) { - List dimensionNames = new ArrayList<>(); + protected LinkedHashMap getStarTreeDimensionNames(List dimensionsOrder) { + LinkedHashMap dimensionNames = new LinkedHashMap<>(); for (Dimension dimension : dimensionsOrder) { - dimensionNames.addAll(dimension.getSubDimensionNames()); + for (String dimensionName : dimension.getSubDimensionNames()) { + dimensionNames.put(dimensionName, dimension.getDocValuesType()); + } } return dimensionNames; } @@ -320,7 +328,12 @@ protected long getLongFromDouble(double value) { return NumericUtils.doubleToSortableLong(value); } - protected StarTreeMetadata getStarTreeMetadata(List fields, int segmentAggregatedDocCount, int maxLeafDocs, int dataLength) { + protected StarTreeMetadata getStarTreeMetadata( + LinkedHashMap fields, + int 
segmentAggregatedDocCount, + int maxLeafDocs, + int dataLength + ) { return new StarTreeMetadata( "sf", STAR_TREE, @@ -339,6 +352,17 @@ protected StarTreeMetadata getStarTreeMetadata(List fields, int segmentA ); } + protected StarTreeField getStarTreeFieldWithKeywords() { + Dimension d1 = new KeywordDimension("field1"); + Dimension d2 = new KeywordDimension("field3"); + Metric m1 = new Metric("field2", List.of(MetricStat.VALUE_COUNT, MetricStat.SUM)); + List dims = List.of(d1, d2); + List metrics = List.of(m1); + StarTreeFieldConfiguration c = new StarTreeFieldConfiguration(10, new HashSet<>(), getBuildMode()); + StarTreeField sf = new StarTreeField("sf", dims, metrics, c); + return sf; + } + protected StarTreeField getStarTreeFieldWithDateDimension() { List intervals = new ArrayList<>(); intervals.add(new DateTimeUnitAdapter(Rounding.DateTimeUnit.MINUTES_OF_HOUR)); diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataTests.java index c8636426449ad..cc91d69be97c1 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/fileformats/meta/StarTreeMetadataTests.java @@ -42,6 +42,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.UUID; @@ -181,8 +182,11 @@ public void test_starTreeMetadata() throws IOException { assertEquals(starTreeMetadata.getNumberOfNodes(), numberOfNodes); assertNotNull(starTreeMetadata); - for (int i = 0; i < dimensionsOrder.size(); i++) { - assertEquals(dimensionsOrder.get(i).getField(), starTreeMetadata.getDimensionFields().get(i)); + assertEquals(dimensionsOrder.size(), 
starTreeMetadata.dimensionFieldsToDocValuesMap.size()); + int k = 0; + for (Map.Entry entry : starTreeMetadata.dimensionFieldsToDocValuesMap.entrySet()) { + assertEquals(dimensionsOrder.get(k).getField(), entry.getKey()); + k++; } assertEquals(starTreeField.getMetrics().size(), starTreeMetadata.getMetrics().size()); diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtilsTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtilsTests.java index 9cca0b04e9ea4..7e438c18d9ab9 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtilsTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtilsTests.java @@ -17,6 +17,7 @@ import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.UUID; @@ -43,7 +44,7 @@ public void testFullyQualifiedFieldNameForStarTreeMetricsDocValues() { public void testGetFieldInfoList() { List fieldNames = Arrays.asList("field1", "field2", "field3"); - FieldInfo[] actualFieldInfos = StarTreeUtils.getFieldInfoList(fieldNames); + FieldInfo[] actualFieldInfos = StarTreeUtils.getFieldInfoList(fieldNames, new HashMap<>()); for (int i = 0; i < fieldNames.size(); i++) { assertFieldInfos(actualFieldInfos[i], fieldNames.get(i), i); } diff --git a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java index c35cf3fc1e591..77534b514a59a 100644 --- a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java @@ -540,9 +540,14 @@ public void testCompositeFields() throws Exception { .endObject() .toString(); + Settings settings = Settings.builder() + .put(StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING.getKey(), true) + 
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)) + .build(); + IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> createIndex("invalid").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)) + () -> createIndex("invalid", settings).mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)) ); assertEquals( "star tree index is under an experimental feature and can be activated only by enabling opensearch.experimental.feature.composite_index.star_tree.enabled feature flag in the JVM options", @@ -552,10 +557,6 @@ public void testCompositeFields() throws Exception { final Settings starTreeEnabledSettings = Settings.builder().put(STAR_TREE_INDEX, "true").build(); FeatureFlags.initializeFeatureFlags(starTreeEnabledSettings); - Settings settings = Settings.builder() - .put(StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING.getKey(), true) - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)) - .build(); DocumentMapper documentMapper = createIndex("test", settings).mapperService() .documentMapperParser() .parse("tweet", new CompressedXContent(mapping)); diff --git a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java index aac460bd5e332..8ec34b3eb660c 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java @@ -672,6 +672,9 @@ private XContentBuilder getExpandedMappingWithJustAvg(String dim, String metric) b.startObject("size"); b.field("type", "integer"); b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }); } @@ -718,6 +721,7 @@ private XContentBuilder getMappingWithDuplicateFields(boolean 
isDuplicateDim, bo .field("type", "integer") .field("doc_values", true) .endObject() + .endObject() .endObject(); } catch (IOException e) { @@ -772,6 +776,9 @@ private XContentBuilder getExpandedMappingWithJustSum(String dim, String metric) b.startObject("size"); b.field("type", "integer"); b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }); } @@ -823,6 +830,9 @@ private XContentBuilder getExpandedMappingWithSumAndCount(String dim, String met b.startObject("size"); b.field("type", "integer"); b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }); } @@ -866,6 +876,9 @@ private XContentBuilder getMinMappingWithDateDims(boolean calendarIntervalsExcee b.startObject(); b.field("name", "metric_field"); b.endObject(); + b.startObject(); + b.field("name", "keyword1"); + b.endObject(); } b.endArray(); @@ -895,6 +908,9 @@ private XContentBuilder getMinMappingWithDateDims(boolean calendarIntervalsExcee b.startObject("metric_field"); b.field("type", "integer"); b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }); @@ -920,6 +936,9 @@ private XContentBuilder getMinMapping( b.startObject(); b.field("name", "status"); b.endObject(); + b.startObject(); + b.field("name", "keyword1"); + b.endObject(); b.endArray(); } if (!isEmptyMetrics) { @@ -951,6 +970,9 @@ private XContentBuilder getMinMapping( b.field("type", "integer"); b.endObject(); } + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }); } @@ -1018,7 +1040,9 @@ private XContentBuilder getMinMappingWith2StarTrees() throws IOException { b.startObject("metric_field"); b.field("type", "integer"); b.endObject(); - + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }); } @@ -1058,6 +1082,9 @@ private XContentBuilder getInvalidMapping( b.startObject(); b.field("name", "status"); 
b.endObject(); + b.startObject(); + b.field("name", "keyword1"); + b.endObject(); } b.endArray(); b.startArray("metrics"); @@ -1090,7 +1117,7 @@ private XContentBuilder getInvalidMapping( if (!invalidDimType) { b.field("type", "integer"); } else { - b.field("type", "keyword"); + b.field("type", "ip"); } b.endObject(); b.startObject("metric_field"); @@ -1100,6 +1127,9 @@ private XContentBuilder getInvalidMapping( b.field("type", "integer"); } b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }); } @@ -1132,6 +1162,9 @@ private XContentBuilder getInvalidMappingWithDv( b.startObject(); b.field("name", "status"); b.endObject(); + b.startObject(); + b.field("name", "keyword1"); + b.endObject(); } b.endArray(); b.startArray("metrics"); @@ -1168,6 +1201,9 @@ private XContentBuilder getInvalidMappingWithDv( b.field("doc_values", "true"); } b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }); } @@ -1224,6 +1260,9 @@ public void testEmptyName() { b.startObject("status"); b.field("type", "integer"); b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); b.endObject(); }))); assertThat(e.getMessage(), containsString("name cannot be empty string")); diff --git a/server/src/test/java/org/opensearch/search/SearchServiceStarTreeTests.java b/server/src/test/java/org/opensearch/search/SearchServiceStarTreeTests.java index 0c88154ca2b38..3b32e9e4ac6b7 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceStarTreeTests.java +++ b/server/src/test/java/org/opensearch/search/SearchServiceStarTreeTests.java @@ -17,7 +17,6 @@ import org.opensearch.core.common.Strings; import org.opensearch.index.IndexService; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; -import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import 
org.opensearch.index.compositeindex.CompositeIndexSettings; import org.opensearch.index.compositeindex.datacube.startree.StarTreeIndexSettings; import org.opensearch.index.mapper.CompositeMappedFieldType; @@ -26,6 +25,7 @@ import org.opensearch.index.shard.IndexShard; import org.opensearch.indices.IndicesService; import org.opensearch.search.aggregations.AggregationBuilders; +import org.opensearch.search.aggregations.startree.StarTreeFilterTests; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.internal.AliasFilter; import org.opensearch.search.internal.ReaderContext; @@ -55,7 +55,7 @@ public void testParseQueryToOriginalOrStarTreeQuery() throws IOException { .indices() .prepareCreate("test") .setSettings(settings) - .setMapping(StarTreeDocValuesFormatTests.getExpandedMapping()); + .setMapping(StarTreeFilterTests.getExpandedMapping(1, false)); createIndex("test", builder); IndicesService indicesService = getInstanceFromNode(IndicesService.class); diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java index 0327bd9990784..12e83cbbadd5d 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java @@ -90,7 +90,7 @@ protected Codec getCodec() { final Logger testLogger = LogManager.getLogger(MetricAggregatorTests.class); MapperService mapperService; try { - mapperService = StarTreeDocValuesFormatTests.createMapperService(StarTreeDocValuesFormatTests.getExpandedMapping()); + mapperService = StarTreeDocValuesFormatTests.createMapperService(StarTreeFilterTests.getExpandedMapping(1, false)); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java 
b/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java index f8eb71a40319a..b03cb5ac7bb9d 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java @@ -47,7 +47,7 @@ import java.util.List; import java.util.Map; -import static org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests.topMapping; +import static org.opensearch.index.codec.composite912.datacube.startree.AbstractStarTreeDVFormatTests.topMapping; public class StarTreeFilterTests extends AggregatorTestCase {