Merge branch 'main' into CoordinatorStats
Signed-off-by: sahil buddharaju <[email protected]>
sahil buddharaju committed Aug 10, 2023
2 parents 84ab313 + 7278f43 commit 18aeaa8
Showing 537 changed files with 2,965 additions and 2,514 deletions.
5 changes: 3 additions & 2 deletions CHANGELOG.md
@@ -89,7 +89,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),

### Dependencies
- Bump `org.apache.logging.log4j:log4j-core` from 2.17.1 to 2.20.0 ([#8307](https://github.com/opensearch-project/OpenSearch/pull/8307))
- Bump `io.grpc:grpc-context` from 1.46.0 to 1.56.1 ([#8726](https://github.com/opensearch-project/OpenSearch/pull/8726))
- Bump `io.grpc:grpc-context` from 1.46.0 to 1.57.1 ([#8726](https://github.com/opensearch-project/OpenSearch/pull/8726), [#9145](https://github.com/opensearch-project/OpenSearch/pull/9145))
- Bump `com.netflix.nebula:gradle-info-plugin` from 12.1.5 to 12.1.6 ([#8724](https://github.com/opensearch-project/OpenSearch/pull/8724))
- Bump `commons-codec:commons-codec` from 1.15 to 1.16.0 ([#8725](https://github.com/opensearch-project/OpenSearch/pull/8725))
- Bump `org.apache.zookeeper:zookeeper` from 3.8.1 to 3.9.0 ([#8844](https://github.com/opensearch-project/OpenSearch/pull/8844), [#9146](https://github.com/opensearch-project/OpenSearch/pull/9146))
@@ -118,6 +118,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add support for aggregation profiler with concurrent aggregation ([#8801](https://github.com/opensearch-project/OpenSearch/pull/8801))
- [Remove] Deprecated Fractional ByteSizeValue support #9005 ([#9005](https://github.com/opensearch-project/OpenSearch/pull/9005))
- Make MultiBucketConsumerService thread safe to use across slices during search ([#9047](https://github.com/opensearch-project/OpenSearch/pull/9047))
- Change shard_size and shard_min_doc_count evaluation to happen in shard level reduce phase ([#9085](https://github.com/opensearch-project/OpenSearch/pull/9085))

### Deprecated

@@ -129,4 +130,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
### Security

[Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.x...HEAD
[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x
[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x
@@ -57,6 +57,7 @@
import org.opensearch.search.aggregations.InternalAggregations;
import org.opensearch.search.aggregations.MultiBucketConsumerService;
import org.opensearch.search.aggregations.bucket.terms.StringTerms;
import org.opensearch.search.aggregations.bucket.terms.TermsAggregator;
import org.opensearch.search.aggregations.pipeline.PipelineAggregator;
import org.opensearch.search.builder.SearchSourceBuilder;
import org.opensearch.search.query.QuerySearchResult;
@@ -170,15 +171,14 @@ private StringTerms newTerms(Random rand, BytesRef[] dict, boolean withNested) {
"terms",
BucketOrder.key(true),
BucketOrder.count(false),
topNSize,
1,
Collections.emptyMap(),
DocValueFormat.RAW,
numShards,
true,
0,
buckets,
0
0,
new TermsAggregator.BucketCountThresholds(1, 0, topNSize, numShards)
);
}
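
For readers following the aggregation changes: the hunk above (and the similar one below) replaces several separate size/count constructor arguments with a single TermsAggregator.BucketCountThresholds value. A minimal sketch of that consolidation follows; the parameter-name mapping is assumed from how the old arguments line up against the new call, while the values themselves mirror this diff.

import org.opensearch.search.aggregations.bucket.terms.TermsAggregator;

class BucketCountThresholdsSketch {
    // Assumed mapping (old flat arguments -> new consolidated object), inferred from this hunk:
    //   requiredSize = topNSize, minDocCount = 1, shardMinDocCount = 0, shardSize = numShards
    static TermsAggregator.BucketCountThresholds thresholds(int topNSize, int numShards) {
        return new TermsAggregator.BucketCountThresholds(
            1,         // minDocCount (previously the bare "1" argument)
            0,         // shardMinDocCount
            topNSize,  // requiredSize (previously the bare "topNSize" argument)
            numShards  // shardSize (previously the bare "numShards" argument)
        );
    }
}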

@@ -51,6 +51,7 @@
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.opensearch.search.aggregations.bucket.terms.TermsAggregator;

import java.util.ArrayList;
import java.util.List;
@@ -86,15 +87,14 @@ private StringTerms newTerms(boolean withNested) {
"test",
BucketOrder.key(true),
BucketOrder.key(true),
buckets,
1,
null,
DocValueFormat.RAW,
buckets,
false,
100000,
resultBuckets,
0
0,
new TermsAggregator.BucketCountThresholds(1, 0, buckets, buckets)
);
}

@@ -78,9 +78,9 @@
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContent;
@@ -119,7 +119,7 @@
* @opensearch.api
*/
final class RequestConverters {
static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON;
static final MediaType REQUEST_BODY_CONTENT_TYPE = MediaTypeRegistry.JSON;

private RequestConverters() {
// Contains only status utility methods
@@ -177,7 +177,7 @@ static Request bulk(BulkRequest bulkRequest) throws IOException {
}

if (bulkContentType == null) {
bulkContentType = XContentType.JSON;
bulkContentType = MediaTypeRegistry.JSON;
}

final byte separator = bulkContentType.xContent().streamSeparator();
@@ -266,7 +266,12 @@ static Request bulk(BulkRequest bulkRequest) throws IOException {
}
}
} else if (opType == DocWriteRequest.OpType.UPDATE) {
source = XContentHelper.toXContent((UpdateRequest) action, bulkContentType, ToXContent.EMPTY_PARAMS, false).toBytesRef();
source = org.opensearch.core.xcontent.XContentHelper.toXContent(
(UpdateRequest) action,
bulkContentType,
ToXContent.EMPTY_PARAMS,
false
).toBytesRef();
}

if (source != null) {
@@ -821,7 +826,8 @@ static HttpEntity createEntity(ToXContent toXContent, MediaType mediaType) throw
}

static HttpEntity createEntity(ToXContent toXContent, MediaType mediaType, ToXContent.Params toXContentParams) throws IOException {
BytesRef source = XContentHelper.toXContent(toXContent, mediaType, toXContentParams, false).toBytesRef();
BytesRef source = org.opensearch.core.xcontent.XContentHelper.toXContent(toXContent, mediaType, toXContentParams, false)
.toBytesRef();
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(mediaType));
}

Expand Down Expand Up @@ -868,12 +874,12 @@ static String endpoint(String[] indices, String endpoint, String type) {
}

/**
* Returns a {@link ContentType} from a given {@link XContentType}.
* Returns a {@link ContentType} from a given {@link MediaType}.
*
* @param mediaType the {@link MediaType}
* @return the {@link ContentType}
*/
@SuppressForbidden(reason = "Only allowed place to convert a XContentType to a ContentType")
@SuppressForbidden(reason = "Only allowed place to convert a MediaType to a ContentType")
public static ContentType createContentType(final MediaType mediaType) {
return ContentType.create(mediaType.mediaTypeWithoutParameters(), (Charset) null);
}
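
The RequestConverters changes in this file follow the commit-wide pattern of swapping the concrete XContentType enum for the core MediaType abstraction resolved through MediaTypeRegistry. A minimal before/after sketch, using only identifiers that appear in this diff; the import path for MediaType is an assumption.

import org.opensearch.core.xcontent.MediaType;      // assumed location of the MediaType interface
import org.opensearch.core.xcontent.MediaTypeRegistry;

class MediaTypeMigrationSketch {
    // Before: static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON;
    // After:  the registry constant, as in RequestConverters above.
    static final MediaType REQUEST_BODY_CONTENT_TYPE = MediaTypeRegistry.JSON;

    // Before: type != XContentType.JSON && type != XContentType.SMILE
    // After:  SMILE is looked up by format name, as in enforceSameContentType below.
    static boolean isJsonOrSmile(MediaType type) {
        return type == MediaTypeRegistry.JSON || type == MediaTypeRegistry.fromFormat("smile");
    }
}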
@@ -1252,7 +1258,7 @@ Params withWaitForEvents(Priority waitForEvents) {
*/
static MediaType enforceSameContentType(IndexRequest indexRequest, @Nullable MediaType mediaType) {
MediaType requestContentType = indexRequest.getContentType();
if (requestContentType != XContentType.JSON && requestContentType != XContentType.SMILE) {
if (requestContentType != MediaTypeRegistry.JSON && requestContentType != MediaTypeRegistry.fromFormat("smile")) {
throw new IllegalArgumentException(
"Unsupported content-type found for request with content-type ["
+ requestContentType
@@ -35,11 +35,11 @@
import org.opensearch.core.ParseField;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.xcontent.ConstructingObjectParser;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.mapper.Mapper;

import java.io.IOException;
@@ -150,7 +150,7 @@ public String fullName() {
* Returns the mappings as a map. Note that the returned map has a single key which is always the field's {@link Mapper#name}.
*/
public Map<String, Object> sourceAsMap() {
return XContentHelper.convertToMap(source, true, XContentType.JSON).v2();
return XContentHelper.convertToMap(source, true, MediaTypeRegistry.JSON).v2();
}

// pkg-private for testing
@@ -217,10 +217,10 @@ public Settings settings() {
* Adds mapping that will be added when the index gets created.
*
* @param source The mapping source
* @param xContentType The type of content contained within the source
* @param mediaType The type of content contained within the source
*/
public PutIndexTemplateRequest mapping(String source, XContentType xContentType) {
internalMapping(XContentHelper.convertToMap(new BytesArray(source), true, xContentType).v2());
public PutIndexTemplateRequest mapping(String source, MediaType mediaType) {
internalMapping(XContentHelper.convertToMap(new BytesArray(source), true, mediaType).v2());
return this;
}
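
A short usage sketch of the updated mapping(String, MediaType) overload above. The JSON body is a placeholder, and the import of PutIndexTemplateRequest is omitted because this diff does not show its package.

import org.opensearch.core.xcontent.MediaTypeRegistry;

class PutIndexTemplateMappingSketch {
    static PutIndexTemplateRequest addKeywordMapping(PutIndexTemplateRequest template) {
        // mapping(...) returns the request for chaining, per the hunk above; the mapping JSON is illustrative.
        return template.mapping("{\"properties\":{\"field\":{\"type\":\"keyword\"}}}", MediaTypeRegistry.JSON);
    }
}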

@@ -268,7 +268,7 @@ public PutIndexTemplateRequest mapping(Map<String, Object> source) {

private PutIndexTemplateRequest internalMapping(Map<String, Object> source) {
try {
XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
XContentBuilder builder = MediaTypeRegistry.JSON.contentBuilder();
builder.map(source);
MediaType mediaType = builder.contentType();
Objects.requireNonNull(mediaType);
@@ -33,10 +33,10 @@
package org.opensearch.client.slm;

import org.opensearch.common.Nullable;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.ParseField;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.ConstructingObjectParser;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentParser;
@@ -169,6 +169,6 @@ public boolean equals(Object obj) {

@Override
public String toString() {
return Strings.toString(XContentType.JSON, this);
return Strings.toString(MediaTypeRegistry.JSON, this);
}
}
@@ -33,10 +33,10 @@
package org.opensearch.client.slm;

import org.opensearch.common.Nullable;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.ParseField;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.ConstructingObjectParser;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentParser;
@@ -289,7 +289,7 @@ public boolean equals(Object obj) {

@Override
public String toString() {
return Strings.toString(XContentType.JSON, this);
return Strings.toString(MediaTypeRegistry.JSON, this);
}
}
}
@@ -33,10 +33,10 @@
package org.opensearch.client.slm;

import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.ParseField;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.ConstructingObjectParser;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
@@ -188,7 +188,7 @@ public boolean equals(Object obj) {

@Override
public String toString() {
return Strings.toString(XContentType.JSON, this);
return Strings.toString(MediaTypeRegistry.JSON, this);
}

public static class SnapshotPolicyStats implements ToXContentFragment {
@@ -34,10 +34,10 @@

import org.opensearch.common.Nullable;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.ParseField;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.ConstructingObjectParser;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentParser;
@@ -151,6 +151,6 @@ public boolean equals(Object obj) {

@Override
public String toString() {
return Strings.toString(XContentType.JSON, this);
return Strings.toString(MediaTypeRegistry.JSON, this);
}
}
@@ -14,7 +14,7 @@
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

@defaultMessage Use Request#createContentType(XContentType) to be sure to pass the right MIME type
@defaultMessage Use Request#createContentType(MediaType) to be sure to pass the right MIME type
org.apache.hc.core5.http.ContentType#create(java.lang.String)
org.apache.hc.core5.http.ContentType#create(java.lang.String,java.lang.String)
org.apache.hc.core5.http.ContentType#create(java.lang.String,java.nio.charset.Charset)
@@ -42,12 +42,12 @@
import org.opensearch.action.get.MultiGetResponse;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.search.SearchHit;
import org.hamcrest.Matcher;

@@ -277,12 +277,12 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception
// let's make sure we get at least 1 item in the MultiGetRequest regardless of the randomising roulette
if (randomBoolean() || multiGetRequest.getItems().size() == 0) {
testDocs++;
processor.add(new IndexRequest("test").id(Integer.toString(testDocs)).source(XContentType.JSON, "field", "value"));
processor.add(new IndexRequest("test").id(Integer.toString(testDocs)).source(MediaTypeRegistry.JSON, "field", "value"));
multiGetRequest.add("test", Integer.toString(testDocs));
} else {
testReadOnlyDocs++;
processor.add(
new IndexRequest("test-ro").id(Integer.toString(testReadOnlyDocs)).source(XContentType.JSON, "field", "value")
new IndexRequest("test-ro").id(Integer.toString(testReadOnlyDocs)).source(MediaTypeRegistry.JSON, "field", "value")
);
}
}
@@ -333,9 +333,9 @@ public void testGlobalParametersAndSingleRequest() throws Exception {


processor.add(new IndexRequest() // <1>
.source(XContentType.JSON, "user", "some user"));
.source(MediaTypeRegistry.JSON, "user", "some user"));
processor.add(new IndexRequest("blogs").id("1") // <2>
.source(XContentType.JSON, "title", "some title"));
.source(MediaTypeRegistry.JSON, "title", "some title"));
}
// end::bulk-processor-mix-parameters
latch.await();
@@ -399,11 +399,11 @@ private MultiGetRequest indexDocs(BulkProcessor processor, int numDocs, String l
if (randomBoolean()) {
processor.add(
new IndexRequest(localIndex).id(Integer.toString(i))
.source(XContentType.JSON, "field", randomRealisticUnicodeOfLengthBetween(1, 30))
.source(MediaTypeRegistry.JSON, "field", randomRealisticUnicodeOfLengthBetween(1, 30))
);
} else {
BytesArray data = bytesBulkRequest(localIndex, i);
processor.add(data, globalIndex, globalPipeline, XContentType.JSON);
processor.add(data, globalIndex, globalPipeline, MediaTypeRegistry.JSON);
}
multiGetRequest.add(localIndex, Integer.toString(i));
}
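
For reference, the two BulkProcessor entry points exercised in this test, shown in isolation. The index name, id, and field values are placeholders, and the BulkProcessor import path is assumed; the other imports match those visible in this diff.

import org.opensearch.action.bulk.BulkProcessor;   // assumed package for BulkProcessor
import org.opensearch.action.index.IndexRequest;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.xcontent.MediaTypeRegistry;

class BulkProcessorUsageSketch {
    static void addSampleDocs(BulkProcessor processor, String globalIndex, String globalPipeline) {
        // Key/value source with an explicit media type, as in indexDocs above.
        processor.add(new IndexRequest("test").id("1").source(MediaTypeRegistry.JSON, "field", "value"));

        // Raw bulk payload routed through global defaults, mirroring
        // processor.add(data, globalIndex, globalPipeline, MediaTypeRegistry.JSON) above.
        BytesArray data = new BytesArray("{\"index\":{\"_id\":\"2\"}}\n{\"field\":\"value\"}\n");
        processor.add(data, globalIndex, globalPipeline, MediaTypeRegistry.JSON);
    }
}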
@@ -40,8 +40,8 @@
import org.opensearch.action.get.MultiGetRequest;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.rest.RestStatus;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.transport.RemoteTransportException;

import java.util.Collections;
@@ -170,7 +170,7 @@ private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) {
for (int i = 1; i <= numDocs; i++) {
processor.add(
new IndexRequest(INDEX_NAME).id(Integer.toString(i))
.source(XContentType.JSON, "field", randomRealisticUnicodeOfCodepointLengthBetween(1, 30))
.source(MediaTypeRegistry.JSON, "field", randomRealisticUnicodeOfCodepointLengthBetween(1, 30))
);
multiGetRequest.add(INDEX_NAME, Integer.toString(i));
}