From ab55033ea945b38be318704c47e5e96de41a2bd4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jan=20H=C3=B8ydahl?=
Date: Wed, 17 Dec 2025 00:43:29 +0100
Subject: [PATCH 1/2] BulkIngester helper (#1809)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* Add `BulkIngester` helper, ported from elastic java-client
Signed-off-by: Jan Høydahl
* Update changelog with pr number
Signed-off-by: Jan Høydahl
* Rename variable in BackoffPolicy
Signed-off-by: Jan Høydahl
* Add javadoc to classes and public methods, where missing
Signed-off-by: Jan Høydahl
* Fix typo, add better comments for porting differences
Signed-off-by: Jan Høydahl
* Proper size estimation
Signed-off-by: Jan Høydahl
* toJsonString() method on JsonpUtils
Signed-off-by: Jan Høydahl
* Remove deprecated method flushInterval
Signed-off-by: Jan Høydahl
* Add BulkHelper sample
Documentation in bulk.md
Signed-off-by: Jan Høydahl
* Review comment try-with-resources
Signed-off-by: Jan Høydahl
* Rename BulkHelper sample to BulkIngesterBasics
Signed-off-by: Jan Høydahl
* Move toJsonString helper to the test that uses it
Signed-off-by: Jan Høydahl
* BinaryData not implement JsonpSerializable
Signed-off-by: Jan Høydahl
* Fix broken link in bulk.md
Signed-off-by: Jan Høydahl
* Tidy
Signed-off-by: Jan Høydahl
* Revert "BinaryData not implement JsonpSerializable"
This reverts commit fc777e3c42166e01e746d276dba9dba7fe291af1.
Signed-off-by: Jan Høydahl
---------
Signed-off-by: Jan Høydahl
---
CHANGELOG.md | 1 +
guides/bulk.md | 37 +
.../opensearch/client/json/JsonpUtils.java | 60 ++
.../_helpers/bulk/BulkIngester.java | 824 ++++++++++++++++++
.../_helpers/bulk/BulkListener.java | 77 ++
.../opensearch/_helpers/bulk/FnCondition.java | 194 +++++
.../_helpers/bulk/IngesterOperation.java | 271 ++++++
.../_helpers/bulk/RetryableBulkOperation.java | 133 +++
.../client/transport/BackoffPolicy.java | 342 ++++++++
.../opensearch/client/util/BinaryData.java | 136 +++
.../client/util/ByteArrayBinaryData.java | 165 ++++
.../opensearch/client/util/ContentType.java | 43 +
.../util/NoCopyByteArrayOutputStream.java | 79 ++
.../_helpers/bulk/BulkIngesterTest.java | 583 +++++++++++++
.../integTest/AbstractBulkIngesterIT.java | 139 +++
.../integTest/httpclient5/BulkIngesterIT.java | 13 +
.../client/samples/BulkIngesterBasics.java | 108 +++
17 files changed, 3205 insertions(+)
create mode 100644 java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/BulkIngester.java
create mode 100644 java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/BulkListener.java
create mode 100644 java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/FnCondition.java
create mode 100644 java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/IngesterOperation.java
create mode 100644 java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/RetryableBulkOperation.java
create mode 100644 java-client/src/main/java/org/opensearch/client/transport/BackoffPolicy.java
create mode 100644 java-client/src/main/java/org/opensearch/client/util/BinaryData.java
create mode 100644 java-client/src/main/java/org/opensearch/client/util/ByteArrayBinaryData.java
create mode 100644 java-client/src/main/java/org/opensearch/client/util/ContentType.java
create mode 100644 java-client/src/main/java/org/opensearch/client/util/NoCopyByteArrayOutputStream.java
create mode 100644 java-client/src/test/java/org/opensearch/client/opensearch/_helpers/bulk/BulkIngesterTest.java
create mode 100644 java-client/src/test/java11/org/opensearch/client/opensearch/integTest/AbstractBulkIngesterIT.java
create mode 100644 java-client/src/test/java11/org/opensearch/client/opensearch/integTest/httpclient5/BulkIngesterIT.java
create mode 100644 samples/src/main/java/org/opensearch/client/samples/BulkIngesterBasics.java
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 918ee17014..412e7caf71 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
## [Unreleased 3.x]
### Added
+- Added BulkIngester helper for efficient bulk operations with buffering, retries, and backpressure. Ported from elasticsearch-java (commit e7120d4) ([#1809](https://github.com/opensearch-project/opensearch-java/pull/1809))
### Dependencies
diff --git a/guides/bulk.md b/guides/bulk.md
index 3c12c2ddc7..8bc5707415 100644
--- a/guides/bulk.md
+++ b/guides/bulk.md
@@ -8,6 +8,43 @@ The [Bulk API](https://opensearch.org/docs/latest/api-reference/document-apis/bu
## Bulk Indexing
+The `BulkIngester` is a helper class that simplifies bulk indexing by automatically buffering operations and flushing them to OpenSearch based on configurable thresholds. It provides:
+
+- Automatic flushing based on number of operations, total size in bytes, or time interval
+- Backpressure control to prevent overwhelming the cluster
+- Automatic retries with configurable backoff policies for failed operations
+- Thread-safe concurrent operation
+
+```java
+String indexName = "sample-index";
+
+// Create a BulkIngester with custom settings
+BulkIngester ingester = BulkIngester.of(b -> b
+ .client(client)
+ .maxOperations(1000) // Flush every 1000 operations
+ .flushInterval(5, TimeUnit.SECONDS) // Or every 5 seconds
+ .maxConcurrentRequests(2) // Allow 2 concurrent bulk requests
+);
+
+// Add operations - they are automatically buffered and flushed
+IndexData doc1 = new IndexData("Document 1", "The text of document 1");
+ingester.add(op -> op.index(i -> i.index(indexName).id("id1").document(doc1)));
+
+IndexData doc2 = new IndexData("Document 2", "The text of document 2");
+ingester.add(op -> op.index(i -> i.index(indexName).id("id2").document(doc2)));
+
+IndexData doc3 = new IndexData("Document 3", "The text of document 3");
+ingester.add(op -> op.index(i -> i.index(indexName).id("id3").document(doc3)));
+
+// Close the ingester - this flushes any remaining buffered operations
+ingester.close();
+```
+
+[IndexData](../samples/src/main/java/org/opensearch/client/samples/util/IndexData.java) refers to a sample data class.
+
+You can find a working sample of the above code in [BulkIngesterBasics.java](../samples/src/main/java/org/opensearch/client/samples/BulkIngesterBasics.java).
+
+
## Bulk requests
```java
diff --git a/java-client/src/main/java/org/opensearch/client/json/JsonpUtils.java b/java-client/src/main/java/org/opensearch/client/json/JsonpUtils.java
index 8cf51df787..ef42f39401 100644
--- a/java-client/src/main/java/org/opensearch/client/json/JsonpUtils.java
+++ b/java-client/src/main/java/org/opensearch/client/json/JsonpUtils.java
@@ -272,4 +272,64 @@ public static void serializeIntOrNull(JsonGenerator generator, int value, int de
generator.write(value);
}
}
+
+ /**
+ * Copy the JSON value at the current parser location to a JSON generator.
+ */
+ public static void copy(JsonParser parser, JsonGenerator generator) {
+ copy(parser, generator, parser.next());
+ }
+
+ /**
+ * Copy the JSON value at the current parser location to a JSON generator.
+ */
+ public static void copy(JsonParser parser, JsonGenerator generator, JsonParser.Event event) {
+
+ switch (event) {
+ case START_OBJECT:
+ generator.writeStartObject();
+ while ((event = parser.next()) != Event.END_OBJECT) {
+ expectEvent(parser, Event.KEY_NAME, event);
+ generator.writeKey(parser.getString());
+ copy(parser, generator, parser.next());
+ }
+ generator.writeEnd();
+ break;
+
+ case START_ARRAY:
+ generator.writeStartArray();
+ while ((event = parser.next()) != Event.END_ARRAY) {
+ copy(parser, generator, event);
+ }
+ generator.writeEnd();
+ break;
+
+ case VALUE_STRING:
+ generator.write(parser.getString());
+ break;
+
+ case VALUE_FALSE:
+ generator.write(false);
+ break;
+
+ case VALUE_TRUE:
+ generator.write(true);
+ break;
+
+ case VALUE_NULL:
+ generator.writeNull();
+ break;
+
+ case VALUE_NUMBER:
+ if (parser.isIntegralNumber()) {
+ generator.write(parser.getLong());
+ } else {
+ generator.write(parser.getBigDecimal());
+ }
+ break;
+
+ default:
+ throw new UnexpectedJsonEventException(parser, event);
+ }
+ }
}
diff --git a/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/BulkIngester.java b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/BulkIngester.java
new file mode 100644
index 0000000000..85310ca72b
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/BulkIngester.java
@@ -0,0 +1,824 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.opensearch._helpers.bulk;
+
+import java.io.IOException;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.LongSummaryStatistics;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionStage;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import javax.annotation.Nullable;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.opensearch.client.opensearch.OpenSearchAsyncClient;
+import org.opensearch.client.opensearch.OpenSearchClient;
+import org.opensearch.client.opensearch.core.BulkRequest;
+import org.opensearch.client.opensearch.core.BulkResponse;
+import org.opensearch.client.opensearch.core.bulk.BulkOperation;
+import org.opensearch.client.opensearch.core.bulk.BulkResponseItem;
+import org.opensearch.client.transport.BackoffPolicy;
+import org.opensearch.client.transport.TransportOptions;
+import org.opensearch.client.util.ApiTypeHelper;
+import org.opensearch.client.util.ObjectBuilder;
+
+/**
+ * A bulk ingester for efficiently indexing large volumes of documents to OpenSearch.
+ *
+ * The BulkIngester buffers bulk operations and automatically flushes them based on configurable thresholds:
+ *
+ * - Number of operations (maxOperations)
+ * - Total size in bytes (maxSize)
+ * - Time interval (flushInterval)
+ *
+ *
+ * It also provides:
+ *
+ * - Backpressure control via maxConcurrentRequests to prevent overwhelming the cluster
+ * - Automatic retries with configurable backoff policies for failed operations
+ * - Per-request context tracking via the optional Context type parameter
+ * - Event notifications through the {@link BulkListener} interface
+ *
+ *
+ * The ingester is thread-safe and can be used concurrently from multiple threads. It must be closed
+ * when no longer needed to flush any buffered operations and release resources.
+ *
+ * Example usage:
+ *
 + * {@code
+ * BulkIngester ingester = BulkIngester.of(b -> b
+ * .client(client)
+ * .maxOperations(1000)
+ * .flushInterval(5, TimeUnit.SECONDS)
+ * );
+ *
+ * // Add operations
+ * ingester.add(op -> op.index(i -> i.index("my-index").id("1").document(myDoc)));
+ *
+ * // Close when done (flushes remaining operations)
+ * ingester.close();
+ * }
+ *
+ * @param <Context> optional context type to associate with each bulk operation
+ */
+public class BulkIngester implements AutoCloseable {
+
+ private static final Log logger = LogFactory.getLog(BulkIngester.class);
+
+ // Instance counter, to name the flush thread if we create one
+ private static final AtomicInteger idCounter = new AtomicInteger();
+
+ // Configuration
+ private final OpenSearchAsyncClient client;
+ private final @Nullable BulkRequest globalSettings;
+ private final int maxRequests;
+ private final long maxSize;
+ private final int maxOperations;
+ private final @Nullable BulkListener listener;
+ private final Long flushIntervalMillis;
+
+ private @Nullable ScheduledFuture> flushTask;
+ private @Nullable ScheduledExecutorService scheduler;
+ private @Nullable ScheduledExecutorService retryScheduler;
+ private boolean isExternalScheduler = false;
+ private BackoffPolicy backoffPolicy;
+
+ // Current state
+ private List> operations = new ArrayList<>();
+ private long currentSize;
+ private int requestsInFlightCount;
+ private volatile boolean isClosed = false;
+
+ // Synchronization objects
+ private final ReentrantLock lock = new ReentrantLock();
+ private final FnCondition addCondition = new FnCondition(lock, this::canAddOperation);
+ private final FnCondition sendRequestCondition = new FnCondition(lock, this::canSendRequest);
+ private final FnCondition closeCondition = new FnCondition(lock, this::closedAndFlushed);
+ private final AtomicInteger listenerInProgressCount = new AtomicInteger();
+ private final AtomicInteger retriesInProgressCount = new AtomicInteger();
+
+ private static class RequestExecution {
+ public final long id;
+ public final BulkRequest request;
+ public final List contexts;
+ public final CompletionStage futureResponse;
+
+ RequestExecution(long id, BulkRequest request, List contexts, CompletionStage futureResponse) {
+ this.id = id;
+ this.request = request;
+ this.contexts = contexts;
+ this.futureResponse = futureResponse;
+ }
+ }
+
+ private BulkIngester(Builder builder) {
+ int ingesterId = idCounter.incrementAndGet();
+ this.client = ApiTypeHelper.requireNonNull(builder.client, this, "client");
+ this.globalSettings = builder.globalSettings;
+ this.maxRequests = builder.maxConcurrentRequests;
+ this.maxSize = builder.bulkSize < 0 ? Long.MAX_VALUE : builder.bulkSize;
+ this.maxOperations = builder.bulkOperations < 0 ? Integer.MAX_VALUE : builder.bulkOperations;
+ this.listener = builder.listener;
+ this.backoffPolicy = builder.backoffPolicy;
+ this.flushIntervalMillis = builder.flushIntervalMillis;
+
+ if (flushIntervalMillis != null || listener != null) {
+ // Create a scheduler if needed
+ if (builder.scheduler == null) {
+ this.scheduler = Executors.newScheduledThreadPool(maxRequests + 1, (r) -> {
+ Thread t = Executors.defaultThreadFactory().newThread(r);
+ t.setName("bulk-ingester-executor#" + ingesterId + "#" + t.getId());
+ t.setDaemon(true);
+ return t;
+ });
+ } else {
+ // It's not ours, we will not close it.
+ this.scheduler = builder.scheduler;
+ this.isExternalScheduler = true;
+ }
+ }
+
+ if (flushIntervalMillis != null) {
+ long flushInterval = flushIntervalMillis;
+ this.flushTask = scheduler.scheduleWithFixedDelay(this::failsafeFlush, flushInterval, flushInterval, TimeUnit.MILLISECONDS);
+ }
+
+ if (backoffPolicy == null) {
+ backoffPolicy = BackoffPolicy.noBackoff();
+ }
+ // preparing a scheduler that will trigger flushes to retry failed requests
+ else {
+ retryScheduler = Executors.newScheduledThreadPool(maxRequests + 1, (r) -> {
+ Thread t = Executors.defaultThreadFactory().newThread(r);
+ t.setName("bulk-ingester-retry#" + ingesterId + "#" + t.getId());
+ t.setDaemon(true);
+ return t;
+ });
+ }
+ }
+
+ // ----- Getters
+
+ /**
+ * The configured max operations to buffer in a single bulk request.
+ */
+ public int maxOperations() {
+ return this.maxOperations;
+ }
+
+ /**
+ * The configured maximum size in bytes for a bulk request. Operations are added to the request until
+ * adding an operation leads the request to exceed this size.
+ */
+ public long maxSize() {
+ return this.maxSize;
+ }
+
+ /**
+ * The configured maximum number of concurrent request sent to OpenSearch.
+ */
+ public int maxConcurrentRequests() {
+ return this.maxRequests;
+ }
+
+ /**
+ * The configured flush period.
+ */
+ public Duration flushInterval() {
+ if (this.flushIntervalMillis != null) {
+ return Duration.ofMillis(flushIntervalMillis);
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * The number of operations that have been buffered, waiting to be sent.
+ */
+ public int pendingOperations() {
+ List> operations = this.operations;
+ return operations == null ? 0 : operations.size();
+ }
+
+ /**
+ * The size in bytes of operations that have been buffered, waiting to be sent.
+ */
+ public long pendingOperationsSize() {
+ return this.currentSize;
+ }
+
+ /**
+ * The number of in flight bulk requests.
+ */
+ public int pendingRequests() {
+ return this.requestsInFlightCount;
+ }
+
+ // ----- Statistics
+
+ /**
+ * Statistics: the number of operations that were added to this ingester since it was created.
+ */
+ public long operationsCount() {
+ return this.addCondition.invocations();
+ }
+
+ /**
+ * Statistics: the number of operations that had to wait before being added because the operation buffer
+ * was full and the number of http requests in flight exceeded the configured maximum number.
+ *
+ * @see Builder#maxConcurrentRequests
+ * @see Builder#maxOperations
+ * @see Builder#maxSize
+ */
+ public long operationContentionsCount() {
+ return this.addCondition.contentions();
+ }
+
+ /**
+ * Statistics: the number of bulk requests that were produced by this ingester since it was created.
+ */
+ public long requestCount() {
+ return this.sendRequestCondition.invocations();
+ }
+
+ /**
+ * Statistics: the number of bulk requests that could not be sent immediately because the number of
+ * http requests in flight exceeded the configured maximum number.
+ *
+ * @see Builder#maxConcurrentRequests
+ */
+ public long requestContentionsCount() {
+ return this.sendRequestCondition.contentions();
+ }
+
+ // ----- Predicates for the condition variables
+
+ private boolean canSendRequest() {
+ return requestsInFlightCount < maxRequests;
+ }
+
+ private boolean canAddOperation() {
+ return currentSize < maxSize && operations.size() < maxOperations;
+ }
+
+ private boolean closedAndFlushed() {
+ return isClosed
+ && operations.isEmpty()
+ && requestsInFlightCount == 0
+ && listenerInProgressCount.get() == 0
+ && retriesInProgressCount.get() == 0;
+ }
+
+ // ----- Ingester logic
+
+ private BulkRequest.Builder newRequest() {
+ BulkRequest.Builder result = new BulkRequest.Builder();
+
+ if (this.globalSettings != null) {
+ BulkRequest settings = this.globalSettings;
+ result.index(settings.index())
+ .pipeline(settings.pipeline())
+ .refresh(settings.refresh())
+ .requireAlias(settings.requireAlias())
+ .routing(settings.routing())
+ .sourceExcludes(settings.sourceExcludes())
+ .sourceIncludes(settings.sourceIncludes())
+ .source(settings.source())
+ .timeout(settings.timeout())
+ .waitForActiveShards(settings.waitForActiveShards());
+ }
+
+ return result;
+ }
+
+ private void failsafeFlush() {
+ try {
+ flush();
+ } catch (Throwable thr) {
+ // Log the error and continue
+ logger.error("Error in background flush", thr);
+ }
+ }
+
+ /**
+ * Manually flush any buffered operations, sending them to OpenSearch immediately.
+ *
+ * This method is non-blocking and returns immediately. Flushing happens asynchronously,
+ * subject to the maxConcurrentRequests limit. Operations that are scheduled for retry
+ * will only be sent once their retry delay has elapsed.
+ *
+ * This method is useful when you want to ensure operations are sent without waiting for
+ * automatic flush triggers (maxOperations, maxSize, or flushInterval).
+ */
+ public void flush() {
+ List> sentRequests = new ArrayList<>();
+ RequestExecution exec = sendRequestCondition.whenReadyIf(() -> {
+ // May happen on manual and periodic flushes
+ return !operations.isEmpty() && operations.stream().anyMatch(RetryableBulkOperation::isSendable);
+ }, () -> {
+ // Selecting operations that can be sent immediately,
+ // Dividing actual operations from contexts
+ List immediateOps = new ArrayList<>();
+ List contexts = new ArrayList<>();
+
+ for (Iterator> it = operations.iterator(); it.hasNext();) {
+ RetryableBulkOperation op = it.next();
+ if (op.isSendable()) {
+ immediateOps.add(op.operation());
+ contexts.add(op.context());
+
+ sentRequests.add(op);
+ it.remove();
+ }
+ }
+
+ // Build the request
+ BulkRequest request = newRequest().operations(immediateOps).build();
+
+ // Prepare for next round
+ currentSize = operations.size();
+ addCondition.signalIfReady();
+
+ long id = sendRequestCondition.invocations();
+
+ if (listener != null) {
+ // synchronous execution to make sure it actually runs before
+ listener.beforeBulk(id, request, contexts);
+ }
+
+ CompletionStage result;
+ try {
+ result = client.bulk(request);
+ } catch (IOException e) {
+ // Convert IOException to a failed CompletionStage
+ result = CompletableFuture.failedFuture(e);
+ }
+ requestsInFlightCount++;
+
+ if (listener == null) {
+ // No need to keep the request around, it can be GC'ed
+ request = null;
+ }
+
+ return new RequestExecution<>(id, request, contexts, result);
+ });
+
+ if (exec != null) {
+ // A request was actually sent
+ exec.futureResponse.handle((resp, thr) -> {
+ if (resp != null) {
+
+ // Success? Checking if total or partial
+ List failedRequestsCanRetry = resp.items()
+ .stream()
+ .filter(i -> i.error() != null && i.status() == 429)
+ .collect(Collectors.toList());
+
+ if (failedRequestsCanRetry.isEmpty() || backoffPolicy.equals(BackoffPolicy.noBackoff())) {
+ // Total success! ...or there's no retry policy implemented. Either way, can call
+ listenerAfterBulkSuccess(resp, exec);
+ } else {
+ // Partial success, retrying failed requests if policy allows it
+ // Keeping list of retryable requests/responses, to exclude them for calling
+ // listener later
+ List> retryableReq = new ArrayList<>();
+ List> refires = new ArrayList<>();
+ List retryableResp = new ArrayList<>();
+
+ for (BulkResponseItem bulkItemResponse : failedRequestsCanRetry) {
+ int index = resp.items().indexOf(bulkItemResponse);
+ selectingRetries(index, bulkItemResponse, sentRequests, retryableResp, retryableReq, refires);
+ }
+ // Scheduling flushes for just sent out retryable requests
+ if (!refires.isEmpty()) {
+ scheduleRetries(refires);
+ }
+ // Retrieving list of remaining successful or not retryable requests
+ retryableReq.forEach(sentRequests::remove);
+ if (!sentRequests.isEmpty()) {
+ if (listener != null) {
+ // Creating partial BulkRequest
+ List partialOps = new ArrayList<>();
+ List partialCtx = new ArrayList<>();
+ for (RetryableBulkOperation op : sentRequests) {
+ partialOps.add(op.operation());
+ partialCtx.add(op.context());
+ }
+ BulkRequest partialRequest = newRequest().operations(partialOps).build();
+
+ // Filtering response
+ List partialItems = resp.items()
+ .stream()
+ .filter(i -> !retryableResp.contains(i))
+ .collect(Collectors.toList());
+
+ BulkResponse partialResp = BulkResponse.of(
+ br -> br.items(partialItems).errors(resp.errors()).took(resp.took()).ingestTook(resp.ingestTook())
+ );
+
+ listenerInProgressCount.incrementAndGet();
+ scheduler.submit(() -> {
+ try {
+ listener.afterBulk(exec.id, partialRequest, partialCtx, partialResp);
+ } finally {
+ if (listenerInProgressCount.decrementAndGet() == 0) {
+ closeCondition.signalIfReady();
+ }
+ }
+ });
+ }
+ }
+
+ }
+ } else {
+ // Failure
+ listenerAfterBulkException(thr, exec);
+ }
+
+ sendRequestCondition.signalIfReadyAfter(() -> {
+ requestsInFlightCount--;
+ closeCondition.signalAllIfReady();
+ });
+ return null;
+ });
+ }
+ }
+
+ private void selectingRetries(
+ int index,
+ BulkResponseItem bulkItemResponse,
+ List> sentRequests,
+ List retryableResp,
+ List> retryableReq,
+ List> refires
+ ) {
+
+ // Getting original failed, requests and keeping successful ones to send to the listener
+ RetryableBulkOperation original = sentRequests.get(index);
+ if (original.canRetry()) {
+ retryableResp.add(bulkItemResponse);
+ Iterator retryTimes = Optional.ofNullable(original.retries()).orElse(backoffPolicy.iterator());
+ RetryableBulkOperation refire = new RetryableBulkOperation<>(original.operation(), original.context(), retryTimes);
+ retryableReq.add(original);
+ refires.add(refire);
+ addRetry(refire);
+ logger.warn("Added failed request back in queue, retrying in : " + refire.currentRetryTimeDelay() + " ms");
+ } else {
+ logger.warn("Retries finished for request: " + original.operation()._kind().toString());
+ }
+ }
+
+ private void listenerAfterBulkException(Throwable thr, RequestExecution exec) {
+ if (listener != null) {
+ listenerInProgressCount.incrementAndGet();
+ scheduler.submit(() -> {
+ try {
+ listener.afterBulk(exec.id, exec.request, exec.contexts, thr);
+ } finally {
+ if (listenerInProgressCount.decrementAndGet() == 0) {
+ closeCondition.signalIfReady();
+ }
+ }
+ });
+ }
+ }
+
+ private void listenerAfterBulkSuccess(BulkResponse resp, RequestExecution exec) {
+ if (listener != null) {
+ listenerInProgressCount.incrementAndGet();
+ scheduler.submit(() -> {
+ try {
+ listener.afterBulk(exec.id, exec.request, exec.contexts, resp);
+ } finally {
+ if (listenerInProgressCount.decrementAndGet() == 0) {
+ closeCondition.signalIfReady();
+ }
+ }
+ });
+ }
+ }
+
+ private void scheduleRetries(List> retryableReq) {
+ LongSummaryStatistics statsDelays = retryableReq.stream()
+ .map(RetryableBulkOperation::currentRetryTimeDelay)
+ .mapToLong(Long::longValue)
+ .summaryStatistics();
+
+ // scheduling earlier and latest delay
+ retryScheduler.schedule(this::flush, statsDelays.getMin(), TimeUnit.MILLISECONDS);
+ retryScheduler.schedule(this::flush, statsDelays.getMax(), TimeUnit.MILLISECONDS);
+
+ }
+
+ /**
+ * Add a bulk operation to the ingester with an associated context.
+ *
+ * The operation will be buffered and sent to OpenSearch when any of the configured
+ * flush thresholds is reached (maxOperations, maxSize, or flushInterval), or when
+ * {@link #flush()} or {@link #close()} is called.
+ *
+ * This method blocks if adding the operation would exceed the maxConcurrentRequests limit,
+ * providing backpressure to prevent overwhelming the cluster.
+ *
+ * @param operation the bulk operation to add
+ * @param context optional context to associate with this operation for tracking purposes
+ * @throws IllegalStateException if the ingester has been closed
+ */
+ public void add(BulkOperation operation, Context context) {
+ if (isClosed) {
+ throw new IllegalStateException("Ingester has been closed");
+ }
+
+ RetryableBulkOperation repeatableOp = new RetryableBulkOperation<>(operation, context, null);
+
+ innerAdd(repeatableOp);
+ }
+
+ // Same as "add", but skips the closed check to allow retries to be added even after ingester closure
+ private void addRetry(RetryableBulkOperation repeatableOp) {
+ // Sending the operation back in the queue using the retry scheduler
+ retriesInProgressCount.incrementAndGet();
+ retryScheduler.submit(() -> {
+ try {
+ innerAdd(repeatableOp);
+ } finally {
+ if (retriesInProgressCount.decrementAndGet() == 0) {
+ closeCondition.signalIfReady();
+ }
+ }
+ });
+ }
+
+ private void innerAdd(RetryableBulkOperation repeatableOp) {
+ IngesterOperation ingestOp = IngesterOperation.of(repeatableOp, client._transport().jsonpMapper());
+
+ addCondition.whenReady(() -> {
+ operations.add(ingestOp.repeatableOperation());
+ currentSize += ingestOp.size();
+
+ if (!canAddOperation()) {
+ flush();
+ } else {
+ addCondition.signalIfReady();
+ }
+ });
+ }
+
+ /**
+ * Add a bulk operation to the ingester without an associated context.
+ *
+ * Equivalent to calling {@code add(operation, null)}.
+ *
+ * @param operation the bulk operation to add
+ * @throws IllegalStateException if the ingester has been closed
+ * @see #add(BulkOperation, Object)
+ */
+ public void add(BulkOperation operation) {
+ add(operation, null);
+ }
+
+ /**
+ * Add a bulk operation to the ingester using a builder function, without an associated context.
+ *
+ * This is a convenience method that accepts a function to build the bulk operation inline.
+ *
+ * @param f the function to build the bulk operation
+ * @throws IllegalStateException if the ingester has been closed
+ * @see #add(BulkOperation)
+ */
+ public void add(Function> f) {
+ add(f.apply(new BulkOperation.Builder()).build(), null);
+ }
+
+ /**
+ * Add a bulk operation to the ingester using a builder function, with an associated context.
+ *
+ * This is a convenience method that accepts a function to build the bulk operation inline.
+ *
+ * @param f the function to build the bulk operation
+ * @param context optional context to associate with this operation for tracking purposes
+ * @throws IllegalStateException if the ingester has been closed
+ * @see #add(BulkOperation, Object)
+ */
+ public void add(Function> f, Context context) {
+ add(f.apply(new BulkOperation.Builder()).build(), context);
+ }
+
+ /**
+ * Close this ingester, first flushing any buffered operations. This does not close
+ * the underlying {@link OpenSearchClient} and {@link org.opensearch.client.transport.Transport}.
+ */
+ @Override
+ public void close() {
+ if (isClosed) {
+ return;
+ }
+
+ isClosed = true;
+ // Flush buffered operations
+ flush();
+ // and wait for all requests to be completed
+ closeCondition.whenReady(() -> {});
+
+ if (flushTask != null) {
+ flushTask.cancel(false);
+ }
+
+ if (scheduler != null && !isExternalScheduler) {
+ scheduler.shutdownNow();
+ }
+
+ if (retryScheduler != null) {
+ retryScheduler.shutdownNow();
+ }
+ }
+
+ // ----------------------------------------------------------------------------------------------------
+
+ public static BulkIngester of(Function, Builder> f) {
+ return f.apply(new Builder<>()).build();
+ }
+
+ public static class Builder implements ObjectBuilder> {
+ private OpenSearchAsyncClient client;
+ private BulkRequest globalSettings;
+ private int bulkOperations = 1000;
+ private long bulkSize = 5 * 1024 * 1024;
+ private int maxConcurrentRequests = 1;
+ private Long flushIntervalMillis;
+ private BulkListener listener;
+ private ScheduledExecutorService scheduler;
+ private BackoffPolicy backoffPolicy;
+
+ public Builder client(OpenSearchAsyncClient client) {
+ this.client = client;
+ return this;
+ }
+
+ public Builder client(OpenSearchClient client) {
+ TransportOptions options = client._transportOptions();
+ if (options == client._transport().options()) {
+ options = null;
+ }
+ return client(new OpenSearchAsyncClient(client._transport(), options));
+ }
+
+ /**
+ * Sets when to flush a new bulk request based on the number of operations currently added.
+ * Defaults to
+ * {@code 1000}. Can be set to {@code -1} to disable it.
+ *
+ * @throws IllegalArgumentException if less than -1.
+ */
+ public Builder maxOperations(int count) {
+ if (count < -1) {
+ throw new IllegalArgumentException("Max operations should be at least -1");
+ }
+ this.bulkOperations = count;
+ return this;
+ }
+
+ /**
+ * Sets when to flush a new bulk request based on the size in bytes of actions currently added. A
+ * request is sent
+ * once that size has been exceeded. Defaults to 5 megabytes. Can be set to {@code -1} to disable it.
+ *
+ * @throws IllegalArgumentException if less than -1.
+ */
+ public Builder maxSize(long bytes) {
+ if (bytes < -1) {
+ throw new IllegalArgumentException("Max size should be at least -1");
+ }
+ this.bulkSize = bytes;
+ return this;
+ }
+
+ /**
+ * Sets the number of concurrent requests allowed to be executed. A value of 1 means 1 request is
+ * allowed to be executed
+ * while accumulating new bulk requests. Defaults to {@code 1}.
+ *
+ * @throws IllegalArgumentException if less than 1.
+ */
+ public Builder maxConcurrentRequests(int max) {
+ if (max < 1) {
+ throw new IllegalArgumentException("Max concurrent request should be at least 1");
+ }
+ this.maxConcurrentRequests = max;
+ return this;
+ }
+
+ /**
+ * Sets an interval flushing any bulk actions pending if the interval passes. Defaults to not set.
+ *
+ * Flushing is still subject to the maximum number of requests set with
+ * {@link #maxConcurrentRequests}.
+ *
+ * @throws IllegalArgumentException if not a positive duration.
+ */
+ public Builder flushInterval(long value, TimeUnit unit) {
+ if (value < 0) {
+ throw new IllegalArgumentException("Duration should be positive");
+ }
+ this.flushIntervalMillis = unit.toMillis(value);
+ return this;
+ }
+
+ /**
+ * Sets a custom scheduler to run the flush thread and the listener logic. A default one is used if
+ * not set.
+ */
+ public Builder scheduler(ScheduledExecutorService scheduler) {
+ this.scheduler = scheduler;
+ return this;
+ }
+
+ public Builder listener(BulkListener listener) {
+ this.listener = listener;
+ return this;
+ }
+
+ /**
+ * Sets the backoff policy that will handle retries for error 429: too many requests.
+ * All the times are defined in milliseconds.
+ */
+ public Builder backoffPolicy(BackoffPolicy backoffPolicy) {
+ this.backoffPolicy = backoffPolicy;
+ return this;
+ }
+
+ /**
+ * Sets global bulk request settings that will be applied to all requests sent by the ingester.
+ */
+ public Builder globalSettings(BulkRequest.Builder settings) {
+ if (settings != null) {
+ // Set required field
+ this.globalSettings = settings.operations(Collections.emptyList()).build();
+ } else {
+ this.globalSettings = null;
+ }
+ return this;
+ }
+
+ /**
+ * Sets global bulk request settings that will be applied to all bulk requests.
+ */
+ public Builder globalSettings(Function fn) {
+ return globalSettings(fn.apply(new BulkRequest.Builder()));
+ }
+
+ @Override
+ public BulkIngester build() {
+ // Ensure some chunking criteria are defined
+ boolean hasCriteria = this.bulkOperations >= 0 || this.bulkSize >= 0 || this.flushIntervalMillis != null;
+
+ if (!hasCriteria) {
+ throw new IllegalStateException("No bulk operation chunking criteria have been set.");
+ }
+
+ return new BulkIngester<>(this);
+ }
+ }
+}
diff --git a/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/BulkListener.java b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/BulkListener.java
new file mode 100644
index 0000000000..69925c5591
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/BulkListener.java
@@ -0,0 +1,77 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.opensearch._helpers.bulk;
+
+import java.util.List;
+import org.opensearch.client.opensearch.core.BulkRequest;
+import org.opensearch.client.opensearch.core.BulkResponse;
+import org.opensearch.client.opensearch.core.bulk.BulkOperation;
+
+/**
+ * A listener that is called by a {@link BulkIngester} to allow monitoring requests sent and their result.
+ *
+ * @param application-defined contextual data that can be associated to a bulk operation.
+ */
+public interface BulkListener {
+
+ /**
+ * Called before a bulk request is sent. Note: documents in {@code request} operations have been
+ * converted to {@link org.opensearch.client.util.BinaryData}.
+ *
+ * @param executionId the id of this request, unique for the {@link BulkIngester} that created it.
+ * @param request the bulk request that will be sent, with documents in binary form.
+ * @param contexts application-defined data that was passed in {@link BulkIngester#add(BulkOperation, Object)}.
+ */
+ void beforeBulk(long executionId, BulkRequest request, List contexts);
+
+ /**
+ * Called after a bulk request has been processed. OpenSearch accepted the request, but {@code response} the response may
+ * contain both successful and failure response items.
+ *
+ * @param executionId the id of this request, unique for the {@link BulkIngester} that created it.
+ * @param request the bulk request that will be sent, with documents in binary form.
+ * @param contexts application-defined data that was passed in {@link BulkIngester#add(BulkOperation, Object)}.
+ * @param response the response received from OpenSearch.
+ */
+ void afterBulk(long executionId, BulkRequest request, List contexts, BulkResponse response);
+
+ /**
+ * Called when a bulk request could not be sent to OpenSearch.
+ *
+ * @param executionId the id of this request, unique for the {@link BulkIngester} that created it.
+ * @param request the bulk request that will be sent, with documents in binary form.
+ * @param contexts application-defined data that was passed in {@link BulkIngester#add(BulkOperation, Object)}.
+ * @param failure the failure that occurred when sending the request to OpenSearch.
+ */
+ void afterBulk(long executionId, BulkRequest request, List contexts, Throwable failure);
+}
diff --git a/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/FnCondition.java b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/FnCondition.java
new file mode 100644
index 0000000000..5d6a35acaf
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/FnCondition.java
@@ -0,0 +1,194 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.opensearch._helpers.bulk;
+
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.Lock;
+import java.util.function.BooleanSupplier;
+import java.util.function.Supplier;
+
+/**
+ * A helper to make {@link Condition} easier and less error-prone to use.
+ *
+ * This utility class wraps a {@link Lock} and {@link Condition} with a readiness predicate,
+ * providing a functional interface for executing code when the condition is satisfied.
+ * It handles the lock/unlock pattern and waiting on the condition variable automatically.
+ *
+ * This is an internal utility class used by {@link BulkIngester} for coordinating concurrent operations.
+ */
+class FnCondition {
+ private final Lock lock;
+ public final Condition condition;
+ private final BooleanSupplier ready;
+ private long invocations;
+ private long contentions;
+
+ FnCondition(Lock lock, BooleanSupplier ready) {
+ this.lock = lock;
+ this.condition = lock.newCondition();
+ this.ready = ready;
+ }
+
+ /**
+ * Execute a runnable when the condition becomes ready, blocking if necessary.
+ *
+ * This method will wait on the condition variable until the readiness predicate returns true,
+ * then execute the provided runnable while holding the lock.
+ *
+ * @param fn the runnable to execute when ready
+ */
+ public void whenReady(Runnable fn) {
+ whenReadyIf(null, () -> {
+ fn.run();
+ return null;
+ });
+ }
+
+ /**
+ * Execute a function when the condition becomes ready, blocking if necessary.
+ *
+ * This method will wait on the condition variable until the readiness predicate returns true,
+ * then execute the provided function while holding the lock.
+ *
+ * @param fn the function to execute when ready
+ * @param the return type of the function
+ * @return the result of the function
+ */
+ public T whenReady(Supplier fn) {
+ return whenReadyIf(null, fn);
+ }
+
+ /**
+ * Runs a function when the condition variable is ready, after verifying in that it can actually run.
+ *
+ * {@code canRun} and {@code fn} are executed withing the lock.
+ *
+ * @param canRun a predicate indicating if {@code fn} is ready to run. If not, returns {@code null} immediately.
+ * @param fn the function to run once the condition variable allows it.
+ * @return the result of {@code fn}.
+ */
+ public T whenReadyIf(BooleanSupplier canRun, Supplier fn) {
+ lock.lock();
+ try {
+ if (canRun != null && !canRun.getAsBoolean()) {
+ return null;
+ }
+
+ invocations++;
+ boolean firstLoop = true;
+ while (!ready.getAsBoolean()) {
+ if (firstLoop) {
+ contentions++;
+ firstLoop = false;
+ }
+ condition.awaitUninterruptibly();
+ }
+
+ if (canRun != null && !canRun.getAsBoolean()) {
+ return null;
+ }
+
+ return fn.get();
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Signal one waiting thread if the condition is ready.
+ *
+ * This method checks if the readiness predicate is true, and if so, signals one thread
+ * waiting on the condition variable.
+ */
+ public void signalIfReady() {
+ lock.lock();
+ try {
+ if (ready.getAsBoolean()) {
+ this.condition.signal();
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Signal all waiting threads if the condition is ready.
+ *
+ * This method checks if the readiness predicate is true, and if so, signals all threads
+ * waiting on the condition variable.
+ */
+ public void signalAllIfReady() {
+ lock.lock();
+ try {
+ if (ready.getAsBoolean()) {
+ this.condition.signalAll();
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Execute a runnable and then signal if the condition becomes ready.
+ *
+ * This method executes the provided runnable while holding the lock, then checks the
+ * readiness predicate and signals one waiting thread if ready.
+ *
+ * @param r the runnable to execute
+ */
+ public void signalIfReadyAfter(Runnable r) {
+ lock.lock();
+ try {
+ r.run();
+ if (ready.getAsBoolean()) {
+ this.condition.signal();
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Number of invocations of {@code whenReady}.
+ */
+ public long invocations() {
+ return this.invocations;
+ }
+
+ /**
+ * Number of invocations of {@code whenReady} that contended and required to wait on the condition variable.
+ */
+ public long contentions() {
+ return this.contentions;
+ }
+}
diff --git a/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/IngesterOperation.java b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/IngesterOperation.java
new file mode 100644
index 0000000000..19aca6969f
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/IngesterOperation.java
@@ -0,0 +1,271 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.opensearch._helpers.bulk;
+
+import javax.annotation.Nullable;
+import org.opensearch.client.json.JsonEnum;
+import org.opensearch.client.json.JsonpMapper;
+import org.opensearch.client.opensearch.core.bulk.BulkOperation;
+import org.opensearch.client.opensearch.core.bulk.BulkOperationBase;
+import org.opensearch.client.opensearch.core.bulk.CreateOperation;
+import org.opensearch.client.opensearch.core.bulk.DeleteOperation;
+import org.opensearch.client.opensearch.core.bulk.IndexOperation;
+import org.opensearch.client.opensearch.core.bulk.UpdateOperation;
+import org.opensearch.client.util.BinaryData;
+import org.opensearch.client.util.NoCopyByteArrayOutputStream;
+
+/**
+ * A bulk operation whose size has been calculated and content turned to a binary blob (to compute its size).
+ *
+ * This class wraps a {@link RetryableBulkOperation} and calculates the estimated byte size that the operation
+ * will occupy in the bulk request payload. For operations with documents (create, index, update), the document
+ * content is converted to {@link BinaryData} to enable efficient size calculation and avoid re-serialization.
+ *
+ * This is an internal utility class used by {@link BulkIngester} to track buffered operation sizes.
+ */
+class IngesterOperation {
+ private final RetryableBulkOperation repeatableOp;
+ private final long size;
+
+ IngesterOperation(RetryableBulkOperation repeatableOp, long size) {
+ this.repeatableOp = repeatableOp;
+ this.size = size;
+ }
+
+ /**
+ * Create an IngesterOperation from a retryable bulk operation, calculating its size.
+ *
+ * @param repeatableOp the retryable bulk operation to wrap
+ * @param mapper the JSON mapper for serialization
+ * @return an IngesterOperation with calculated size
+ */
+ public static IngesterOperation of(RetryableBulkOperation repeatableOp, JsonpMapper mapper) {
+ switch (repeatableOp.operation()._kind()) {
+ case Create:
+ return createOperation(repeatableOp, mapper);
+ case Index:
+ return indexOperation(repeatableOp, mapper);
+ case Update:
+ return updateOperation(repeatableOp, mapper);
+ case Delete:
+ return deleteOperation(repeatableOp);
+ default:
+ throw new IllegalStateException("Unknown bulk operation type " + repeatableOp.operation()._kind());
+ }
+ }
+
+ /**
+ * Get the wrapped retryable bulk operation.
+ *
+ * @return the retryable bulk operation
+ */
+ public RetryableBulkOperation repeatableOperation() {
+ return this.repeatableOp;
+ }
+
+ /**
+ * Get the estimated size in bytes of this operation.
+ *
+ * @return the operation size in bytes
+ */
+ public long size() {
+ return this.size;
+ }
+
+ private static IngesterOperation createOperation(RetryableBulkOperation repeatableOp, JsonpMapper mapper) {
+ CreateOperation> create = repeatableOp.operation().create();
+ RetryableBulkOperation newOperation;
+
+ long size = basePropertiesSize(create);
+
+ if (create.document() instanceof BinaryData) {
+ newOperation = repeatableOp;
+ size += ((BinaryData) create.document()).size();
+
+ } else {
+ BinaryData binaryDoc = BinaryData.of(create.document(), mapper);
+ size += binaryDoc.size();
+ newOperation = new RetryableBulkOperation(BulkOperation.of(bo -> bo.create(idx -> {
+ copyCreateProperties(create, idx);
+ return idx.document(binaryDoc);
+ })), repeatableOp.context(), repeatableOp.retries());
+ }
+
+ return new IngesterOperation(newOperation, size);
+ }
+
+ private static IngesterOperation indexOperation(RetryableBulkOperation repeatableOp, JsonpMapper mapper) {
+ IndexOperation> index = repeatableOp.operation().index();
+ RetryableBulkOperation newOperation;
+
+ long size = basePropertiesSize(index);
+
+ if (index.document() instanceof BinaryData) {
+ newOperation = repeatableOp;
+ size += ((BinaryData) index.document()).size();
+
+ } else {
+ BinaryData binaryDoc = BinaryData.of(index.document(), mapper);
+ size += binaryDoc.size();
+ newOperation = new RetryableBulkOperation(BulkOperation.of(bo -> bo.index(idx -> {
+ copyIndexProperties(index, idx);
+ return idx.document(binaryDoc);
+ })), repeatableOp.context(), repeatableOp.retries());
+ }
+
+ return new IngesterOperation(newOperation, size);
+ }
+
+ private static IngesterOperation updateOperation(RetryableBulkOperation repeatableOp, JsonpMapper mapper) {
+ UpdateOperation> update = repeatableOp.operation().update();
+
+ // UpdateOperation implements NdJsonpSerializable, which means it serializes as two separate JSON objects:
+ // 1. The metadata line (with base properties, requireAlias, retryOnConflict)
+ // 2. The data line (UpdateOperationData with document, script, upsert, etc.)
+ //
+ // We calculate the size by serializing both parts to measure their actual byte size.
+ // This is more accurate than estimation and handles all fields (doc, upsert, script, etc.)
+ long size = 0;
+
+ // Serialize both the metadata and data parts using _serializables()
+ try {
+ NoCopyByteArrayOutputStream out = new NoCopyByteArrayOutputStream();
+ jakarta.json.stream.JsonGenerator generator = mapper.jsonProvider().createGenerator(out);
+
+ // UpdateOperation._serializables() returns an iterator with [metadata, data]
+ // Serialize each part (both are PlainJsonSerializable)
+ java.util.Iterator> serializables = update._serializables();
+ while (serializables.hasNext()) {
+ Object serializable = serializables.next();
+ if (serializable instanceof org.opensearch.client.json.PlainJsonSerializable) {
+ ((org.opensearch.client.json.PlainJsonSerializable) serializable).serialize(generator, mapper);
+ generator.flush();
+ }
+ }
+
+ generator.close();
+ size = out.size();
+
+ } catch (Exception e) {
+ // If serialization fails for any reason, fall back to conservative estimate
+ // This shouldn't happen in normal operation, but provides a safety net
+ size = basePropertiesSize(update) + size("retry_on_conflict", update.retryOnConflict()) + size(
+ "require_alias",
+ update.requireAlias()
+ ) + 300; // Fallback estimate for data
+ }
+
+ return new IngesterOperation(repeatableOp, size);
+ }
+
+ private static IngesterOperation deleteOperation(RetryableBulkOperation repeatableOp) {
+ DeleteOperation delete = repeatableOp.operation().delete();
+ return new IngesterOperation(repeatableOp, basePropertiesSize(delete));
+ }
+
+ private static void copyBaseProperties(BulkOperationBase op, BulkOperationBase.AbstractBuilder> builder) {
+ builder.id(op.id())
+ .index(op.index())
+ .ifPrimaryTerm(op.ifPrimaryTerm())
+ .ifSeqNo(op.ifSeqNo())
+ .routing(op.routing())
+ .version(op.version())
+ .versionType(op.versionType());
+ }
+
+ private static void copyIndexProperties(IndexOperation> op, IndexOperation.Builder> builder) {
+ copyBaseProperties(op, builder);
+ builder.pipeline(op.pipeline());
+ builder.requireAlias(op.requireAlias());
+ }
+
+ private static void copyCreateProperties(CreateOperation> op, CreateOperation.Builder> builder) {
+ copyBaseProperties(op, builder);
+ builder.pipeline(op.pipeline());
+ builder.requireAlias(op.requireAlias());
+ }
+
+ private static int size(String name, @Nullable Boolean value) {
+ if (value != null) {
+ return name.length() + 12; // 12 added chars for "name":"false",
+ } else {
+ return 0;
+ }
+ }
+
+ private static int size(String name, @Nullable String value) {
+ if (value != null) {
+ return name.length() + value.length() + 6; // 6 added chars for "name":"value",
+ } else {
+ return 0;
+ }
+ }
+
+ private static int size(String name, @Nullable Long value) {
+ if (value != null) {
+ // Borrowed from Long.toUnsignedString0, shift = 3 (base 10 is closer to 3 than 4)
+ int mag = Integer.SIZE - Long.numberOfLeadingZeros(value);
+ int chars = Math.max(((mag + (3 - 1)) / 3), 1);
+ return name.length() + chars + 4; // 4 added chars for "name":,
+ } else {
+ return 0;
+ }
+ }
+
+ private static int size(String name, @Nullable Integer value) {
+ if (value != null) {
+ // Borrowed from Integer.toUnsignedString0, shift = 3 (base 10 is closer to 3 than 4)
+ int mag = Integer.SIZE - Integer.numberOfLeadingZeros(value);
+ int chars = Math.max(((mag + (3 - 1)) / 3), 1);
+ return name.length() + chars + 4;
+ } else {
+ return 0;
+ }
+ }
+
+ private static int size(String name, @Nullable JsonEnum value) {
+ if (value != null) {
+ return name.length() + value.jsonValue().length() + 6;
+ } else {
+ return 0;
+ }
+ }
+
+ private static int basePropertiesSize(BulkOperationBase op) {
+ return size("id", op.id()) + size("index", op.index()) + size("if_primary_term", op.ifPrimaryTerm()) + size(
+ "if_seq_no",
+ op.ifSeqNo()
+ ) + size("routing", op.routing()) + size("version", op.version()) + size("version_type", op.versionType()) + 4; // Open/closing
+ // brace, 2 newlines
+ }
+}
diff --git a/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/RetryableBulkOperation.java b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/RetryableBulkOperation.java
new file mode 100644
index 0000000000..d4083c53fe
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/opensearch/_helpers/bulk/RetryableBulkOperation.java
@@ -0,0 +1,133 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.opensearch._helpers.bulk;
+
+import java.util.Iterator;
+import java.util.Optional;
+import org.opensearch.client.opensearch.core.bulk.BulkOperation;
+
+/**
+ * A bulk operation with retry information and optional context.
+ *
+ * This class wraps a {@link BulkOperation} along with:
+ *
+ * - An optional context value for tracking
+ * - A retry iterator providing delay values in milliseconds
+ * - The calculated time when this operation can be sent (for retries)
+ *
+ *
+ * This is an internal utility class used by {@link BulkIngester} to manage operation retries
+ * with backoff delays.
+ *
+ * @param optional context type associated with the operation
+ */
+class RetryableBulkOperation {
+ private final BulkOperation operation;
+ private final Context context;
+ private final Iterator retries;
+ private final Long retryTime;
+
+ /**
+ * Create a retryable bulk operation.
+ *
+ * @param request the bulk operation
+ * @param context optional context associated with this operation
+ * @param retries iterator providing retry delay values in milliseconds, or null if this is not a retry
+ */
+ RetryableBulkOperation(BulkOperation request, Context context, Iterator retries) {
+ this.operation = request;
+ this.context = context;
+ this.retries = retries;
+ // if the retries iterator is null it means that it's not a retry, otherwise calculating retry time
+ long currentMillis = currentMillis();
+ this.retryTime = Optional.ofNullable(retries).map(r -> currentMillis + r.next()).orElse(currentMillis);
+ }
+
+ /**
+ * Get the wrapped bulk operation.
+ *
+ * @return the bulk operation
+ */
+ public BulkOperation operation() {
+ return operation;
+ }
+
+ /**
+ * Get the optional context associated with this operation.
+ *
+ * @return the context, or null if none was provided
+ */
+ public Context context() {
+ return context;
+ }
+
+ /**
+ * Get the retry iterator providing backoff delay values.
+ *
+ * @return the retry iterator, or null if this is not a retry
+ */
+ public Iterator retries() {
+ return retries;
+ }
+
+ /**
+ * Get the time delay in milliseconds until this operation can be sent.
+ *
+ * @return the delay in milliseconds (may be negative if the operation is ready to send)
+ */
+ public long currentRetryTimeDelay() {
+ return this.retryTime - currentMillis();
+ }
+
+ /**
+ * Check if this operation can be retried again after a failure.
+ *
+ * @return true if more retry attempts are available, false otherwise
+ */
+ public boolean canRetry() {
+ return Optional.ofNullable(retries).map(Iterator::hasNext).orElse(true);
+ }
+
+ /**
+ * Check if this operation can be sent now (retry delay has elapsed).
+ *
+ * @return true if the operation can be sent immediately, false if it needs to wait
+ */
+ public boolean isSendable() {
+ return (this.retryTime - currentMillis()) <= 0;
+ }
+
+ private Long currentMillis() {
+ return System.nanoTime() / 1_000_000L;
+ }
+}
diff --git a/java-client/src/main/java/org/opensearch/client/transport/BackoffPolicy.java b/java-client/src/main/java/org/opensearch/client/transport/BackoffPolicy.java
new file mode 100644
index 0000000000..21709aa34f
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/transport/BackoffPolicy.java
@@ -0,0 +1,342 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.transport;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import javax.annotation.Nullable;
+
+/**
+ * Provides a set of generic backoff policies. Backoff policies are used to calculate the number of times an action will be retried
+ * and the intervals between those retries.
+ *
+ * Notes for implementing custom subclasses:
+ *
+ * The underlying mathematical principle of BackoffPolicy are progressions which can be either finite or infinite although
+ * the latter should not be used for retrying. A progression can be mapped to a java.util.Iterator with the following
+ * semantics:
+ *
+ *
+ * #hasNext() determines whether the progression has more elements. Return true for infinite progressions
+ *
+ * #next() determines the next element in the progression, i.e. the next wait time period
+ *
+ *
+ * Note that backoff policies are exposed as Iterables in order to be consumed multiple times.
+ */
+public abstract class BackoffPolicy implements Iterable {
+ private static final BackoffPolicy NO_BACKOFF = new NoBackoff();
+
+ /**
+ * Creates a backoff policy that will not allow any backoff, i.e. an operation will fail after the first attempt.
+ *
+ * @return A backoff policy without any backoff period. The returned instance is thread safe.
+ */
+ public static BackoffPolicy noBackoff() {
+ return NO_BACKOFF;
+ }
+
+ /**
+ * Creates an new constant backoff policy with the provided configuration.
+ *
+ * @param delay The delay defines how long to wait between retry attempts. Must not be null.
+ * Must be <= Integer.MAX_VALUE ms.
+ * @param maxNumberOfRetries The maximum number of retries. Must be a non-negative number.
+ * @return A backoff policy with a constant wait time between retries. The returned instance is thread safe but each
+ * iterator created from it should only be used by a single thread.
+ */
+ public static BackoffPolicy constantBackoff(Long delay, int maxNumberOfRetries) {
+ return new ConstantBackoff(checkDelay(delay), maxNumberOfRetries);
+ }
+
+ /**
+ * Creates an new exponential backoff policy with a default configuration of 50 ms initial wait period and 8 retries taking
+ * roughly 5.1 seconds in total.
+ *
+ * @return A backoff policy with an exponential increase in wait time for retries. The returned instance is thread safe but each
+ * iterator created from it should only be used by a single thread.
+ */
+ public static BackoffPolicy exponentialBackoff() {
+ return exponentialBackoff(50L, 8);
+ }
+
+ /**
+ * Creates an new exponential backoff policy with the provided configuration.
+ *
+ * @param initialDelay The initial delay defines how long to wait for the first retry attempt. Must not be null.
+ * Must be <= Integer.MAX_VALUE ms.
+ * @param maxNumberOfRetries The maximum number of retries. Must be a non-negative number.
+ * @return A backoff policy with an exponential increase in wait time for retries. The returned instance is thread safe but each
+ * iterator created from it should only be used by a single thread.
+ */
+ public static BackoffPolicy exponentialBackoff(Long initialDelay, int maxNumberOfRetries) {
+ return new ExponentialBackoff(checkDelay(initialDelay), maxNumberOfRetries);
+ }
+
+ /**
+ * Creates a new linear backoff policy with the provided configuration
+ *
+ * @param delayIncrement The amount by which to increment the delay on each retry
+ * @param maxNumberOfRetries The maximum number of retries
+ * @param maximumDelay The maximum delay
+ * @return A backoff policy with linear increase in wait time for retries.
+ */
+ public static BackoffPolicy linearBackoff(Long delayIncrement, int maxNumberOfRetries, Long maximumDelay) {
+ return new LinearBackoff(delayIncrement, maxNumberOfRetries, maximumDelay);
+ }
+
+ /**
+ * Wraps the backoff policy in one that calls a method every time a new backoff is taken from the policy.
+ */
+ public static BackoffPolicy wrap(BackoffPolicy delegate, Runnable onBackoff) {
+ return new WrappedBackoffPolicy(delegate, onBackoff);
+ }
+
+ private static Long checkDelay(Long delay) {
+ if (delay > Integer.MAX_VALUE) {
+ throw new IllegalArgumentException("delay must be <= " + Integer.MAX_VALUE + " ms");
+ }
+ return delay;
+ }
+
+ private static class NoBackoff extends BackoffPolicy {
+ @Override
+ public Iterator iterator() {
+ return Collections.emptyIterator();
+ }
+
+ @Override
+ public String toString() {
+ return "NoBackoff";
+ }
+ }
+
+ private static class ExponentialBackoff extends BackoffPolicy {
+ private final Long start;
+
+ private final int numberOfElements;
+
+ private ExponentialBackoff(Long start, int numberOfElements) {
+ assert start >= 0;
+ assert numberOfElements >= 0;
+ this.start = start;
+ this.numberOfElements = numberOfElements;
+ }
+
+ @Override
+ public Iterator iterator() {
+ return new ExponentialBackoffIterator(start, numberOfElements);
+ }
+
+ @Override
+ public String toString() {
+ return "ExponentialBackoff{start=" + start + ", numberOfElements=" + numberOfElements + '}';
+ }
+ }
+
+ private static class ExponentialBackoffIterator implements Iterator {
+ private final int numberOfElements;
+
+ private final Long start;
+
+ private int currentlyConsumed;
+
+ private ExponentialBackoffIterator(Long start, int numberOfElements) {
+ this.start = start;
+ this.numberOfElements = numberOfElements;
+ }
+
+ @Override
+ public boolean hasNext() {
+ return currentlyConsumed < numberOfElements;
+ }
+
+ @Override
+ public Long next() {
+ if (!hasNext()) {
+ throw new NoSuchElementException("Only up to " + numberOfElements + " elements");
+ }
+ Long result = start + 10L * ((int) Math.exp(0.8d * (currentlyConsumed)) - 1);
+ currentlyConsumed++;
+ return result;
+ }
+ }
+
+ private static final class ConstantBackoff extends BackoffPolicy {
+ private final Long delay;
+
+ private final int numberOfElements;
+
+ ConstantBackoff(Long delay, int numberOfElements) {
+ assert numberOfElements >= 0;
+ this.delay = delay;
+ this.numberOfElements = numberOfElements;
+ }
+
+ @Override
+ public Iterator iterator() {
+ return new ConstantBackoffIterator(delay, numberOfElements);
+ }
+
+ @Override
+ public String toString() {
+ return "ConstantBackoff{delay=" + delay + ", numberOfElements=" + numberOfElements + '}';
+ }
+ }
+
+ private static final class ConstantBackoffIterator implements Iterator {
+ private final Long delay;
+ private final int numberOfElements;
+ private int curr;
+
+ ConstantBackoffIterator(Long delay, int numberOfElements) {
+ this.delay = delay;
+ this.numberOfElements = numberOfElements;
+ }
+
+ @Override
+ public boolean hasNext() {
+ return curr < numberOfElements;
+ }
+
+ @Override
+ public Long next() {
+ if (hasNext() == false) {
+ throw new NoSuchElementException();
+ }
+ curr++;
+ return delay;
+ }
+ }
+
+ private static final class WrappedBackoffPolicy extends BackoffPolicy {
+ private final BackoffPolicy delegate;
+ private final Runnable onBackoff;
+
+ WrappedBackoffPolicy(BackoffPolicy delegate, Runnable onBackoff) {
+ this.delegate = delegate;
+ this.onBackoff = onBackoff;
+ }
+
+ @Override
+ public Iterator iterator() {
+ return new WrappedBackoffIterator(delegate.iterator(), onBackoff);
+ }
+
+ @Override
+ public String toString() {
+ return "WrappedBackoffPolicy{delegate=" + delegate + ", onBackoff=" + onBackoff + '}';
+ }
+ }
+
+ private static final class WrappedBackoffIterator implements Iterator {
+ private final Iterator delegate;
+ private final Runnable onBackoff;
+
+ WrappedBackoffIterator(Iterator delegate, Runnable onBackoff) {
+ this.delegate = delegate;
+ this.onBackoff = onBackoff;
+ }
+
+ @Override
+ public boolean hasNext() {
+ return delegate.hasNext();
+ }
+
+ @Override
+ public Long next() {
+ if (false == delegate.hasNext()) {
+ throw new NoSuchElementException();
+ }
+ onBackoff.run();
+ return delegate.next();
+ }
+ }
+
+ private static final class LinearBackoff extends BackoffPolicy {
+
+ private final Long delayIncrement;
+ private final int maxNumberOfRetries;
+ private final Long maximumDelay;
+
+ private LinearBackoff(Long delayIncrement, int maxNumberOfRetries, @Nullable Long maximumDelay) {
+ this.delayIncrement = delayIncrement;
+ this.maxNumberOfRetries = maxNumberOfRetries;
+ this.maximumDelay = maximumDelay;
+ }
+
+ @Override
+ public Iterator iterator() {
+ return new LinearBackoffIterator(delayIncrement, maxNumberOfRetries, maximumDelay);
+ }
+
+ @Override
+ public String toString() {
+ return "LinearBackoff{"
+ + "delayIncrement="
+ + delayIncrement
+ + ", maxNumberOfRetries="
+ + maxNumberOfRetries
+ + ", maximumDelay="
+ + maximumDelay
+ + '}';
+ }
+ }
+
+ private static final class LinearBackoffIterator implements Iterator {
+
+ private final Long delayIncrement;
+ private final int maxNumberOfRetries;
+ private final Long maximumDelay;
+ private int curr;
+
+ private LinearBackoffIterator(Long delayIncrement, int maxNumberOfRetries, @Nullable Long maximumDelay) {
+ this.delayIncrement = delayIncrement;
+ this.maxNumberOfRetries = maxNumberOfRetries;
+ this.maximumDelay = maximumDelay;
+ }
+
+ @Override
+ public boolean hasNext() {
+ return curr < maxNumberOfRetries;
+ }
+
+ @Override
+ public Long next() {
+ curr++;
+ Long delay = curr * delayIncrement;
+ return maximumDelay == null ? delay : delay.compareTo(maximumDelay) < 0 ? delay : maximumDelay;
+ }
+ }
+}
diff --git a/java-client/src/main/java/org/opensearch/client/util/BinaryData.java b/java-client/src/main/java/org/opensearch/client/util/BinaryData.java
new file mode 100644
index 0000000000..ced2d345c7
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/util/BinaryData.java
@@ -0,0 +1,136 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.util;
+
+import jakarta.json.stream.JsonGenerator;
+import jakarta.json.stream.JsonParser;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import org.opensearch.client.json.JsonpDeserializable;
+import org.opensearch.client.json.JsonpDeserializer;
+import org.opensearch.client.json.JsonpDeserializerBase;
+import org.opensearch.client.json.JsonpMapper;
+import org.opensearch.client.json.JsonpSerializable;
+
+/**
+ * Binary data with an associated content type.
+ */
+@JsonpDeserializable
+public interface BinaryData extends JsonpSerializable {
+
+ /**
+ * Get the content type of this binary data.
+ *
+ * @return the content type (e.g., "application/json")
+ */
+ String contentType();
+
+ /**
+ * Write this data to an output stream.
+ * @throws IllegalStateException if the content has already been consumed and the object
+ * isn't replayable.
+ */
+ void writeTo(OutputStream out) throws IOException;
+
+ /**
+ * Return this data as a {@code ByteBuffer}.
+ *
+ * @throws IllegalStateException if the content has already been consumed and the object
+ * isn't replayable.
+ */
+ ByteBuffer asByteBuffer() throws IOException;
+
+ /**
+ * Return this data as an {@code InputStream}.
+ *
+ * @throws IllegalStateException if the content has already been consumed and the object
+ * isn't replayable.
+ */
+ InputStream asInputStream() throws IOException;
+
+ /**
+ * Can this object be consumed several times?
+ */
+ boolean isRepeatable();
+
+ /**
+ * Get the estimated size in bytes of the data.
+ *
+ * @return the estimated size, or -1 if the value cannot be estimated or if the data has already been
+ * consumed.
+ */
+ long size();
+
+ /**
+ * Create a {@code BinaryData} from a value and a JSON mapper. The binary content is the result of serializing
+ * {@code value} with {@code mapper}. Returns {@code null} if {@code value} is null.
+ *
+ * Note that the result's content-type can be different from {@code "application/json"} if the JSON mapper is setup to
+ * produce other representations such as CBOR or SMILE.
+ */
+ static BinaryData of(Object value, JsonpMapper mapper) {
+ if (value == null) {
+ return null;
+ }
+
+ // Already binary: pass through rather than re-serializing.
+ if (value instanceof BinaryData) {
+ return (BinaryData) value;
+ }
+
+ NoCopyByteArrayOutputStream out = new NoCopyByteArrayOutputStream();
+ try (JsonGenerator generator = mapper.jsonProvider().createGenerator(out)) {
+ mapper.serialize(value, generator);
+ }
+
+ // NoCopyByteArrayOutputStream exposes its live buffer, hence the explicit (0, size) slice.
+ return new ByteArrayBinaryData(out.array(), 0, out.size(), ContentType.APPLICATION_JSON);
+ }
+
+ /**
+ * Create a {@code BinaryData} wrapping an existing byte array. The array is not copied;
+ * callers must not mutate it afterwards.
+ */
+ static BinaryData of(byte[] bytes, String contentType) {
+ return new ByteArrayBinaryData(bytes, 0, bytes.length, contentType);
+ }
+
+ /**
+ * Create a {@code BinaryData} wrapping a slice of an existing byte array. The array is not
+ * copied; callers must not mutate it afterwards.
+ */
+ static BinaryData of(byte[] value, int offset, int length, String contentType) {
+ return new ByteArrayBinaryData(value, offset, length, contentType);
+ }
+
+ /**
+ * Deserializer that captures arbitrary JSON as {@code BinaryData}, delegating to
+ * {@link ByteArrayBinaryData#_DESERIALIZER}.
+ */
+ JsonpDeserializer _DESERIALIZER = new JsonpDeserializerBase(
+ ByteArrayBinaryData._DESERIALIZER.acceptedEvents()
+ ) {
+ @Override
+ public BinaryData deserialize(JsonParser parser, JsonpMapper mapper, JsonParser.Event event) {
+ return ByteArrayBinaryData._DESERIALIZER.deserialize(parser, mapper, event);
+ }
+ };
+}
diff --git a/java-client/src/main/java/org/opensearch/client/util/ByteArrayBinaryData.java b/java-client/src/main/java/org/opensearch/client/util/ByteArrayBinaryData.java
new file mode 100644
index 0000000000..f7c7da5b05
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/util/ByteArrayBinaryData.java
@@ -0,0 +1,165 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.util;
+
+import jakarta.json.stream.JsonGenerator;
+import jakarta.json.stream.JsonParser;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.EnumSet;
+import org.opensearch.client.json.JsonpDeserializable;
+import org.opensearch.client.json.JsonpDeserializer;
+import org.opensearch.client.json.JsonpDeserializerBase;
+import org.opensearch.client.json.JsonpMapper;
+import org.opensearch.client.json.JsonpUtils;
+
+/**
+ * A {@link BinaryData} implementation backed by a byte array. It is repeatable:
+ * the underlying array can be consumed any number of times. The array is not
+ * copied by the wrapping constructors, so callers must not mutate it afterwards.
+ */
+@JsonpDeserializable
+public class ByteArrayBinaryData implements BinaryData {
+
+ private final byte[] bytes;
+ private final int offset;
+ private final int length;
+ private final String contentType;
+
+ /**
+ * Wrap a slice of an existing byte array (no copy).
+ */
+ public ByteArrayBinaryData(byte[] bytes, int offset, int length, String contentType) {
+ this.contentType = contentType;
+ this.bytes = bytes;
+ this.offset = offset;
+ this.length = length;
+ }
+
+ /**
+ * Wrap an entire existing byte array (no copy).
+ */
+ public ByteArrayBinaryData(byte[] bytes, String contentType) {
+ this.contentType = contentType;
+ this.bytes = bytes;
+ this.offset = 0;
+ this.length = bytes.length;
+ }
+
+ /**
+ * Copy another {@link BinaryData}. Typically used to make a replayable {@link BinaryData}
+ * from a non-replayable one.
+ */
+ public ByteArrayBinaryData(BinaryData data) throws IOException {
+ NoCopyByteArrayOutputStream out = new NoCopyByteArrayOutputStream();
+ data.writeTo(out);
+ this.contentType = data.contentType();
+ this.bytes = out.array();
+ this.offset = 0;
+ this.length = out.size();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String contentType() {
+ return this.contentType;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void writeTo(OutputStream out) throws IOException {
+ out.write(bytes, offset, length);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public long size() {
+ return length;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public ByteBuffer asByteBuffer() {
+ return ByteBuffer.wrap(bytes, offset, length);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public InputStream asInputStream() {
+ return new ByteArrayInputStream(bytes, offset, length);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean isRepeatable() {
+ return true;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void serialize(JsonGenerator generator, JsonpMapper mapper) {
+ // Serialize the binary content as raw JSON by parsing it and copying events.
+ // Both the stream and the parser are managed by try-with-resources so they are
+ // released even if JsonpUtils.copy() fails midway (the previous manual
+ // parser.close() was skipped on exception).
+ try (InputStream in = asInputStream(); JsonParser parser = mapper.jsonProvider().createParser(in)) {
+ JsonpUtils.copy(parser, generator);
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to serialize BinaryData", e);
+ }
+ }
+
+ /** Captures the raw JSON value at the parser's position into a byte array. */
+ private static class Deserializer extends JsonpDeserializerBase {
+
+ Deserializer() {
+ // Any JSON event is acceptable: we copy whatever value comes next verbatim.
+ super(EnumSet.allOf(JsonParser.Event.class));
+ }
+
+ @Override
+ public ByteArrayBinaryData deserialize(JsonParser parser, JsonpMapper mapper, JsonParser.Event event) {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ // try-with-resources closes (and thereby flushes) the generator even if the copy fails.
+ try (JsonGenerator generator = mapper.jsonProvider().createGenerator(baos)) {
+ JsonpUtils.copy(parser, generator, event);
+ }
+ return new ByteArrayBinaryData(baos.toByteArray(), ContentType.APPLICATION_JSON);
+ }
+ }
+
+ public static final JsonpDeserializer _DESERIALIZER = new Deserializer();
+}
diff --git a/java-client/src/main/java/org/opensearch/client/util/ContentType.java b/java-client/src/main/java/org/opensearch/client/util/ContentType.java
new file mode 100644
index 0000000000..77c8f5f401
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/util/ContentType.java
@@ -0,0 +1,43 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.util;
+
+/**
+ * Constants for content-type values.
+ */
+public class ContentType {
+
+ // Utility holder: the private constructor prevents instantiation and subclassing.
+ private ContentType() {}
+
+ /** The {@code application/json} MIME type, used for JSON request and response bodies. */
+ public static final String APPLICATION_JSON = "application/json";
+}
diff --git a/java-client/src/main/java/org/opensearch/client/util/NoCopyByteArrayOutputStream.java b/java-client/src/main/java/org/opensearch/client/util/NoCopyByteArrayOutputStream.java
new file mode 100644
index 0000000000..1333374ab5
--- /dev/null
+++ b/java-client/src/main/java/org/opensearch/client/util/NoCopyByteArrayOutputStream.java
@@ -0,0 +1,79 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+
+/**
+ * A {@code ByteArrayOutputStream} that reduces copy operations of its underlying buffer.
+ *
+ * <p>Unlike {@code toByteArray()}, the accessors below expose the <em>live</em> internal
+ * buffer without copying. Callers must treat the returned array/views as read-only and
+ * must not write to this stream while still using a previously obtained view.
+ */
+public class NoCopyByteArrayOutputStream extends ByteArrayOutputStream {
+
+ /**
+ * Create a new output stream with default initial capacity.
+ */
+ public NoCopyByteArrayOutputStream() {}
+
+ /**
+ * Create a new output stream with the specified initial capacity.
+ *
+ * @param size the initial capacity of the internal buffer
+ */
+ public NoCopyByteArrayOutputStream(int size) {
+ super(size);
+ }
+
+ /**
+ * Get the underlying buffer. Data was added to this buffer up to {@code size()}. Note that calling this method
+ * again may return a different result if additional data was inserted and the buffer had to grow.
+ */
+ public byte[] array() {
+ return this.buf;
+ }
+
+ /**
+ * Get an {@code InputStream} view on this object, based on the current buffer and size.
+ * Writes that force the buffer to grow after this call will not be visible through the view.
+ */
+ public ByteArrayInputStream asInputStream() {
+ return new ByteArrayInputStream(this.buf, 0, this.count);
+ }
+
+ /**
+ * Get a {@code ByteBuffer} view on this object, based on the current buffer and size.
+ * Writes that force the buffer to grow after this call will not be visible through the view.
+ */
+ public ByteBuffer asByteBuffer() {
+ return ByteBuffer.wrap(this.buf, 0, this.count);
+ }
+}
diff --git a/java-client/src/test/java/org/opensearch/client/opensearch/_helpers/bulk/BulkIngesterTest.java b/java-client/src/test/java/org/opensearch/client/opensearch/_helpers/bulk/BulkIngesterTest.java
new file mode 100644
index 0000000000..2662db26a3
--- /dev/null
+++ b/java-client/src/test/java/org/opensearch/client/opensearch/_helpers/bulk/BulkIngesterTest.java
@@ -0,0 +1,583 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.opensearch._helpers.bulk;
+
+import jakarta.json.stream.JsonGenerator;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+import javax.annotation.Nullable;
+import org.junit.Assert;
+import org.junit.Test;
+import org.opensearch.client.json.JsonpMapper;
+import org.opensearch.client.json.jackson.JacksonJsonpMapper;
+import org.opensearch.client.opensearch.OpenSearchAsyncClient;
+import org.opensearch.client.opensearch.core.BulkRequest;
+import org.opensearch.client.opensearch.core.BulkResponse;
+import org.opensearch.client.opensearch.core.bulk.BulkOperation;
+import org.opensearch.client.opensearch.core.bulk.BulkResponseItem;
+import org.opensearch.client.opensearch.core.bulk.OperationType;
+import org.opensearch.client.transport.Endpoint;
+import org.opensearch.client.transport.OpenSearchTransport;
+import org.opensearch.client.transport.TransportOptions;
+
+/**
+ * Unit tests for {@link BulkIngester}, driven by a stubbed {@link OpenSearchTransport}
+ * so that no running cluster or network access is required.
+ */
+public class BulkIngesterTest extends Assert {
+
+ // NOTE(review): AppData appears unused in this class — likely a leftover after the
+ // stress/end-to-end tests moved to AbstractBulkIngesterIT; confirm before removing.
+ static class AppData {
+ private int intValue;
+ private String msg;
+
+ public int getIntValue() {
+ return intValue;
+ }
+
+ public void setIntValue(int intValue) {
+ this.intValue = intValue;
+ }
+
+ public String getMsg() {
+ return msg;
+ }
+
+ public void setMsg(String msg) {
+ this.msg = msg;
+ }
+ }
+
+ // Canned successful per-item response returned by the stub transport for every operation.
+ private static final BulkResponseItem successItem = BulkResponseItem.of(
+ i -> i.index("foo").status(200).operationType(OperationType.Delete)
+ );
+
+ // The single bulk operation repeatedly added to ingesters throughout these tests.
+ private static final BulkOperation operation = BulkOperation.of(op -> op.delete(d -> d.index("foo").id("bar")));
+
+ private void printStats(BulkIngester> ingester) {
+ System.out.printf(
+ "Ingester - operations: %d (%d), requests: %d (%d)%n",
+ ingester.operationsCount(),
+ ingester.operationContentionsCount(),
+ ingester.requestCount(),
+ ingester.requestContentionsCount()
+ );
+ }
+
+ private void printStats(CountingListener listener) {
+ System.out.printf("Listener - operations: %d, requests: %d%n", listener.operations.get(), listener.requests.get());
+ }
+
+ private void printStats(TestTransport transport) {
+ System.out.printf(
+ "Transport - operations: %d, requests: %d (%d completed)%n",
+ transport.operations.get(),
+ transport.requestsStarted.get(),
+ transport.requestsCompleted.get()
+ );
+ }
+
+ @Test
+ public void basicTestFlush() throws Exception {
+ // Prime numbers, so that we have leftovers to flush before shutting down
+ multiThreadTest(7, 3, 5, 101, true);
+ }
+
+ @Test
+ public void basicTestFlushWithInternalScheduler() throws Exception {
+ // Prime numbers, so that we have leftovers to flush before shutting down
+ multiThreadTest(7, 3, 5, 101, false);
+ }
+
+ @Test
+ public void basicTestNoFlush() throws Exception {
+ // Will have nothing to flush on close.
+ multiThreadTest(10, 3, 5, 100, true);
+ }
+
+ @Test
+ public void basicTestNoFlushWithInternalScheduler() throws Exception {
+ // Will have nothing to flush on close.
+ multiThreadTest(10, 3, 5, 100, false);
+ }
+
+ // Shared driver: numThreads writers each add numOperations operations, then the ingester
+ // is closed and operation/request counts are cross-checked between ingester, listener
+ // and transport.
+ private void multiThreadTest(int maxOperations, int maxRequests, int numThreads, int numOperations, boolean externalScheduler)
+ throws Exception {
+
+ CountingListener listener = new CountingListener();
+ TestTransport transport = new TestTransport();
+ OpenSearchAsyncClient client = new OpenSearchAsyncClient(transport);
+ ScheduledExecutorService scheduler;
+ if (externalScheduler) {
+ scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
+ Thread t = Executors.defaultThreadFactory().newThread(r);
+ t.setName("my-bulk-ingester-executor#");
+ t.setDaemon(true);
+ return t;
+ });
+ } else {
+ // null lets the ingester create and manage its own internal scheduler.
+ scheduler = null;
+ }
+
+ BulkIngester ingester = BulkIngester.of(
+ b -> b.client(client).maxOperations(maxOperations).maxConcurrentRequests(maxRequests).scheduler(scheduler).listener(listener)
+ );
+
+ CountDownLatch latch = new CountDownLatch(numThreads);
+ for (int i = 0; i < numThreads; i++) {
+ new Thread(() -> {
+ try {
+ // Random stagger so the writer threads interleave.
+ Thread.sleep((long) (Math.random() * 100));
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ for (int j = 0; j < numOperations; j++) {
+ ingester.add(operation);
+ }
+
+ latch.countDown();
+ }).start();
+ }
+
+ latch.await();
+
+ ingester.close();
+ transport.close();
+ if (scheduler != null) scheduler.shutdownNow();
+
+ printStats(ingester);
+ printStats(listener);
+ printStats(transport);
+
+ int expectedOperations = numThreads * numOperations;
+ assertEquals(expectedOperations, ingester.operationsCount());
+ assertEquals(expectedOperations, listener.operations.get());
+ assertEquals(expectedOperations, transport.operations.get());
+
+ // Ceiling division: a trailing partial batch is flushed on close as one extra request.
+ int expectedRequests = expectedOperations / maxOperations + ((expectedOperations % maxOperations == 0) ? 0 : 1);
+
+ assertEquals(expectedRequests, ingester.requestCount());
+ assertEquals(expectedRequests, listener.requests.get());
+ assertEquals(expectedRequests, transport.requestsStarted.get());
+ }
+
+ // Note: multiThreadStressTest has been moved to AbstractBulkIngesterIT
+ // See: java-client/src/test/java11/org/opensearch/client/opensearch/integTest/AbstractBulkIngesterIT.java
+
+ @Test
+ public void sizeLimitTest() throws Exception {
+ TestTransport transport = new TestTransport();
+
+ long operationSize = IngesterOperation.of(new RetryableBulkOperation<>(operation, null, null), transport.jsonpMapper()).size();
+
+ BulkIngester> ingester = BulkIngester.of(
+ b -> b.client(new OpenSearchAsyncClient(transport))
+ // Set size limit just above operation's size, leading to 2 operations per request
+ .maxSize(operationSize + 1)
+ );
+
+ for (int i = 0; i < 10; i++) {
+ ingester.add(operation);
+ }
+
+ ingester.close();
+ transport.close();
+
+ assertEquals(10, ingester.operationsCount());
+ assertEquals(5, ingester.requestCount());
+ }
+
+ @Test
+ public void periodicFlushTest() throws Exception {
+ TestTransport transport = new TestTransport();
+
+ BulkIngester> ingester = BulkIngester.of(
+ b -> b.client(new OpenSearchAsyncClient(transport))
+ // Flush every 50 ms
+ .flushInterval(50, TimeUnit.MILLISECONDS)
+ // Disable other flushing limits
+ .maxSize(-1)
+ .maxOperations(-1)
+ .maxConcurrentRequests(Integer.MAX_VALUE - 1)
+ );
+
+ // Add an operation every 100 ms to give time
+ // to the flushing timer to kick in.
+ for (int i = 0; i < 10; i++) {
+ ingester.add(operation);
+ Thread.sleep(100);
+ }
+
+ ingester.close();
+ transport.close();
+
+ // We should have one operation per request
+ assertEquals(10, ingester.operationsCount());
+ assertEquals(10, ingester.requestCount());
+ }
+
+ // Verifies that exceptions thrown by the listener (before or after a bulk call)
+ // do not break the ingester's accounting or the request flow.
+ @Test
+ public void failingListener() throws Exception {
+ TestTransport transport = new TestTransport();
+ AtomicInteger failureCount = new AtomicInteger();
+ AtomicReference> lastContexts = new AtomicReference<>();
+ AtomicReference lastRequest = new AtomicReference<>();
+
+ BulkListener listener = new BulkListener() {
+ @Override
+ public void beforeBulk(long executionId, BulkRequest request, List contexts) {
+ // So that we can test that it's non-empty
+ lastContexts.set(contexts);
+ lastRequest.set(request);
+
+ if (executionId == 1) {
+ // Fail before the request is sent
+ failureCount.incrementAndGet();
+ throw new RuntimeException("Before bulk failure");
+ }
+ }
+
+ @Override
+ public void afterBulk(long executionId, BulkRequest request, List contexts, BulkResponse response) {
+ if (executionId == 2) {
+ // Fail after the request is sent
+ failureCount.incrementAndGet();
+ throw new RuntimeException("After bulk failure");
+ }
+ }
+
+ @Override
+ public void afterBulk(long executionId, BulkRequest request, List contexts, Throwable failure) {
+
+ }
+ };
+
+ BulkIngester ingester = BulkIngester.of(
+ b -> b.client(new OpenSearchAsyncClient(transport))
+ // Flush every 50 ms
+ .flushInterval(50, TimeUnit.MILLISECONDS)
+ // Disable other flushing limits
+ .maxSize(-1)
+ .maxOperations(-1)
+ .maxConcurrentRequests(Integer.MAX_VALUE - 1)
+ .listener(listener)
+ );
+
+ // Add an operation every 100 ms to give time
+ // to the flushing timer to kick in.
+ for (int i = 0; i < 10; i++) {
+ ingester.add(operation);
+ Thread.sleep(100);
+ }
+
+ ingester.close();
+ transport.close();
+
+ // We should have one operation per request
+ assertEquals(10, ingester.operationsCount());
+ assertEquals(10, ingester.requestCount());
+ // Transport hasn't seen the request where beforeBulk failed
+ assertEquals(9, transport.requestsStarted.get());
+
+ assertEquals(2, failureCount.get());
+
+ // Also test context list when no values were provided
+ assertTrue(lastRequest.get().operations().size() > 0);
+ assertEquals(lastRequest.get().operations().size(), lastContexts.get().size());
+ }
+
+ @Test
+ public void withContextValues() throws Exception {
+ TestTransport transport = new TestTransport();
+ List allRequests = Collections.synchronizedList(new ArrayList<>());
+ List> allContexts = Collections.synchronizedList(new ArrayList<>());
+
+ BulkListener listener = new BulkListener() {
+ @Override
+ public void beforeBulk(long executionId, BulkRequest request, List contexts) {
+ allRequests.add(request);
+ allContexts.add(contexts);
+ }
+
+ @Override
+ public void afterBulk(long executionId, BulkRequest request, List contexts, BulkResponse response) {}
+
+ @Override
+ public void afterBulk(long executionId, BulkRequest request, List contexts, Throwable failure) {}
+ };
+
+ BulkIngester ingester = BulkIngester.of(
+ b -> b.client(new OpenSearchAsyncClient(transport))
+ // Split every 10 operations
+ .maxOperations(10)
+ .listener(listener)
+ );
+
+ for (int i = 0; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ // Set a context only after 5, so that we test filling with nulls.
+ Integer context = j < 5 ? null : i * 10 + j;
+ ingester.add(operation, context);
+ }
+ }
+
+ ingester.close();
+ transport.close();
+
+ // We should have 10 operations per request
+ assertEquals(100, ingester.operationsCount());
+ assertEquals(10, ingester.requestCount());
+
+ for (int i = 0; i < 10; i++) {
+ List contexts = allContexts.get(i);
+ for (int j = 0; j < 10; j++) {
+ if (j < 5) {
+ assertNull(contexts.get(j));
+ } else {
+ assertEquals(Integer.valueOf(i * 10 + j), contexts.get(j));
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testGlobalSettings() throws Exception {
+ AtomicReference storedRequest = new AtomicReference<>();
+
+ TestTransport transport = new TestTransport();
+ CountingListener listener = new CountingListener() {
+ @Override
+ public void beforeBulk(long executionId, BulkRequest request, List contexts) {
+ super.beforeBulk(executionId, request, contexts);
+ storedRequest.set(request);
+ }
+ };
+
+ BulkIngester ingester = BulkIngester.of(
+ b -> b.client(new OpenSearchAsyncClient(transport)).listener(listener).globalSettings(s -> s.index("foo").routing("bar"))
+ );
+
+ ingester.add(operation);
+
+ ingester.close();
+ transport.close();
+
+ assertEquals(1, ingester.operationsCount());
+ assertEquals(1, ingester.requestCount());
+
+ // Global settings must be propagated to the generated BulkRequest.
+ assertEquals("foo", storedRequest.get().index());
+ assertEquals("bar", storedRequest.get().routing());
+ }
+
+ // Checks that per-operation metadata (pipeline, require_alias) survives the
+ // repeatable-operation round trip performed by IngesterOperation.
+ @Test
+ public void pipelineTest() {
+ String json = "{\"create\":{\"_id\":\"some_id\",\"_index\":\"some_idx\",\"pipeline\":\"pipe\",\"require_alias\":true}}";
+ JsonpMapper mapper = new JacksonJsonpMapper();
+
+ BulkOperation create = BulkOperation.of(
+ o -> o.create(c -> c.pipeline("pipe").requireAlias(true).index("some_idx").id("some_id").document("Some doc"))
+ );
+
+ String createStr = toJsonString(create, mapper);
+ assertEquals(json, createStr);
+
+ BulkOperation create1 = IngesterOperation.of(new RetryableBulkOperation<>(create, null, null), mapper)
+ .repeatableOperation()
+ .operation();
+
+ String create1Str = toJsonString(create1, mapper);
+ assertEquals(json, create1Str);
+ }
+
+ // Note: endToEndTest has been moved to AbstractBulkIngesterIT
+ // See: java-client/src/test/java11/org/opensearch/client/opensearch/integTest/AbstractBulkIngesterIT.java
+
+ @Test
+ public void testConfigValidation() {
+
+ BulkIngester.Builder b = new BulkIngester.Builder<>();
+
+ try {
+ b.flushInterval(-1, TimeUnit.MILLISECONDS);
+ fail("Expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected
+ }
+
+ try {
+ b.maxConcurrentRequests(0);
+ fail("Expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected
+ }
+
+ try {
+ b.maxSize(-2);
+ fail("Expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected
+ }
+
+ try {
+ b.maxOperations(-2);
+ fail("Expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected
+ }
+
+ try {
+ b.maxSize(-1).maxOperations(-1).build();
+ fail("Expected IllegalStateException");
+ } catch (IllegalStateException e) {
+ // Expected
+ }
+ }
+
+ // -----------------------------------------------------------------------------------------------------------------
+
+ // Listener that tallies operations and requests, counting failed bulks the same as successes.
+ private static class CountingListener implements BulkListener {
+ public final AtomicInteger operations = new AtomicInteger();
+ public final AtomicInteger requests = new AtomicInteger();
+
+ @Override
+ public void beforeBulk(long executionId, BulkRequest request, List contexts) {
+
+ }
+
+ @Override
+ public void afterBulk(long executionId, BulkRequest request, List contexts, BulkResponse response) {
+ operations.addAndGet(request.operations().size());
+ requests.incrementAndGet();
+ }
+
+ @Override
+ public void afterBulk(long executionId, BulkRequest request, List contexts, Throwable failure) {
+ failure.printStackTrace();
+ operations.addAndGet(request.operations().size());
+ requests.incrementAndGet();
+ }
+ }
+
+ // Stub transport: asynchronously answers every BulkRequest with one successItem per
+ // operation, while counting started/completed requests and total operations.
+ private static class TestTransport implements OpenSearchTransport {
+ public final AtomicInteger requestsStarted = new AtomicInteger();
+ public final AtomicInteger requestsCompleted = new AtomicInteger();
+ public final AtomicInteger operations = new AtomicInteger();
+
+ private final ExecutorService executor = Executors.newCachedThreadPool();
+
+ @Override
+ public ResponseT performRequest(
+ RequestT request,
+ Endpoint endpoint,
+ @Nullable TransportOptions options
+ ) throws IOException {
+ // The ingester only uses the async path; the sync path is intentionally unsupported.
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public CompletableFuture performRequestAsync(
+ RequestT request,
+ Endpoint endpoint,
+ @Nullable TransportOptions options
+ ) {
+
+ BulkRequest bulk = (BulkRequest) request;
+ requestsStarted.incrementAndGet();
+ operations.addAndGet(bulk.operations().size());
+
+ if (bulk.operations().size() == 0) {
+ System.out.println("No operations!");
+ }
+
+ List items = new ArrayList<>();
+ for (int i = 0; i < bulk.operations().size(); i++) {
+ items.add(successItem);
+ }
+
+ // Complete on a separate thread to mimic real asynchronous I/O.
+ CompletableFuture response = new CompletableFuture<>();
+ executor.submit(() -> {
+ requestsCompleted.incrementAndGet();
+ response.complete(BulkResponse.of(r -> r.errors(false).items(items).took(3)));
+ });
+
+ @SuppressWarnings("unchecked")
+ CompletableFuture result = (CompletableFuture) response;
+ return result;
+ }
+
+ @Override
+ public JsonpMapper jsonpMapper() {
+ return new JacksonJsonpMapper();
+ }
+
+ @Override
+ public TransportOptions options() {
+ return null;
+ }
+
+ @Override
+ public void close() throws IOException {
+ executor.shutdown();
+ try {
+ executor.awaitTermination(1, TimeUnit.SECONDS);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+ /**
+ * Serializes an object to a JSON string.
+ *
+ * @param value the object to serialize
+ * @param mapper the JSON mapper to use for serialization
+ * @return the JSON string representation
+ */
+ public static String toJsonString(Object value, JsonpMapper mapper) {
+ java.io.StringWriter writer = new java.io.StringWriter();
+ JsonGenerator generator = mapper.jsonProvider().createGenerator(writer);
+ mapper.serialize(value, generator);
+ // close() flushes the generator; StringWriter itself needs no closing.
+ generator.close();
+ return writer.toString();
+ }
+
+ // NOTE(review): isGithubBuild() is never called in this class — looks like a leftover
+ // from tests that moved to the integration-test suite; confirm before removing.
+ private boolean isGithubBuild() {
+ return Optional.ofNullable(System.getenv("GITHUB_JOB")).isPresent();
+ }
+}
diff --git a/java-client/src/test/java11/org/opensearch/client/opensearch/integTest/AbstractBulkIngesterIT.java b/java-client/src/test/java11/org/opensearch/client/opensearch/integTest/AbstractBulkIngesterIT.java
new file mode 100644
index 0000000000..2bd70aa307
--- /dev/null
+++ b/java-client/src/test/java11/org/opensearch/client/opensearch/integTest/AbstractBulkIngesterIT.java
@@ -0,0 +1,139 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.client.opensearch.integTest;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import org.junit.Test;
+import org.opensearch.client.opensearch._helpers.bulk.BulkIngester;
+import org.opensearch.client.opensearch.indices.IndicesStatsResponse;
+
+public abstract class AbstractBulkIngesterIT extends OpenSearchJavaClientTestCase {
+
+ static class AppData {
+ private int intValue;
+ private String msg;
+
+ public int getIntValue() {
+ return intValue;
+ }
+
+ public void setIntValue(int intValue) {
+ this.intValue = intValue;
+ }
+
+ public String getMsg() {
+ return msg;
+ }
+
+ public void setMsg(String msg) {
+ this.msg = msg;
+ }
+ }
+
+ @Test
+ public void testEndToEnd() throws Exception {
+ String index = "bulk-ingester-test";
+
+ BulkIngester<Void> ingester = BulkIngester.of(b -> b.client(javaClient()).globalSettings(s -> s.index(index)));
+
+ AppData appData = new AppData();
+ appData.setIntValue(42);
+ appData.setMsg("Some message");
+
+ ingester.add(_1 -> _1.create(_2 -> _2.id("abc").document(appData)));
+
+ ingester.add(_1 -> _1.create(_2 -> _2.id("def").document(appData)));
+
+ ingester.add(_1 -> _1.update(_2 -> _2.id("gh").docAsUpsert(true).document(appData)));
+
+ // Closing waits until all pending requests are completed
+ ingester.close();
+
+ // Refresh to make documents searchable
+ javaClient().indices().refresh(r -> r.index(index));
+
+ // Verify documents were indexed
+ for (String id : Arrays.asList("abc", "def", "gh")) {
+ assertEquals(42, javaClient().get(b -> b.index(index).id(id), AppData.class).source().getIntValue());
+ }
+
+ // Clean up
+ javaClient().indices().delete(d -> d.index(index));
+ }
+
+ @Test
+ public void testMultiThreadStress() throws InterruptedException, IOException {
+
+ String index = "bulk-ingester-stress-test";
+
+ // DISCLAIMER: this configuration is highly inefficient and only used here to showcase an extreme
+ // situation where the number of adding threads greatly exceeds the number of concurrent requests
+ // handled by the ingester. It's strongly recommended to always tweak maxConcurrentRequests accordingly.
+ BulkIngester<Void> ingester = BulkIngester.of(
+ b -> b.client(javaClient()).globalSettings(s -> s.index(index)).flushInterval(5, TimeUnit.SECONDS)
+ );
+
+ AppData appData = new AppData();
+ appData.setIntValue(42);
+ appData.setMsg("Some message");
+
+ ExecutorService executor = Executors.newFixedThreadPool(50);
+
+ for (int i = 0; i < 100000; i++) {
+ int ii = i;
+ Runnable thread = () -> {
+ int finalI = ii;
+ ingester.add(_1 -> _1.create(_2 -> _2.id(String.valueOf(finalI)).document(appData)));
+ };
+ executor.submit(thread);
+ }
+
+ executor.shutdown();
+ executor.awaitTermination(30, TimeUnit.SECONDS);
+ ingester.close();
+
+ // Refresh to make documents searchable
+ javaClient().indices().refresh(r -> r.index(index));
+
+ IndicesStatsResponse indexStats = javaClient().indices().stats(g -> g.index(index));
+
+ assertEquals(100000, indexStats.indices().get(index).primaries().docs().count());
+
+ // Clean up
+ javaClient().indices().delete(d -> d.index(index));
+ }
+}
diff --git a/java-client/src/test/java11/org/opensearch/client/opensearch/integTest/httpclient5/BulkIngesterIT.java b/java-client/src/test/java11/org/opensearch/client/opensearch/integTest/httpclient5/BulkIngesterIT.java
new file mode 100644
index 0000000000..c169069a04
--- /dev/null
+++ b/java-client/src/test/java11/org/opensearch/client/opensearch/integTest/httpclient5/BulkIngesterIT.java
@@ -0,0 +1,13 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client.opensearch.integTest.httpclient5;
+
+import org.opensearch.client.opensearch.integTest.AbstractBulkIngesterIT;
+
+public class BulkIngesterIT extends AbstractBulkIngesterIT implements HttpClient5TransportSupport {}
diff --git a/samples/src/main/java/org/opensearch/client/samples/BulkIngesterBasics.java b/samples/src/main/java/org/opensearch/client/samples/BulkIngesterBasics.java
new file mode 100644
index 0000000000..5eec1f7b30
--- /dev/null
+++ b/samples/src/main/java/org/opensearch/client/samples/BulkIngesterBasics.java
@@ -0,0 +1,108 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client.samples;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.opensearch.client.opensearch._helpers.bulk.BulkIngester;
+import org.opensearch.client.opensearch._types.mapping.IntegerNumberProperty;
+import org.opensearch.client.opensearch._types.mapping.Property;
+import org.opensearch.client.opensearch._types.mapping.TypeMapping;
+import org.opensearch.client.opensearch._types.query_dsl.Query;
+import org.opensearch.client.opensearch.core.SearchRequest;
+import org.opensearch.client.opensearch.core.SearchResponse;
+import org.opensearch.client.opensearch.indices.CreateIndexRequest;
+import org.opensearch.client.opensearch.indices.DeleteIndexRequest;
+import org.opensearch.client.opensearch.indices.IndexSettings;
+import org.opensearch.client.samples.util.IndexData;
+
+/**
+ * Run with: {@code ./gradlew :samples:run -Dsamples.mainClass=BulkIngesterBasics}
+ */
+public class BulkIngesterBasics {
+ private static final Logger LOGGER = LogManager.getLogger(BulkIngesterBasics.class);
+
+ public static void main(String[] args) {
+ try {
+ var client = SampleClient.create();
+
+ var version = client.info().version();
+ LOGGER.info("Server: {}@{}", version.distribution(), version.number());
+
+ final var indexName = "my-index";
+
+ if (!client.indices().exists(r -> r.index(indexName)).value()) {
+ LOGGER.info("Creating index {}", indexName);
+ IndexSettings settings = new IndexSettings.Builder().numberOfShards(2).numberOfReplicas(1).build();
+ TypeMapping mapping = new TypeMapping.Builder().properties(
+ "age",
+ new Property.Builder().integer(new IntegerNumberProperty.Builder().build()).build()
+ ).build();
+ CreateIndexRequest createIndexRequest = new CreateIndexRequest.Builder().index(indexName)
+ .settings(settings)
+ .mappings(mapping)
+ .build();
+ client.indices().create(createIndexRequest);
+ }
+
+ LOGGER.info("Bulk indexing documents using BulkIngester");
+
+ // Create BulkIngester with custom settings
+ BulkIngester<Void> ingester = BulkIngester.of(b -> b.client(client).maxOperations(100).flushInterval(1, TimeUnit.SECONDS));
+
+ // Add index operations - they are automatically buffered and flushed
+ IndexData doc1 = new IndexData("Document 1", "The text of document 1");
+ ingester.add(op -> op.index(i -> i.index(indexName).id("id1").document(doc1)));
+
+ IndexData doc2 = new IndexData("Document 2", "The text of document 2");
+ ingester.add(op -> op.index(i -> i.index(indexName).id("id2").document(doc2)));
+
+ IndexData doc3 = new IndexData("Document 3", "The text of document 3");
+ ingester.add(op -> op.index(i -> i.index(indexName).id("id3").document(doc3)));
+
+ // Close the ingester to flush remaining operations and wait for completion
+ ingester.close();
+ LOGGER.info("Bulk indexing completed");
+
+ // Refresh index to make documents searchable
+ client.indices().refresh(r -> r.index(indexName));
+
+ Query query = Query.of(qb -> qb.match(mb -> mb.field("title").query(fv -> fv.stringValue("Document"))));
+ final SearchRequest.Builder searchReq = new SearchRequest.Builder().allowPartialSearchResults(false)
+ .index(List.of(indexName))
+ .size(10)
+ .source(sc -> sc.fetch(false))
+ .ignoreThrottled(false)
+ .query(query);
+
+ SearchResponse<IndexData> searchResponse = client.search(searchReq.build(), IndexData.class);
+ LOGGER.info("Found {} documents", searchResponse.hits().hits().size());
+
+ LOGGER.info("Bulk update document using BulkIngester");
+ doc1.setText("Updated Document");
+
+ // Create a new ingester for updates
+ BulkIngester<Void> updateIngester = BulkIngester.of(b -> b.client(client));
+
+ updateIngester.add(op -> op.update(u -> u.index(indexName).id("id1").document(doc1)));
+
+ // Close to flush and wait for completion
+ updateIngester.close();
+ LOGGER.info("Bulk update completed");
+
+ LOGGER.info("Deleting index {}", indexName);
+ DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest.Builder().index(indexName).build();
+ client.indices().delete(deleteIndexRequest);
+ } catch (Exception e) {
+ LOGGER.error("Unexpected exception", e);
+ }
+ }
+}
From 93770141f7ccd68801734af359decf297e55f6e2 Mon Sep 17 00:00:00 2001
From: "opensearch-trigger-bot[bot]"
<98922864+opensearch-trigger-bot[bot]@users.noreply.github.com>
Date: Tue, 16 Dec 2025 19:54:05 -0500
Subject: [PATCH 2/2] Re-generated client code using latest OpenSearch API
specification (#1829)
* Re-generate client code using latest OpenSearch API specification (2025-12-16)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
* Fixing codegen compilation issues
Signed-off-by: Andriy Redko
---------
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Signed-off-by: Andriy Redko
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Andriy Redko
---
.../opensearch/OpenSearchAsyncClientBase.java | 5 +
.../opensearch/OpenSearchClientBase.java | 5 +
.../opensearch/_types/DerivedField.java | 409 +++++++++++
.../client/opensearch/_types/ErrorCause.java | 65 ++
.../client/opensearch/_types/PhaseTook.java | 72 +-
.../opensearch/_types/ShardFailure.java | 28 +
.../_types/aggregations/Aggregation.java | 126 ----
.../aggregations/BucketAggregationBase.java | 171 ++++-
.../aggregations/CardinalityAggregation.java | 36 +-
.../CardinalityExecutionMode.java | 4 +-
.../_types/aggregations/TermsAggregation.java | 105 +--
.../opensearch/_types/mapping/Property.java | 28 +
.../_types/mapping/PropertyBuilders.java | 7 +
.../mapping/SemanticDenseEmbeddingConfig.java | 299 ++++++++
.../_types/mapping/SemanticProperty.java | 485 +++++++++++++
.../SemanticSparseEncodingConfig.java} | 116 +--
.../_types/query_dsl/DecayPlacement.java | 24 +-
.../_types/query_dsl/FieldAndFormat.java | 36 +-
.../_types/query_dsl/GeoDistanceQuery.java | 40 ++
.../_types/query_dsl/HybridQuery.java | 44 +-
.../_types/query_dsl/PinnedQuery.java | 310 --------
.../_types/query_dsl/PinnedQueryVariant.java | 47 --
.../opensearch/_types/query_dsl/Query.java | 28 -
.../_types/query_dsl/QueryBuilders.java | 7 -
.../_types/query_dsl/RangeQuery.java | 66 ++
.../_types/query_dsl/TermsQuery.java | 94 ++-
.../client/opensearch/core/SearchRequest.java | 141 +++-
.../opensearch/core/explain/Explanation.java | 18 +-
.../opensearch/core/search/Highlight.java | 37 +-
.../core/search/HighlightField.java | 49 +-
.../core/search/HighlighterTagsSchema.java | 2 +
.../client/opensearch/core/search/Hit.java | 55 ++
.../opensearch/core/search/HitsMetadata.java | 65 +-
.../opensearch/core/search/InnerHits.java | 28 +-
.../nodes/stats/RepositoryStatsSnapshot.java | 644 +++++++++++++++++
.../client/opensearch/nodes/stats/Stats.java | 27 +-
.../opensearch/ubi/InitializeRequest.java | 157 ++++
.../ubi/OpenSearchUbiAsyncClient.java | 94 +++
.../opensearch/ubi/OpenSearchUbiClient.java | 93 +++
.../_types/mapping/DynamicTemplateTest.java | 27 +
.../_types/query_dsl/DecayFunctionTest.java | 5 +-
.../_types/query_dsl/DecayPlacementTest.java | 3 +-
.../query_dsl/FunctionScoreQueryTest.java | 5 +-
.../_types/query_dsl/PinnedDocTest.java | 22 -
.../_types/query_dsl/PinnedQueryTest.java | 32 -
.../json/PlainJsonSerializableTest.java | 3 +-
.../integTest/AbstractHighlightIT.java | 12 +-
.../AbstractMultiSearchRequestIT.java | 3 +-
.../integTest/AbstractRequestIT.java | 34 +-
java-codegen/build.gradle.kts | 2 +-
java-codegen/opensearch-openapi.yaml | 675 ++++++++++++------
.../transformer/overrides/Overrides.java | 8 +
52 files changed, 3684 insertions(+), 1214 deletions(-)
create mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/_types/DerivedField.java
create mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticDenseEmbeddingConfig.java
create mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticProperty.java
rename java-client/src/generated/java/org/opensearch/client/opensearch/_types/{query_dsl/PinnedDoc.java => mapping/SemanticSparseEncodingConfig.java} (55%)
delete mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedQuery.java
delete mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedQueryVariant.java
create mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/nodes/stats/RepositoryStatsSnapshot.java
create mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/ubi/InitializeRequest.java
create mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/ubi/OpenSearchUbiAsyncClient.java
create mode 100644 java-client/src/generated/java/org/opensearch/client/opensearch/ubi/OpenSearchUbiClient.java
create mode 100644 java-client/src/test/java/org/opensearch/client/opensearch/_types/mapping/DynamicTemplateTest.java
delete mode 100644 java-client/src/test/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedDocTest.java
delete mode 100644 java-client/src/test/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedQueryTest.java
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/OpenSearchAsyncClientBase.java b/java-client/src/generated/java/org/opensearch/client/opensearch/OpenSearchAsyncClientBase.java
index 009dc6cf63..29f6e64550 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/OpenSearchAsyncClientBase.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/OpenSearchAsyncClientBase.java
@@ -126,6 +126,7 @@
import org.opensearch.client.opensearch.security.OpenSearchSecurityAsyncClient;
import org.opensearch.client.opensearch.snapshot.OpenSearchSnapshotAsyncClient;
import org.opensearch.client.opensearch.tasks.OpenSearchTasksAsyncClient;
+import org.opensearch.client.opensearch.ubi.OpenSearchUbiAsyncClient;
import org.opensearch.client.transport.JsonEndpoint;
import org.opensearch.client.transport.OpenSearchTransport;
import org.opensearch.client.transport.TransportOptions;
@@ -212,6 +213,10 @@ public OpenSearchTasksAsyncClient tasks() {
return new OpenSearchTasksAsyncClient(this.transport, this.transportOptions);
}
+ public OpenSearchUbiAsyncClient ubi() {
+ return new OpenSearchUbiAsyncClient(this.transport, this.transportOptions);
+ }
+
// ----- Endpoint: clear_scroll
/**
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/OpenSearchClientBase.java b/java-client/src/generated/java/org/opensearch/client/opensearch/OpenSearchClientBase.java
index 46557ed513..e45e1661dd 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/OpenSearchClientBase.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/OpenSearchClientBase.java
@@ -125,6 +125,7 @@
import org.opensearch.client.opensearch.security.OpenSearchSecurityClient;
import org.opensearch.client.opensearch.snapshot.OpenSearchSnapshotClient;
import org.opensearch.client.opensearch.tasks.OpenSearchTasksClient;
+import org.opensearch.client.opensearch.ubi.OpenSearchUbiClient;
import org.opensearch.client.transport.JsonEndpoint;
import org.opensearch.client.transport.OpenSearchTransport;
import org.opensearch.client.transport.TransportOptions;
@@ -211,6 +212,10 @@ public OpenSearchTasksClient tasks() {
return new OpenSearchTasksClient(this.transport, this.transportOptions);
}
+ public OpenSearchUbiClient ubi() {
+ return new OpenSearchUbiClient(this.transport, this.transportOptions);
+ }
+
// ----- Endpoint: clear_scroll
/**
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/DerivedField.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/DerivedField.java
new file mode 100644
index 0000000000..eb2e081559
--- /dev/null
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/DerivedField.java
@@ -0,0 +1,409 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+//----------------------------------------------------
+// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
+//----------------------------------------------------
+
+package org.opensearch.client.opensearch._types;
+
+import jakarta.json.stream.JsonGenerator;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Function;
+import javax.annotation.Generated;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import org.opensearch.client.json.JsonData;
+import org.opensearch.client.json.JsonpDeserializable;
+import org.opensearch.client.json.JsonpDeserializer;
+import org.opensearch.client.json.JsonpMapper;
+import org.opensearch.client.json.ObjectBuilderDeserializer;
+import org.opensearch.client.json.ObjectDeserializer;
+import org.opensearch.client.json.PlainJsonSerializable;
+import org.opensearch.client.util.ApiTypeHelper;
+import org.opensearch.client.util.CopyableBuilder;
+import org.opensearch.client.util.ObjectBuilder;
+import org.opensearch.client.util.ObjectBuilderBase;
+import org.opensearch.client.util.ToCopyableBuilder;
+
+// typedef: _types.DerivedField
+
+@JsonpDeserializable
+@Generated("org.opensearch.client.codegen.CodeGenerator")
+public class DerivedField implements PlainJsonSerializable, ToCopyableBuilder<DerivedField.Builder, DerivedField> {
+
+ @Nullable
+ private final String format;
+
+ @Nullable
+ private final Boolean ignoreMalformed;
+
+ @Nonnull
+ private final String name;
+
+ @Nullable
+ private final String prefilterField;
+
+ @Nonnull
+ private final Map<String, JsonData> properties;
+
+ @Nonnull
+ private final Script script;
+
+ @Nonnull
+ private final String type;
+
+ // ---------------------------------------------------------------------------------------------
+
+ private DerivedField(Builder builder) {
+ this.format = builder.format;
+ this.ignoreMalformed = builder.ignoreMalformed;
+ this.name = ApiTypeHelper.requireNonNull(builder.name, this, "name");
+ this.prefilterField = builder.prefilterField;
+ this.properties = ApiTypeHelper.unmodifiable(builder.properties);
+ this.script = ApiTypeHelper.requireNonNull(builder.script, this, "script");
+ this.type = ApiTypeHelper.requireNonNull(builder.type, this, "type");
+ }
+
+ public static DerivedField of(Function<DerivedField.Builder, ObjectBuilder<DerivedField>> fn) {
+ return fn.apply(new Builder()).build();
+ }
+
+ /**
+ * API name: {@code format}
+ */
+ @Nullable
+ public final String format() {
+ return this.format;
+ }
+
+ /**
+ * API name: {@code ignore_malformed}
+ */
+ @Nullable
+ public final Boolean ignoreMalformed() {
+ return this.ignoreMalformed;
+ }
+
+ /**
+ * Required - API name: {@code name}
+ */
+ @Nonnull
+ public final String name() {
+ return this.name;
+ }
+
+ /**
+ * API name: {@code prefilter_field}
+ */
+ @Nullable
+ public final String prefilterField() {
+ return this.prefilterField;
+ }
+
+ /**
+ * API name: {@code properties}
+ */
+ @Nonnull
+ public final Map<String, JsonData> properties() {
+ return this.properties;
+ }
+
+ /**
+ * Required - API name: {@code script}
+ */
+ @Nonnull
+ public final Script script() {
+ return this.script;
+ }
+
+ /**
+ * Required - API name: {@code type}
+ */
+ @Nonnull
+ public final String type() {
+ return this.type;
+ }
+
+ /**
+ * Serialize this object to JSON.
+ */
+ @Override
+ public void serialize(JsonGenerator generator, JsonpMapper mapper) {
+ generator.writeStartObject();
+ serializeInternal(generator, mapper);
+ generator.writeEnd();
+ }
+
+ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
+ if (this.format != null) {
+ generator.writeKey("format");
+ generator.write(this.format);
+ }
+
+ if (this.ignoreMalformed != null) {
+ generator.writeKey("ignore_malformed");
+ generator.write(this.ignoreMalformed);
+ }
+
+ generator.writeKey("name");
+ generator.write(this.name);
+
+ if (this.prefilterField != null) {
+ generator.writeKey("prefilter_field");
+ generator.write(this.prefilterField);
+ }
+
+ if (ApiTypeHelper.isDefined(this.properties)) {
+ generator.writeKey("properties");
+ generator.writeStartObject();
+ for (Map.Entry<String, JsonData> item0 : this.properties.entrySet()) {
+ generator.writeKey(item0.getKey());
+ item0.getValue().serialize(generator, mapper);
+ }
+ generator.writeEnd();
+ }
+
+ generator.writeKey("script");
+ this.script.serialize(generator, mapper);
+
+ generator.writeKey("type");
+ generator.write(this.type);
+ }
+
+ // ---------------------------------------------------------------------------------------------
+
+ @Override
+ @Nonnull
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ @Nonnull
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ /**
+ * Builder for {@link DerivedField}.
+ */
+ public static class Builder extends ObjectBuilderBase implements CopyableBuilder<Builder, DerivedField> {
+ @Nullable
+ private String format;
+ @Nullable
+ private Boolean ignoreMalformed;
+ private String name;
+ @Nullable
+ private String prefilterField;
+ @Nullable
+ private Map<String, JsonData> properties;
+ private Script script;
+ private String type;
+
+ public Builder() {}
+
+ private Builder(DerivedField o) {
+ this.format = o.format;
+ this.ignoreMalformed = o.ignoreMalformed;
+ this.name = o.name;
+ this.prefilterField = o.prefilterField;
+ this.properties = _mapCopy(o.properties);
+ this.script = o.script;
+ this.type = o.type;
+ }
+
+ private Builder(Builder o) {
+ this.format = o.format;
+ this.ignoreMalformed = o.ignoreMalformed;
+ this.name = o.name;
+ this.prefilterField = o.prefilterField;
+ this.properties = _mapCopy(o.properties);
+ this.script = o.script;
+ this.type = o.type;
+ }
+
+ @Override
+ @Nonnull
+ public Builder copy() {
+ return new Builder(this);
+ }
+
+ /**
+ * API name: {@code format}
+ */
+ @Nonnull
+ public final Builder format(@Nullable String value) {
+ this.format = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code ignore_malformed}
+ */
+ @Nonnull
+ public final Builder ignoreMalformed(@Nullable Boolean value) {
+ this.ignoreMalformed = value;
+ return this;
+ }
+
+ /**
+ * Required - API name: {@code name}
+ */
+ @Nonnull
+ public final Builder name(String value) {
+ this.name = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code prefilter_field}
+ */
+ @Nonnull
+ public final Builder prefilterField(@Nullable String value) {
+ this.prefilterField = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code properties}
+ *
+ *
+ * Adds all elements of map to properties.
+ *
+ */
+ @Nonnull
+ public final Builder properties(Map<String, JsonData> map) {
+ this.properties = _mapPutAll(this.properties, map);
+ return this;
+ }
+
+ /**
+ * API name: {@code properties}
+ *
+ *
+ * Adds an entry to properties.
+ *
+ */
+ @Nonnull
+ public final Builder properties(String key, JsonData value) {
+ this.properties = _mapPut(this.properties, key, value);
+ return this;
+ }
+
+ /**
+ * Required - API name: {@code script}
+ */
+ @Nonnull
+ public final Builder script(Script value) {
+ this.script = value;
+ return this;
+ }
+
+ /**
+ * Required - API name: {@code script}
+ */
+ @Nonnull
+ public final Builder script(Function<Script.Builder, ObjectBuilder<Script>> fn) {
+ return script(fn.apply(new Script.Builder()).build());
+ }
+
+ /**
+ * Required - API name: {@code type}
+ */
+ @Nonnull
+ public final Builder type(String value) {
+ this.type = value;
+ return this;
+ }
+
+ /**
+ * Builds a {@link DerivedField}.
+ *
+ * @throws NullPointerException if some of the required fields are null.
+ */
+ @Override
+ @Nonnull
+ public DerivedField build() {
+ _checkSingleUse();
+
+ return new DerivedField(this);
+ }
+ }
+
+ // ---------------------------------------------------------------------------------------------
+
+ /**
+ * Json deserializer for {@link DerivedField}
+ */
+ public static final JsonpDeserializer<DerivedField> _DESERIALIZER = ObjectBuilderDeserializer.lazy(
+ Builder::new,
+ DerivedField::setupDerivedFieldDeserializer
+ );
+
+ protected static void setupDerivedFieldDeserializer(ObjectDeserializer<DerivedField.Builder> op) {
+ op.add(Builder::format, JsonpDeserializer.stringDeserializer(), "format");
+ op.add(Builder::ignoreMalformed, JsonpDeserializer.booleanDeserializer(), "ignore_malformed");
+ op.add(Builder::name, JsonpDeserializer.stringDeserializer(), "name");
+ op.add(Builder::prefilterField, JsonpDeserializer.stringDeserializer(), "prefilter_field");
+ op.add(Builder::properties, JsonpDeserializer.stringMapDeserializer(JsonData._DESERIALIZER), "properties");
+ op.add(Builder::script, Script._DESERIALIZER, "script");
+ op.add(Builder::type, JsonpDeserializer.stringDeserializer(), "type");
+ }
+
+ @Override
+ public int hashCode() {
+ int result = 17;
+ result = 31 * result + Objects.hashCode(this.format);
+ result = 31 * result + Objects.hashCode(this.ignoreMalformed);
+ result = 31 * result + this.name.hashCode();
+ result = 31 * result + Objects.hashCode(this.prefilterField);
+ result = 31 * result + Objects.hashCode(this.properties);
+ result = 31 * result + this.script.hashCode();
+ result = 31 * result + this.type.hashCode();
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || this.getClass() != o.getClass()) return false;
+ DerivedField other = (DerivedField) o;
+ return Objects.equals(this.format, other.format)
+ && Objects.equals(this.ignoreMalformed, other.ignoreMalformed)
+ && this.name.equals(other.name)
+ && Objects.equals(this.prefilterField, other.prefilterField)
+ && Objects.equals(this.properties, other.properties)
+ && this.script.equals(other.script)
+ && this.type.equals(other.type);
+ }
+}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/ErrorCause.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/ErrorCause.java
index f7e967d882..68d84e9eab 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/ErrorCause.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/ErrorCause.java
@@ -67,6 +67,9 @@ public class ErrorCause implements PlainJsonSerializable, ToCopyableBuilder> header;
+
@Nonnull
private final Map<String, JsonData> metadata;
@@ -89,6 +92,7 @@ public class ErrorCause implements PlainJsonSerializable, ToCopyableBuilder> header() {
+ return this.header;
+ }
+
/**
* Any additional information about the error.
*/
@@ -186,6 +198,22 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
this.causedBy.serialize(generator, mapper);
}
+ if (ApiTypeHelper.isDefined(this.header)) {
+ generator.writeKey("header");
+ generator.writeStartObject();
+ for (Map.Entry<String, List<String>> item0 : this.header.entrySet()) {
+ generator.writeKey(item0.getKey());
+ generator.writeStartArray();
+ if (item0.getValue() != null) {
+ for (String item1 : item0.getValue()) {
+ generator.write(item1);
+ }
+ }
+ generator.writeEnd();
+ }
+ generator.writeEnd();
+ }
+
if (this.reason != null) {
generator.writeKey("reason");
generator.write(this.reason);
@@ -238,6 +266,8 @@ public static class Builder extends ObjectBuilderBase implements CopyableBuilder
@Nullable
private ErrorCause causedBy;
@Nullable
+ private Map<String, List<String>> header;
+ @Nullable
private Map<String, JsonData> metadata;
@Nullable
private String reason;
@@ -253,6 +283,7 @@ public Builder() {}
private Builder(ErrorCause o) {
this.causedBy = o.causedBy;
+ this.header = _mapCopy(o.header);
this.metadata = _mapCopy(o.metadata);
this.reason = o.reason;
this.rootCause = _listCopy(o.rootCause);
@@ -263,6 +294,7 @@ private Builder(ErrorCause o) {
private Builder(Builder o) {
this.causedBy = o.causedBy;
+ this.header = _mapCopy(o.header);
this.metadata = _mapCopy(o.metadata);
this.reason = o.reason;
this.rootCause = _listCopy(o.rootCause);
@@ -294,6 +326,32 @@ public final Builder causedBy(Function
+ * Adds all elements of map to header.
+ *
+ */
+ @Nonnull
+ public final Builder header(Map<String, List<String>> map) {
+ this.header = _mapPutAll(this.header, map);
+ return this;
+ }
+
+ /**
+ * API name: {@code header}
+ *
+ *
+ * Adds an entry to header.
+ *
+ */
+ @Nonnull
+ public final Builder header(String key, List<String> value) {
+ this.header = _mapPut(this.header, key, value);
+ return this;
+ }
+
/**
* Any additional information about the error.
*
@@ -458,6 +516,11 @@ public ErrorCause build() {
protected static void setupErrorCauseDeserializer(ObjectDeserializer op) {
op.add(Builder::causedBy, ErrorCause._DESERIALIZER, "caused_by");
+ op.add(
+ Builder::header,
+ JsonpDeserializer.stringMapDeserializer(JsonpDeserializer.arrayDeserializer(JsonpDeserializer.stringDeserializer())),
+ "header"
+ );
op.add(Builder::reason, JsonpDeserializer.stringDeserializer(), "reason");
op.add(Builder::rootCause, JsonpDeserializer.arrayDeserializer(ErrorCause._DESERIALIZER), "root_cause");
op.add(Builder::stackTrace, JsonpDeserializer.stringDeserializer(), "stack_trace");
@@ -475,6 +538,7 @@ protected static void setupErrorCauseDeserializer(ObjectDeserializer {
- private final int canMatch;
+ private final long canMatch;
- private final int dfsPreQuery;
+ private final long dfsPreQuery;
- private final int dfsQuery;
+ private final long dfsQuery;
- private final int expand;
+ private final long expand;
- private final int fetch;
+ private final long fetch;
- private final int query;
+ private final long query;
// ---------------------------------------------------------------------------------------------
@@ -94,7 +94,7 @@ public static PhaseTook of(Function>
* API name: {@code can_match}
*
*/
- public final int canMatch() {
+ public final long canMatch() {
return this.canMatch;
}
@@ -104,7 +104,7 @@ public final int canMatch() {
* API name: {@code dfs_pre_query}
*
*/
- public final int dfsPreQuery() {
+ public final long dfsPreQuery() {
return this.dfsPreQuery;
}
@@ -114,7 +114,7 @@ public final int dfsPreQuery() {
* API name: {@code dfs_query}
*
*/
- public final int dfsQuery() {
+ public final long dfsQuery() {
return this.dfsQuery;
}
@@ -124,7 +124,7 @@ public final int dfsQuery() {
* API name: {@code expand}
*
*/
- public final int expand() {
+ public final long expand() {
return this.expand;
}
@@ -134,7 +134,7 @@ public final int expand() {
* API name: {@code fetch}
*
*/
- public final int fetch() {
+ public final long fetch() {
return this.fetch;
}
@@ -144,7 +144,7 @@ public final int fetch() {
* API name: {@code query}
*
*/
- public final int query() {
+ public final long query() {
return this.query;
}
@@ -195,12 +195,12 @@ public static Builder builder() {
* Builder for {@link PhaseTook}.
*/
public static class Builder extends ObjectBuilderBase implements CopyableBuilder {
- private Integer canMatch;
- private Integer dfsPreQuery;
- private Integer dfsQuery;
- private Integer expand;
- private Integer fetch;
- private Integer query;
+ private Long canMatch;
+ private Long dfsPreQuery;
+ private Long dfsQuery;
+ private Long expand;
+ private Long fetch;
+ private Long query;
public Builder() {}
@@ -235,7 +235,7 @@ public Builder copy() {
*
*/
@Nonnull
- public final Builder canMatch(int value) {
+ public final Builder canMatch(long value) {
this.canMatch = value;
return this;
}
@@ -247,7 +247,7 @@ public final Builder canMatch(int value) {
*
*/
@Nonnull
- public final Builder dfsPreQuery(int value) {
+ public final Builder dfsPreQuery(long value) {
this.dfsPreQuery = value;
return this;
}
@@ -259,7 +259,7 @@ public final Builder dfsPreQuery(int value) {
*
*/
@Nonnull
- public final Builder dfsQuery(int value) {
+ public final Builder dfsQuery(long value) {
this.dfsQuery = value;
return this;
}
@@ -271,7 +271,7 @@ public final Builder dfsQuery(int value) {
*
*/
@Nonnull
- public final Builder expand(int value) {
+ public final Builder expand(long value) {
this.expand = value;
return this;
}
@@ -283,7 +283,7 @@ public final Builder expand(int value) {
*
*/
@Nonnull
- public final Builder fetch(int value) {
+ public final Builder fetch(long value) {
this.fetch = value;
return this;
}
@@ -295,7 +295,7 @@ public final Builder fetch(int value) {
*
*/
@Nonnull
- public final Builder query(int value) {
+ public final Builder query(long value) {
this.query = value;
return this;
}
@@ -325,23 +325,23 @@ public PhaseTook build() {
);
protected static void setupPhaseTookDeserializer(ObjectDeserializer op) {
- op.add(Builder::canMatch, JsonpDeserializer.integerDeserializer(), "can_match");
- op.add(Builder::dfsPreQuery, JsonpDeserializer.integerDeserializer(), "dfs_pre_query");
- op.add(Builder::dfsQuery, JsonpDeserializer.integerDeserializer(), "dfs_query");
- op.add(Builder::expand, JsonpDeserializer.integerDeserializer(), "expand");
- op.add(Builder::fetch, JsonpDeserializer.integerDeserializer(), "fetch");
- op.add(Builder::query, JsonpDeserializer.integerDeserializer(), "query");
+ op.add(Builder::canMatch, JsonpDeserializer.longDeserializer(), "can_match");
+ op.add(Builder::dfsPreQuery, JsonpDeserializer.longDeserializer(), "dfs_pre_query");
+ op.add(Builder::dfsQuery, JsonpDeserializer.longDeserializer(), "dfs_query");
+ op.add(Builder::expand, JsonpDeserializer.longDeserializer(), "expand");
+ op.add(Builder::fetch, JsonpDeserializer.longDeserializer(), "fetch");
+ op.add(Builder::query, JsonpDeserializer.longDeserializer(), "query");
}
@Override
public int hashCode() {
int result = 17;
- result = 31 * result + Integer.hashCode(this.canMatch);
- result = 31 * result + Integer.hashCode(this.dfsPreQuery);
- result = 31 * result + Integer.hashCode(this.dfsQuery);
- result = 31 * result + Integer.hashCode(this.expand);
- result = 31 * result + Integer.hashCode(this.fetch);
- result = 31 * result + Integer.hashCode(this.query);
+ result = 31 * result + Long.hashCode(this.canMatch);
+ result = 31 * result + Long.hashCode(this.dfsPreQuery);
+ result = 31 * result + Long.hashCode(this.dfsQuery);
+ result = 31 * result + Long.hashCode(this.expand);
+ result = 31 * result + Long.hashCode(this.fetch);
+ result = 31 * result + Long.hashCode(this.query);
return result;
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/ShardFailure.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/ShardFailure.java
index c8dd0863ec..c46b364053 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/ShardFailure.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/ShardFailure.java
@@ -66,6 +66,8 @@ public class ShardFailure implements PlainJsonSerializable, ToCopyableBuilder op) {
op.add(Builder::index, JsonpDeserializer.stringDeserializer(), "index");
op.add(Builder::node, JsonpDeserializer.stringDeserializer(), "node");
+ op.add(Builder::primary, JsonpDeserializer.booleanDeserializer(), "primary");
op.add(Builder::reason, ErrorCause._DESERIALIZER, "reason");
op.add(Builder::shard, JsonpDeserializer.integerDeserializer(), "shard");
op.add(Builder::status, JsonpDeserializer.stringDeserializer(), "status");
@@ -300,6 +326,7 @@ public int hashCode() {
int result = 17;
result = 31 * result + Objects.hashCode(this.index);
result = 31 * result + Objects.hashCode(this.node);
+ result = 31 * result + Boolean.hashCode(this.primary);
result = 31 * result + this.reason.hashCode();
result = 31 * result + Integer.hashCode(this.shard);
result = 31 * result + Objects.hashCode(this.status);
@@ -313,6 +340,7 @@ public boolean equals(Object o) {
ShardFailure other = (ShardFailure) o;
return Objects.equals(this.index, other.index)
&& Objects.equals(this.node, other.node)
+ && this.primary == other.primary
&& this.reason.equals(other.reason)
&& this.shard == other.shard
&& Objects.equals(this.status, other.status);
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/Aggregation.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/Aggregation.java
index 661b984988..57994ec30d 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/Aggregation.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/Aggregation.java
@@ -159,21 +159,16 @@ public final AggregationVariant _get() {
return _value;
}
- @Nonnull
- private final Map aggregations;
-
@Nonnull
private final Map meta;
public Aggregation(AggregationVariant value) {
this._kind = ApiTypeHelper.requireNonNull(value._aggregationKind(), this, "");
this._value = ApiTypeHelper.requireNonNull(value, this, "");
- this.aggregations = null;
this.meta = null;
}
private Aggregation(Builder builder) {
- this.aggregations = ApiTypeHelper.unmodifiable(builder.aggregations);
this.meta = ApiTypeHelper.unmodifiable(builder.meta);
this._kind = ApiTypeHelper.requireNonNull(builder._kind, builder, "");
this._value = ApiTypeHelper.requireNonNull(builder._value, builder, "");
@@ -183,17 +178,6 @@ public static Aggregation of(Function
- * API name: {@code aggregations}
- *
- */
- @Nonnull
- public final Map aggregations() {
- return this.aggregations;
- }
-
/**
* API name: {@code meta}
*/
@@ -1245,16 +1229,6 @@ public WeightedAverageAggregation weightedAvg() {
@Override
public void serialize(JsonGenerator generator, JsonpMapper mapper) {
generator.writeStartObject();
- if (ApiTypeHelper.isDefined(this.aggregations)) {
- generator.writeKey("aggregations");
- generator.writeStartObject();
- for (Map.Entry item0 : this.aggregations.entrySet()) {
- generator.writeKey(item0.getKey());
- item0.getValue().serialize(generator, mapper);
- }
- generator.writeEnd();
- }
-
if (ApiTypeHelper.isDefined(this.meta)) {
generator.writeKey("meta");
generator.writeStartObject();
@@ -1285,66 +1259,16 @@ public static class Builder extends ObjectBuilderBase {
private Kind _kind;
private AggregationVariant _value;
@Nullable
- private Map aggregations;
- @Nullable
private Map meta;
public Builder() {}
private Builder(Aggregation o) {
- this.aggregations = _mapCopy(o.aggregations);
this.meta = _mapCopy(o.meta);
this._kind = o._kind;
this._value = o._value;
}
- /**
- * Sub-aggregations for this aggregation. Only applies to bucket aggregations.
- *
- * API name: {@code aggregations}
- *
- *
- *
- * Adds all elements of map to aggregations.
- *
- */
- @Nonnull
- public final Builder aggregations(Map map) {
- this.aggregations = _mapPutAll(this.aggregations, map);
- return this;
- }
-
- /**
- * Sub-aggregations for this aggregation. Only applies to bucket aggregations.
- *
- * API name: {@code aggregations}
- *
- *
- *
- * Adds an entry to aggregations.
- *
- */
- @Nonnull
- public final Builder aggregations(String key, Aggregation value) {
- this.aggregations = _mapPut(this.aggregations, key, value);
- return this;
- }
-
- /**
- * Sub-aggregations for this aggregation. Only applies to bucket aggregations.
- *
- * API name: {@code aggregations}
- *
- *
- *
- * Adds a value to aggregations using a builder lambda.
- *
- */
- @Nonnull
- public final Builder aggregations(String key, Function> fn) {
- return aggregations(key, fn.apply(new Aggregation.Builder()).build());
- }
-
/**
* API name: {@code meta}
*
@@ -2055,53 +1979,6 @@ protected Aggregation build() {
public class ContainerBuilder implements ObjectBuilder {
private ContainerBuilder() {}
- /**
- * Sub-aggregations for this aggregation. Only applies to bucket aggregations.
- *
- * API name: {@code aggregations}
- *
- *
- *
- * Adds all elements of map to aggregations.
- *
- */
- @Nonnull
- public final ContainerBuilder aggregations(Map map) {
- Builder.this.aggregations = _mapPutAll(Builder.this.aggregations, map);
- return this;
- }
-
- /**
- * Sub-aggregations for this aggregation. Only applies to bucket aggregations.
- *
- * API name: {@code aggregations}
- *
- *
- *
- * Adds an entry to aggregations.
- *
- */
- @Nonnull
- public final ContainerBuilder aggregations(String key, Aggregation value) {
- Builder.this.aggregations = _mapPut(Builder.this.aggregations, key, value);
- return this;
- }
-
- /**
- * Sub-aggregations for this aggregation. Only applies to bucket aggregations.
- *
- * API name: {@code aggregations}
- *
- *
- *
- * Adds a value to aggregations using a builder lambda.
- *
- */
- @Nonnull
- public final ContainerBuilder aggregations(String key, Function> fn) {
- return aggregations(key, fn.apply(new Aggregation.Builder()).build());
- }
-
/**
* API name: {@code meta}
*
@@ -2136,7 +2013,6 @@ public Aggregation build() {
}
protected static void setupAggregationDeserializer(ObjectDeserializer op) {
- op.add(Builder::aggregations, JsonpDeserializer.stringMapDeserializer(Aggregation._DESERIALIZER), "aggregations", "aggs");
op.add(Builder::meta, JsonpDeserializer.stringMapDeserializer(JsonData._DESERIALIZER), "meta");
op.add(Builder::adjacencyMatrix, AdjacencyMatrixAggregation._DESERIALIZER, "adjacency_matrix");
op.add(Builder::autoDateHistogram, AutoDateHistogramAggregation._DESERIALIZER, "auto_date_histogram");
@@ -2216,7 +2092,6 @@ public int hashCode() {
int result = 17;
result = 31 * result + Objects.hashCode(this._kind);
result = 31 * result + Objects.hashCode(this._value);
- result = 31 * result + Objects.hashCode(this.aggregations);
result = 31 * result + Objects.hashCode(this.meta);
return result;
}
@@ -2228,7 +2103,6 @@ public boolean equals(Object o) {
Aggregation other = (Aggregation) o;
return Objects.equals(this._kind, other._kind)
&& Objects.equals(this._value, other._value)
- && Objects.equals(this.aggregations, other.aggregations)
&& Objects.equals(this.meta, other.meta);
}
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/BucketAggregationBase.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/BucketAggregationBase.java
index 4ab6fd521f..c803a45540 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/BucketAggregationBase.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/BucketAggregationBase.java
@@ -36,35 +36,199 @@
package org.opensearch.client.opensearch._types.aggregations;
+import jakarta.json.stream.JsonGenerator;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Function;
import javax.annotation.Generated;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import org.opensearch.client.json.JsonpDeserializer;
+import org.opensearch.client.json.JsonpMapper;
import org.opensearch.client.json.ObjectDeserializer;
+import org.opensearch.client.util.ApiTypeHelper;
+import org.opensearch.client.util.ObjectBuilder;
// typedef: _types.aggregations.BucketAggregationBase
@Generated("org.opensearch.client.codegen.CodeGenerator")
public abstract class BucketAggregationBase extends AggregationBase {
+ @Nonnull
+ private final Map aggregations;
+
+ @Nonnull
+ private final Map aggs;
+
// ---------------------------------------------------------------------------------------------
protected BucketAggregationBase(AbstractBuilder> builder) {
super(builder);
+ this.aggregations = ApiTypeHelper.unmodifiable(builder.aggregations);
+ this.aggs = ApiTypeHelper.unmodifiable(builder.aggs);
+ }
+
+ /**
+ * Sub-aggregations for this bucket aggregation
+ *
+ * API name: {@code aggregations}
+ *
+ */
+ @Nonnull
+ public final Map aggregations() {
+ return this.aggregations;
+ }
+
+ /**
+ * Sub-aggregations for this bucket aggregation
+ *
+ * API name: {@code aggs}
+ *
+ */
+ @Nonnull
+ public final Map aggs() {
+ return this.aggs;
+ }
+
+ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
+ super.serializeInternal(generator, mapper);
+ if (ApiTypeHelper.isDefined(this.aggregations)) {
+ generator.writeKey("aggregations");
+ generator.writeStartObject();
+ for (Map.Entry item0 : this.aggregations.entrySet()) {
+ generator.writeKey(item0.getKey());
+ item0.getValue().serialize(generator, mapper);
+ }
+ generator.writeEnd();
+ }
+
+ if (ApiTypeHelper.isDefined(this.aggs)) {
+ generator.writeKey("aggs");
+ generator.writeStartObject();
+ for (Map.Entry item0 : this.aggs.entrySet()) {
+ generator.writeKey(item0.getKey());
+ item0.getValue().serialize(generator, mapper);
+ }
+ generator.writeEnd();
+ }
}
// ---------------------------------------------------------------------------------------------
public abstract static class AbstractBuilder> extends AggregationBase.AbstractBuilder<
BuilderT> {
+ @Nullable
+ private Map aggregations;
+ @Nullable
+ private Map aggs;
protected AbstractBuilder() {}
protected AbstractBuilder(BucketAggregationBase o) {
super(o);
+ this.aggregations = _mapCopy(o.aggregations);
+ this.aggs = _mapCopy(o.aggs);
}
protected AbstractBuilder(AbstractBuilder o) {
super(o);
+ this.aggregations = _mapCopy(o.aggregations);
+ this.aggs = _mapCopy(o.aggs);
}
+ /**
+ * Sub-aggregations for this bucket aggregation
+ *
+ * API name: {@code aggregations}
+ *
+ *
+ *
+ * Adds all elements of map to aggregations.
+ *
+ */
+ @Nonnull
+ public final BuilderT aggregations(Map map) {
+ this.aggregations = _mapPutAll(this.aggregations, map);
+ return self();
+ }
+
+ /**
+ * Sub-aggregations for this bucket aggregation
+ *
+ * API name: {@code aggregations}
+ *
+ *
+ *
+ * Adds an entry to aggregations.
+ *
+ */
+ @Nonnull
+ public final BuilderT aggregations(String key, Aggregation value) {
+ this.aggregations = _mapPut(this.aggregations, key, value);
+ return self();
+ }
+
+ /**
+ * Sub-aggregations for this bucket aggregation
+ *
+ * API name: {@code aggregations}
+ *
+ *
+ *
+ * Adds a value to aggregations using a builder lambda.
+ *
+ */
+ @Nonnull
+ public final BuilderT aggregations(String key, Function> fn) {
+ return aggregations(key, fn.apply(new Aggregation.Builder()).build());
+ }
+
+ /**
+ * Sub-aggregations for this bucket aggregation
+ *
+ * API name: {@code aggs}
+ *
+ *
+ *
+ * Adds all elements of map to aggs.
+ *
+ */
+ @Nonnull
+ public final BuilderT aggs(Map map) {
+ this.aggs = _mapPutAll(this.aggs, map);
+ return self();
+ }
+
+ /**
+ * Sub-aggregations for this bucket aggregation
+ *
+ * API name: {@code aggs}
+ *
+ *
+ *
+ * Adds an entry to aggs.
+ *
+ */
+ @Nonnull
+ public final BuilderT aggs(String key, Aggregation value) {
+ this.aggs = _mapPut(this.aggs, key, value);
+ return self();
+ }
+
+ /**
+ * Sub-aggregations for this bucket aggregation
+ *
+ * API name: {@code aggs}
+ *
+ *
+ *
+ * Adds a value to aggs using a builder lambda.
+ *
+ */
+ @Nonnull
+ public final BuilderT aggs(String key, Function> fn) {
+ return aggs(key, fn.apply(new Aggregation.Builder()).build());
+ }
}
// ---------------------------------------------------------------------------------------------
@@ -73,11 +237,15 @@ protected static > void setupBucketAg
ObjectDeserializer op
) {
setupAggregationBaseDeserializer(op);
+ op.add(AbstractBuilder::aggregations, JsonpDeserializer.stringMapDeserializer(Aggregation._DESERIALIZER), "aggregations");
+ op.add(AbstractBuilder::aggs, JsonpDeserializer.stringMapDeserializer(Aggregation._DESERIALIZER), "aggs");
}
@Override
public int hashCode() {
int result = super.hashCode();
+ result = 31 * result + Objects.hashCode(this.aggregations);
+ result = 31 * result + Objects.hashCode(this.aggs);
return result;
}
@@ -88,6 +256,7 @@ public boolean equals(Object o) {
}
if (this == o) return true;
if (o == null || this.getClass() != o.getClass()) return false;
- return true;
+ BucketAggregationBase other = (BucketAggregationBase) o;
+ return Objects.equals(this.aggregations, other.aggregations) && Objects.equals(this.aggs, other.aggs);
}
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/CardinalityAggregation.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/CardinalityAggregation.java
index ab3f5f2ea5..46adb8cbee 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/CardinalityAggregation.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/aggregations/CardinalityAggregation.java
@@ -66,16 +66,12 @@ public class CardinalityAggregation extends MetricAggregationBase
@Nullable
private final Integer precisionThreshold;
- @Nullable
- private final Boolean rehash;
-
// ---------------------------------------------------------------------------------------------
private CardinalityAggregation(Builder builder) {
super(builder);
this.executionHint = builder.executionHint;
this.precisionThreshold = builder.precisionThreshold;
- this.rehash = builder.rehash;
}
public static CardinalityAggregation of(Function> fn) {
@@ -109,14 +105,6 @@ public final Integer precisionThreshold() {
return this.precisionThreshold;
}
- /**
- * API name: {@code rehash}
- */
- @Nullable
- public final Boolean rehash() {
- return this.rehash;
- }
-
protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
super.serializeInternal(generator, mapper);
if (this.executionHint != null) {
@@ -128,11 +116,6 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
generator.writeKey("precision_threshold");
generator.write(this.precisionThreshold);
}
-
- if (this.rehash != null) {
- generator.writeKey("rehash");
- generator.write(this.rehash);
- }
}
// ---------------------------------------------------------------------------------------------
@@ -158,8 +141,6 @@ public static class Builder extends MetricAggregationBase.AbstractBuilder> order;
@Nullable
private final Script script;
+ @Nullable
+ private final Integer shardMinDocCount;
+
@Nullable
private final Integer shardSize;
@@ -126,10 +123,9 @@ private TermsAggregation(Builder builder) {
this.include = builder.include;
this.minDocCount = builder.minDocCount;
this.missing = builder.missing;
- this.missingBucket = builder.missingBucket;
- this.missingOrder = builder.missingOrder;
this.order = ApiTypeHelper.unmodifiable(builder.order);
this.script = builder.script;
+ this.shardMinDocCount = builder.shardMinDocCount;
this.shardSize = builder.shardSize;
this.showTermDocCountError = builder.showTermDocCountError;
this.size = builder.size;
@@ -215,22 +211,6 @@ public final FieldValue missing() {
return this.missing;
}
- /**
- * API name: {@code missing_bucket}
- */
- @Nullable
- public final Boolean missingBucket() {
- return this.missingBucket;
- }
-
- /**
- * API name: {@code missing_order}
- */
- @Nullable
- public final MissingOrder missingOrder() {
- return this.missingOrder;
- }
-
/**
* API name: {@code order}
*/
@@ -247,6 +227,17 @@ public final Script script() {
return this.script;
}
+ /**
+ * The minimum number of documents in a bucket on each shard for it to be returned.
+ *
+ * API name: {@code shard_min_doc_count}
+ *
+ */
+ @Nullable
+ public final Integer shardMinDocCount() {
+ return this.shardMinDocCount;
+ }
+
/**
* The number of candidate terms produced by each shard. By default, shard_size will be automatically estimated based on
* the number of shards and the size parameter.
@@ -335,16 +326,6 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
this.missing.serialize(generator, mapper);
}
- if (this.missingBucket != null) {
- generator.writeKey("missing_bucket");
- generator.write(this.missingBucket);
- }
-
- if (this.missingOrder != null) {
- generator.writeKey("missing_order");
- this.missingOrder.serialize(generator, mapper);
- }
-
if (ApiTypeHelper.isDefined(this.order)) {
generator.writeKey("order");
generator.writeStartArray();
@@ -366,6 +347,11 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
this.script.serialize(generator, mapper);
}
+ if (this.shardMinDocCount != null) {
+ generator.writeKey("shard_min_doc_count");
+ generator.write(this.shardMinDocCount);
+ }
+
if (this.shardSize != null) {
generator.writeKey("shard_size");
generator.write(this.shardSize);
@@ -423,14 +409,12 @@ public static class Builder extends BucketAggregationBase.AbstractBuilder> order;
@Nullable
private Script script;
@Nullable
+ private Integer shardMinDocCount;
+ @Nullable
private Integer shardSize;
@Nullable
private Boolean showTermDocCountError;
@@ -451,10 +435,9 @@ private Builder(TermsAggregation o) {
this.include = o.include;
this.minDocCount = o.minDocCount;
this.missing = o.missing;
- this.missingBucket = o.missingBucket;
- this.missingOrder = o.missingOrder;
this.order = _listCopy(o.order);
this.script = o.script;
+ this.shardMinDocCount = o.shardMinDocCount;
this.shardSize = o.shardSize;
this.showTermDocCountError = o.showTermDocCountError;
this.size = o.size;
@@ -471,10 +454,9 @@ private Builder(Builder o) {
this.include = o.include;
this.minDocCount = o.minDocCount;
this.missing = o.missing;
- this.missingBucket = o.missingBucket;
- this.missingOrder = o.missingOrder;
this.order = _listCopy(o.order);
this.script = o.script;
+ this.shardMinDocCount = o.shardMinDocCount;
this.shardSize = o.shardSize;
this.showTermDocCountError = o.showTermDocCountError;
this.size = o.size;
@@ -592,24 +574,6 @@ public final Builder missing(Function> fn)
return script(fn.apply(new Script.Builder()).build());
}
+ /**
+ * The minimum number of documents in a bucket on each shard for it to be returned.
+ *
+ * API name: {@code shard_min_doc_count}
+ *
+ */
+ @Nonnull
+ public final Builder shardMinDocCount(@Nullable Integer value) {
+ this.shardMinDocCount = value;
+ return this;
+ }
+
/**
* The number of candidate terms produced by each shard. By default, shard_size will be automatically estimated based
* on the number of shards and the size parameter.
@@ -737,14 +713,13 @@ protected static void setupTermsAggregationDeserializer(ObjectDeserializer searchAsYouType(
return this.searchAsYouType(fn.apply(new SearchAsYouTypeProperty.Builder()).build());
}
+ public ObjectBuilder semantic(SemanticProperty v) {
+ this._kind = Kind.Semantic;
+ this._value = v;
+ return this;
+ }
+
+ public ObjectBuilder semantic(Function> fn) {
+ return this.semantic(fn.apply(new SemanticProperty.Builder()).build());
+ }
+
public ObjectBuilder short_(ShortNumberProperty v) {
this._kind = Kind.Short;
this._value = v;
@@ -1429,6 +1456,7 @@ protected static void setupPropertyDeserializer(ObjectDeserializer op)
op.add(Builder::rankFeatures, RankFeaturesProperty._DESERIALIZER, "rank_features");
op.add(Builder::scaledFloat, ScaledFloatNumberProperty._DESERIALIZER, "scaled_float");
op.add(Builder::searchAsYouType, SearchAsYouTypeProperty._DESERIALIZER, "search_as_you_type");
+ op.add(Builder::semantic, SemanticProperty._DESERIALIZER, "semantic");
op.add(Builder::short_, ShortNumberProperty._DESERIALIZER, "short");
op.add(Builder::text, TextProperty._DESERIALIZER, "text");
op.add(Builder::tokenCount, TokenCountProperty._DESERIALIZER, "token_count");
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/PropertyBuilders.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/PropertyBuilders.java
index db1850e193..ff4be039c7 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/PropertyBuilders.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/PropertyBuilders.java
@@ -311,6 +311,13 @@ public static SearchAsYouTypeProperty.Builder searchAsYouType() {
return new SearchAsYouTypeProperty.Builder();
}
+ /**
+ * Creates a builder for the {@link SemanticProperty semantic} {@code Property} variant.
+ */
+ public static SemanticProperty.Builder semantic() {
+ return new SemanticProperty.Builder();
+ }
+
/**
* Creates a builder for the {@link ShortNumberProperty short} {@code Property} variant.
*/
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticDenseEmbeddingConfig.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticDenseEmbeddingConfig.java
new file mode 100644
index 0000000000..a517946454
--- /dev/null
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticDenseEmbeddingConfig.java
@@ -0,0 +1,299 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+//----------------------------------------------------
+// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
+//----------------------------------------------------
+
+package org.opensearch.client.opensearch._types.mapping;
+
+import jakarta.json.stream.JsonGenerator;
+import java.util.Objects;
+import java.util.function.Function;
+import javax.annotation.Generated;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import org.opensearch.client.json.JsonpDeserializable;
+import org.opensearch.client.json.JsonpDeserializer;
+import org.opensearch.client.json.JsonpMapper;
+import org.opensearch.client.json.ObjectBuilderDeserializer;
+import org.opensearch.client.json.ObjectDeserializer;
+import org.opensearch.client.json.PlainJsonSerializable;
+import org.opensearch.client.util.CopyableBuilder;
+import org.opensearch.client.util.ObjectBuilder;
+import org.opensearch.client.util.ObjectBuilderBase;
+import org.opensearch.client.util.ToCopyableBuilder;
+
+// typedef: _types.mapping.SemanticDenseEmbeddingConfig
+
+/**
+ * Configuration options for dense (k-NN vector) embeddings on a {@code semantic} mapping property.
+ * <p>
+ * All four options are optional; serialization writes only the fields that were explicitly set,
+ * so unset options are omitted from the generated JSON entirely.
+ * <p>
+ * NOTE(review): the server-side semantics of {@code compression_level}, {@code data_type} and
+ * {@code mode} are not visible from this class — confirm against the OpenSearch mapping spec.
+ */
+@JsonpDeserializable
+@Generated("org.opensearch.client.codegen.CodeGenerator")
+public class SemanticDenseEmbeddingConfig
+ implements
+ PlainJsonSerializable,
+ ToCopyableBuilder {
+
+ @Nullable
+ private final String compressionLevel;
+
+ @Nullable
+ private final String dataType;
+
+ @Nullable
+ private final KnnVectorMethod method;
+
+ @Nullable
+ private final String mode;
+
+ // ---------------------------------------------------------------------------------------------
+
+ private SemanticDenseEmbeddingConfig(Builder builder) {
+ this.compressionLevel = builder.compressionLevel;
+ this.dataType = builder.dataType;
+ this.method = builder.method;
+ this.mode = builder.mode;
+ }
+
+ /**
+ * Creates an instance by applying {@code fn} to a fresh {@link Builder} and building the result.
+ */
+ public static SemanticDenseEmbeddingConfig of(
+ Function> fn
+ ) {
+ return fn.apply(new Builder()).build();
+ }
+
+ /**
+ * API name: {@code compression_level}
+ */
+ @Nullable
+ public final String compressionLevel() {
+ return this.compressionLevel;
+ }
+
+ /**
+ * API name: {@code data_type}
+ */
+ @Nullable
+ public final String dataType() {
+ return this.dataType;
+ }
+
+ /**
+ * API name: {@code method}
+ */
+ @Nullable
+ public final KnnVectorMethod method() {
+ return this.method;
+ }
+
+ /**
+ * API name: {@code mode}
+ */
+ @Nullable
+ public final String mode() {
+ return this.mode;
+ }
+
+ /**
+ * Serialize this object to JSON.
+ */
+ @Override
+ public void serialize(JsonGenerator generator, JsonpMapper mapper) {
+ generator.writeStartObject();
+ serializeInternal(generator, mapper);
+ generator.writeEnd();
+ }
+
+ // Writes only the non-null fields; unset options never appear in the JSON output.
+ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
+ if (this.compressionLevel != null) {
+ generator.writeKey("compression_level");
+ generator.write(this.compressionLevel);
+ }
+
+ if (this.dataType != null) {
+ generator.writeKey("data_type");
+ generator.write(this.dataType);
+ }
+
+ if (this.method != null) {
+ generator.writeKey("method");
+ this.method.serialize(generator, mapper);
+ }
+
+ if (this.mode != null) {
+ generator.writeKey("mode");
+ generator.write(this.mode);
+ }
+ }
+
+ // ---------------------------------------------------------------------------------------------
+
+ /**
+ * Returns a new {@link Builder} pre-populated with this object's values.
+ */
+ @Override
+ @Nonnull
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ /**
+ * Creates a new, empty {@link Builder}.
+ */
+ @Nonnull
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ /**
+ * Builder for {@link SemanticDenseEmbeddingConfig}.
+ */
+ public static class Builder extends ObjectBuilderBase implements CopyableBuilder {
+ @Nullable
+ private String compressionLevel;
+ @Nullable
+ private String dataType;
+ @Nullable
+ private KnnVectorMethod method;
+ @Nullable
+ private String mode;
+
+ public Builder() {}
+
+ private Builder(SemanticDenseEmbeddingConfig o) {
+ this.compressionLevel = o.compressionLevel;
+ this.dataType = o.dataType;
+ this.method = o.method;
+ this.mode = o.mode;
+ }
+
+ private Builder(Builder o) {
+ this.compressionLevel = o.compressionLevel;
+ this.dataType = o.dataType;
+ this.method = o.method;
+ this.mode = o.mode;
+ }
+
+ @Override
+ @Nonnull
+ public Builder copy() {
+ return new Builder(this);
+ }
+
+ /**
+ * API name: {@code compression_level}
+ */
+ @Nonnull
+ public final Builder compressionLevel(@Nullable String value) {
+ this.compressionLevel = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code data_type}
+ */
+ @Nonnull
+ public final Builder dataType(@Nullable String value) {
+ this.dataType = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code method}
+ */
+ @Nonnull
+ public final Builder method(@Nullable KnnVectorMethod value) {
+ this.method = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code method}
+ */
+ @Nonnull
+ public final Builder method(Function> fn) {
+ return method(fn.apply(new KnnVectorMethod.Builder()).build());
+ }
+
+ /**
+ * API name: {@code mode}
+ */
+ @Nonnull
+ public final Builder mode(@Nullable String value) {
+ this.mode = value;
+ return this;
+ }
+
+ /**
+ * Builds a {@link SemanticDenseEmbeddingConfig}.
+ *
+ * @throws NullPointerException if some of the required fields are null.
+ */
+ @Override
+ @Nonnull
+ public SemanticDenseEmbeddingConfig build() {
+ _checkSingleUse();
+
+ return new SemanticDenseEmbeddingConfig(this);
+ }
+ }
+
+ // ---------------------------------------------------------------------------------------------
+
+ /**
+ * Json deserializer for {@link SemanticDenseEmbeddingConfig}
+ */
+ public static final JsonpDeserializer _DESERIALIZER = ObjectBuilderDeserializer.lazy(
+ Builder::new,
+ SemanticDenseEmbeddingConfig::setupSemanticDenseEmbeddingConfigDeserializer
+ );
+
+ /**
+ * Registers the JSON field mappings used by {@code _DESERIALIZER}.
+ */
+ protected static void setupSemanticDenseEmbeddingConfigDeserializer(ObjectDeserializer op) {
+ op.add(Builder::compressionLevel, JsonpDeserializer.stringDeserializer(), "compression_level");
+ op.add(Builder::dataType, JsonpDeserializer.stringDeserializer(), "data_type");
+ op.add(Builder::method, KnnVectorMethod._DESERIALIZER, "method");
+ op.add(Builder::mode, JsonpDeserializer.stringDeserializer(), "mode");
+ }
+
+ @Override
+ public int hashCode() {
+ int result = 17;
+ result = 31 * result + Objects.hashCode(this.compressionLevel);
+ result = 31 * result + Objects.hashCode(this.dataType);
+ result = 31 * result + Objects.hashCode(this.method);
+ result = 31 * result + Objects.hashCode(this.mode);
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || this.getClass() != o.getClass()) return false;
+ SemanticDenseEmbeddingConfig other = (SemanticDenseEmbeddingConfig) o;
+ return Objects.equals(this.compressionLevel, other.compressionLevel)
+ && Objects.equals(this.dataType, other.dataType)
+ && Objects.equals(this.method, other.method)
+ && Objects.equals(this.mode, other.mode);
+ }
+}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticProperty.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticProperty.java
new file mode 100644
index 0000000000..a3cb00a9d5
--- /dev/null
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticProperty.java
@@ -0,0 +1,485 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+//----------------------------------------------------
+// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
+//----------------------------------------------------
+
+package org.opensearch.client.opensearch._types.mapping;
+
+import jakarta.json.stream.JsonGenerator;
+import java.util.Objects;
+import java.util.function.Function;
+import javax.annotation.Generated;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import org.opensearch.client.json.JsonData;
+import org.opensearch.client.json.JsonpDeserializable;
+import org.opensearch.client.json.JsonpDeserializer;
+import org.opensearch.client.json.JsonpMapper;
+import org.opensearch.client.json.ObjectBuilderDeserializer;
+import org.opensearch.client.json.ObjectDeserializer;
+import org.opensearch.client.json.PlainJsonSerializable;
+import org.opensearch.client.util.ApiTypeHelper;
+import org.opensearch.client.util.CopyableBuilder;
+import org.opensearch.client.util.ObjectBuilder;
+import org.opensearch.client.util.ObjectBuilderBase;
+import org.opensearch.client.util.ToCopyableBuilder;
+
+// typedef: _types.mapping.SemanticProperty
+
+@JsonpDeserializable
+@Generated("org.opensearch.client.codegen.CodeGenerator")
+public class SemanticProperty
+ implements
+ PropertyVariant,
+ PlainJsonSerializable,
+ ToCopyableBuilder {
+
+ @Nullable
+ private final JsonData chunking;
+
+ @Nullable
+ private final SemanticDenseEmbeddingConfig denseEmbeddingConfig;
+
+ @Nonnull
+ private final String modelId;
+
+ @Nullable
+ private final String rawFieldType;
+
+ @Nullable
+ private final String searchModelId;
+
+ @Nullable
+ private final String semanticFieldSearchAnalyzer;
+
+ @Nullable
+ private final String semanticInfoFieldName;
+
+ @Nullable
+ private final Boolean skipExistingEmbedding;
+
+ @Nullable
+ private final SemanticSparseEncodingConfig sparseEncodingConfig;
+
+ // ---------------------------------------------------------------------------------------------
+
+ private SemanticProperty(Builder builder) {
+ this.chunking = builder.chunking;
+ this.denseEmbeddingConfig = builder.denseEmbeddingConfig;
+ this.modelId = ApiTypeHelper.requireNonNull(builder.modelId, this, "modelId");
+ this.rawFieldType = builder.rawFieldType;
+ this.searchModelId = builder.searchModelId;
+ this.semanticFieldSearchAnalyzer = builder.semanticFieldSearchAnalyzer;
+ this.semanticInfoFieldName = builder.semanticInfoFieldName;
+ this.skipExistingEmbedding = builder.skipExistingEmbedding;
+ this.sparseEncodingConfig = builder.sparseEncodingConfig;
+ }
+
+ public static SemanticProperty of(Function> fn) {
+ return fn.apply(new Builder()).build();
+ }
+
+ /**
+ * {@link Property} variant kind.
+ */
+ @Override
+ public Property.Kind _propertyKind() {
+ return Property.Kind.Semantic;
+ }
+
+ /**
+ * API name: {@code chunking}
+ */
+ @Nullable
+ public final JsonData chunking() {
+ return this.chunking;
+ }
+
+ /**
+ * API name: {@code dense_embedding_config}
+ */
+ @Nullable
+ public final SemanticDenseEmbeddingConfig denseEmbeddingConfig() {
+ return this.denseEmbeddingConfig;
+ }
+
+ /**
+ * Required - API name: {@code model_id}
+ */
+ @Nonnull
+ public final String modelId() {
+ return this.modelId;
+ }
+
+ /**
+ * API name: {@code raw_field_type}
+ */
+ @Nullable
+ public final String rawFieldType() {
+ return this.rawFieldType;
+ }
+
+ /**
+ * API name: {@code search_model_id}
+ */
+ @Nullable
+ public final String searchModelId() {
+ return this.searchModelId;
+ }
+
+ /**
+ * API name: {@code semantic_field_search_analyzer}
+ */
+ @Nullable
+ public final String semanticFieldSearchAnalyzer() {
+ return this.semanticFieldSearchAnalyzer;
+ }
+
+ /**
+ * API name: {@code semantic_info_field_name}
+ */
+ @Nullable
+ public final String semanticInfoFieldName() {
+ return this.semanticInfoFieldName;
+ }
+
+ /**
+ * API name: {@code skip_existing_embedding}
+ */
+ @Nullable
+ public final Boolean skipExistingEmbedding() {
+ return this.skipExistingEmbedding;
+ }
+
+ /**
+ * API name: {@code sparse_encoding_config}
+ */
+ @Nullable
+ public final SemanticSparseEncodingConfig sparseEncodingConfig() {
+ return this.sparseEncodingConfig;
+ }
+
+ /**
+ * Serialize this object to JSON.
+ */
+ @Override
+ public void serialize(JsonGenerator generator, JsonpMapper mapper) {
+ generator.writeStartObject();
+ serializeInternal(generator, mapper);
+ generator.writeEnd();
+ }
+
+ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
+ generator.write("type", "semantic");
+ if (this.chunking != null) {
+ generator.writeKey("chunking");
+ this.chunking.serialize(generator, mapper);
+ }
+
+ if (this.denseEmbeddingConfig != null) {
+ generator.writeKey("dense_embedding_config");
+ this.denseEmbeddingConfig.serialize(generator, mapper);
+ }
+
+ generator.writeKey("model_id");
+ generator.write(this.modelId);
+
+ if (this.rawFieldType != null) {
+ generator.writeKey("raw_field_type");
+ generator.write(this.rawFieldType);
+ }
+
+ if (this.searchModelId != null) {
+ generator.writeKey("search_model_id");
+ generator.write(this.searchModelId);
+ }
+
+ if (this.semanticFieldSearchAnalyzer != null) {
+ generator.writeKey("semantic_field_search_analyzer");
+ generator.write(this.semanticFieldSearchAnalyzer);
+ }
+
+ if (this.semanticInfoFieldName != null) {
+ generator.writeKey("semantic_info_field_name");
+ generator.write(this.semanticInfoFieldName);
+ }
+
+ if (this.skipExistingEmbedding != null) {
+ generator.writeKey("skip_existing_embedding");
+ generator.write(this.skipExistingEmbedding);
+ }
+
+ if (this.sparseEncodingConfig != null) {
+ generator.writeKey("sparse_encoding_config");
+ this.sparseEncodingConfig.serialize(generator, mapper);
+ }
+ }
+
+ // ---------------------------------------------------------------------------------------------
+
+ @Override
+ @Nonnull
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ @Nonnull
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ /**
+ * Builder for {@link SemanticProperty}.
+ */
+ public static class Builder extends ObjectBuilderBase implements CopyableBuilder {
+ @Nullable
+ private JsonData chunking;
+ @Nullable
+ private SemanticDenseEmbeddingConfig denseEmbeddingConfig;
+ private String modelId;
+ @Nullable
+ private String rawFieldType;
+ @Nullable
+ private String searchModelId;
+ @Nullable
+ private String semanticFieldSearchAnalyzer;
+ @Nullable
+ private String semanticInfoFieldName;
+ @Nullable
+ private Boolean skipExistingEmbedding;
+ @Nullable
+ private SemanticSparseEncodingConfig sparseEncodingConfig;
+
+ public Builder() {}
+
+ private Builder(SemanticProperty o) {
+ this.chunking = o.chunking;
+ this.denseEmbeddingConfig = o.denseEmbeddingConfig;
+ this.modelId = o.modelId;
+ this.rawFieldType = o.rawFieldType;
+ this.searchModelId = o.searchModelId;
+ this.semanticFieldSearchAnalyzer = o.semanticFieldSearchAnalyzer;
+ this.semanticInfoFieldName = o.semanticInfoFieldName;
+ this.skipExistingEmbedding = o.skipExistingEmbedding;
+ this.sparseEncodingConfig = o.sparseEncodingConfig;
+ }
+
+ private Builder(Builder o) {
+ this.chunking = o.chunking;
+ this.denseEmbeddingConfig = o.denseEmbeddingConfig;
+ this.modelId = o.modelId;
+ this.rawFieldType = o.rawFieldType;
+ this.searchModelId = o.searchModelId;
+ this.semanticFieldSearchAnalyzer = o.semanticFieldSearchAnalyzer;
+ this.semanticInfoFieldName = o.semanticInfoFieldName;
+ this.skipExistingEmbedding = o.skipExistingEmbedding;
+ this.sparseEncodingConfig = o.sparseEncodingConfig;
+ }
+
+ @Override
+ @Nonnull
+ public Builder copy() {
+ return new Builder(this);
+ }
+
+ /**
+ * API name: {@code chunking}
+ */
+ @Nonnull
+ public final Builder chunking(@Nullable JsonData value) {
+ this.chunking = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code dense_embedding_config}
+ */
+ @Nonnull
+ public final Builder denseEmbeddingConfig(@Nullable SemanticDenseEmbeddingConfig value) {
+ this.denseEmbeddingConfig = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code dense_embedding_config}
+ */
+ @Nonnull
+ public final Builder denseEmbeddingConfig(
+ Function> fn
+ ) {
+ return denseEmbeddingConfig(fn.apply(new SemanticDenseEmbeddingConfig.Builder()).build());
+ }
+
+ /**
+ * Required - API name: {@code model_id}
+ */
+ @Nonnull
+ public final Builder modelId(String value) {
+ this.modelId = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code raw_field_type}
+ */
+ @Nonnull
+ public final Builder rawFieldType(@Nullable String value) {
+ this.rawFieldType = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code search_model_id}
+ */
+ @Nonnull
+ public final Builder searchModelId(@Nullable String value) {
+ this.searchModelId = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code semantic_field_search_analyzer}
+ */
+ @Nonnull
+ public final Builder semanticFieldSearchAnalyzer(@Nullable String value) {
+ this.semanticFieldSearchAnalyzer = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code semantic_info_field_name}
+ */
+ @Nonnull
+ public final Builder semanticInfoFieldName(@Nullable String value) {
+ this.semanticInfoFieldName = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code skip_existing_embedding}
+ */
+ @Nonnull
+ public final Builder skipExistingEmbedding(@Nullable Boolean value) {
+ this.skipExistingEmbedding = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code sparse_encoding_config}
+ */
+ @Nonnull
+ public final Builder sparseEncodingConfig(@Nullable SemanticSparseEncodingConfig value) {
+ this.sparseEncodingConfig = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code sparse_encoding_config}
+ */
+ @Nonnull
+ public final Builder sparseEncodingConfig(
+ Function> fn
+ ) {
+ return sparseEncodingConfig(fn.apply(new SemanticSparseEncodingConfig.Builder()).build());
+ }
+
+ /**
+ * Builds a {@link SemanticProperty}.
+ *
+ * @throws NullPointerException if some of the required fields are null.
+ */
+ @Override
+ @Nonnull
+ public SemanticProperty build() {
+ _checkSingleUse();
+
+ return new SemanticProperty(this);
+ }
+ }
+
+ // ---------------------------------------------------------------------------------------------
+
+ /**
+ * Json deserializer for {@link SemanticProperty}
+ */
+ public static final JsonpDeserializer _DESERIALIZER = ObjectBuilderDeserializer.lazy(
+ Builder::new,
+ SemanticProperty::setupSemanticPropertyDeserializer
+ );
+
+ protected static void setupSemanticPropertyDeserializer(ObjectDeserializer op) {
+ op.add(Builder::chunking, JsonData._DESERIALIZER, "chunking");
+ op.add(Builder::denseEmbeddingConfig, SemanticDenseEmbeddingConfig._DESERIALIZER, "dense_embedding_config");
+ op.add(Builder::modelId, JsonpDeserializer.stringDeserializer(), "model_id");
+ op.add(Builder::rawFieldType, JsonpDeserializer.stringDeserializer(), "raw_field_type");
+ op.add(Builder::searchModelId, JsonpDeserializer.stringDeserializer(), "search_model_id");
+ op.add(Builder::semanticFieldSearchAnalyzer, JsonpDeserializer.stringDeserializer(), "semantic_field_search_analyzer");
+ op.add(Builder::semanticInfoFieldName, JsonpDeserializer.stringDeserializer(), "semantic_info_field_name");
+ op.add(Builder::skipExistingEmbedding, JsonpDeserializer.booleanDeserializer(), "skip_existing_embedding");
+ op.add(Builder::sparseEncodingConfig, SemanticSparseEncodingConfig._DESERIALIZER, "sparse_encoding_config");
+
+ op.ignore("type");
+ }
+
+ @Override
+ public int hashCode() {
+ int result = 17;
+ result = 31 * result + Objects.hashCode(this.chunking);
+ result = 31 * result + Objects.hashCode(this.denseEmbeddingConfig);
+ result = 31 * result + this.modelId.hashCode();
+ result = 31 * result + Objects.hashCode(this.rawFieldType);
+ result = 31 * result + Objects.hashCode(this.searchModelId);
+ result = 31 * result + Objects.hashCode(this.semanticFieldSearchAnalyzer);
+ result = 31 * result + Objects.hashCode(this.semanticInfoFieldName);
+ result = 31 * result + Objects.hashCode(this.skipExistingEmbedding);
+ result = 31 * result + Objects.hashCode(this.sparseEncodingConfig);
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || this.getClass() != o.getClass()) return false;
+ SemanticProperty other = (SemanticProperty) o;
+ return Objects.equals(this.chunking, other.chunking)
+ && Objects.equals(this.denseEmbeddingConfig, other.denseEmbeddingConfig)
+ && this.modelId.equals(other.modelId)
+ && Objects.equals(this.rawFieldType, other.rawFieldType)
+ && Objects.equals(this.searchModelId, other.searchModelId)
+ && Objects.equals(this.semanticFieldSearchAnalyzer, other.semanticFieldSearchAnalyzer)
+ && Objects.equals(this.semanticInfoFieldName, other.semanticInfoFieldName)
+ && Objects.equals(this.skipExistingEmbedding, other.skipExistingEmbedding)
+ && Objects.equals(this.sparseEncodingConfig, other.sparseEncodingConfig);
+ }
+}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedDoc.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticSparseEncodingConfig.java
similarity index 55%
rename from java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedDoc.java
rename to java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticSparseEncodingConfig.java
index 0eeae29390..82327989b5 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedDoc.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/mapping/SemanticSparseEncodingConfig.java
@@ -34,61 +34,67 @@
// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
//----------------------------------------------------
-package org.opensearch.client.opensearch._types.query_dsl;
+package org.opensearch.client.opensearch._types.mapping;
import jakarta.json.stream.JsonGenerator;
+import java.util.Objects;
import java.util.function.Function;
import javax.annotation.Generated;
import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
import org.opensearch.client.json.JsonpDeserializable;
import org.opensearch.client.json.JsonpDeserializer;
import org.opensearch.client.json.JsonpMapper;
import org.opensearch.client.json.ObjectBuilderDeserializer;
import org.opensearch.client.json.ObjectDeserializer;
import org.opensearch.client.json.PlainJsonSerializable;
-import org.opensearch.client.util.ApiTypeHelper;
import org.opensearch.client.util.CopyableBuilder;
import org.opensearch.client.util.ObjectBuilder;
import org.opensearch.client.util.ObjectBuilderBase;
import org.opensearch.client.util.ToCopyableBuilder;
-// typedef: _types.query_dsl.PinnedDoc
+// typedef: _types.mapping.SemanticSparseEncodingConfig
@JsonpDeserializable
@Generated("org.opensearch.client.codegen.CodeGenerator")
-public class PinnedDoc implements PlainJsonSerializable, ToCopyableBuilder {
+public class SemanticSparseEncodingConfig
+ implements
+ PlainJsonSerializable,
+ ToCopyableBuilder {
- @Nonnull
- private final String id;
+ @Nullable
+ private final Number pruneRatio;
- @Nonnull
- private final String index;
+ @Nullable
+ private final String pruneType;
// ---------------------------------------------------------------------------------------------
- private PinnedDoc(Builder builder) {
- this.id = ApiTypeHelper.requireNonNull(builder.id, this, "id");
- this.index = ApiTypeHelper.requireNonNull(builder.index, this, "index");
+ private SemanticSparseEncodingConfig(Builder builder) {
+ this.pruneRatio = builder.pruneRatio;
+ this.pruneType = builder.pruneType;
}
- public static PinnedDoc of(Function> fn) {
+ public static SemanticSparseEncodingConfig of(
+ Function> fn
+ ) {
return fn.apply(new Builder()).build();
}
/**
- * Required - API name: {@code _id}
+ * API name: {@code prune_ratio}
*/
- @Nonnull
- public final String id() {
- return this.id;
+ @Nullable
+ public final Number pruneRatio() {
+ return this.pruneRatio;
}
/**
- * Required - API name: {@code _index}
+ * API name: {@code prune_type}
*/
- @Nonnull
- public final String index() {
- return this.index;
+ @Nullable
+ public final String pruneType() {
+ return this.pruneType;
}
/**
@@ -102,11 +108,15 @@ public void serialize(JsonGenerator generator, JsonpMapper mapper) {
}
protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
- generator.writeKey("_id");
- generator.write(this.id);
+ if (this.pruneRatio != null) {
+ generator.writeKey("prune_ratio");
+ generator.write(this.pruneRatio.doubleValue());
+ }
- generator.writeKey("_index");
- generator.write(this.index);
+ if (this.pruneType != null) {
+ generator.writeKey("prune_type");
+ generator.write(this.pruneType);
+ }
}
// ---------------------------------------------------------------------------------------------
@@ -123,22 +133,24 @@ public static Builder builder() {
}
/**
- * Builder for {@link PinnedDoc}.
+ * Builder for {@link SemanticSparseEncodingConfig}.
*/
- public static class Builder extends ObjectBuilderBase implements CopyableBuilder {
- private String id;
- private String index;
+ public static class Builder extends ObjectBuilderBase implements CopyableBuilder {
+ @Nullable
+ private Number pruneRatio;
+ @Nullable
+ private String pruneType;
public Builder() {}
- private Builder(PinnedDoc o) {
- this.id = o.id;
- this.index = o.index;
+ private Builder(SemanticSparseEncodingConfig o) {
+ this.pruneRatio = o.pruneRatio;
+ this.pruneType = o.pruneType;
}
private Builder(Builder o) {
- this.id = o.id;
- this.index = o.index;
+ this.pruneRatio = o.pruneRatio;
+ this.pruneType = o.pruneType;
}
@Override
@@ -148,57 +160,57 @@ public Builder copy() {
}
/**
- * Required - API name: {@code _id}
+ * API name: {@code prune_ratio}
*/
@Nonnull
- public final Builder id(String value) {
- this.id = value;
+ public final Builder pruneRatio(@Nullable Number value) {
+ this.pruneRatio = value;
return this;
}
/**
- * Required - API name: {@code _index}
+ * API name: {@code prune_type}
*/
@Nonnull
- public final Builder index(String value) {
- this.index = value;
+ public final Builder pruneType(@Nullable String value) {
+ this.pruneType = value;
return this;
}
/**
- * Builds a {@link PinnedDoc}.
+ * Builds a {@link SemanticSparseEncodingConfig}.
*
* @throws NullPointerException if some of the required fields are null.
*/
@Override
@Nonnull
- public PinnedDoc build() {
+ public SemanticSparseEncodingConfig build() {
_checkSingleUse();
- return new PinnedDoc(this);
+ return new SemanticSparseEncodingConfig(this);
}
}
// ---------------------------------------------------------------------------------------------
/**
- * Json deserializer for {@link PinnedDoc}
+ * Json deserializer for {@link SemanticSparseEncodingConfig}
*/
- public static final JsonpDeserializer _DESERIALIZER = ObjectBuilderDeserializer.lazy(
+ public static final JsonpDeserializer _DESERIALIZER = ObjectBuilderDeserializer.lazy(
Builder::new,
- PinnedDoc::setupPinnedDocDeserializer
+ SemanticSparseEncodingConfig::setupSemanticSparseEncodingConfigDeserializer
);
- protected static void setupPinnedDocDeserializer(ObjectDeserializer op) {
- op.add(Builder::id, JsonpDeserializer.stringDeserializer(), "_id");
- op.add(Builder::index, JsonpDeserializer.stringDeserializer(), "_index");
+ protected static void setupSemanticSparseEncodingConfigDeserializer(ObjectDeserializer op) {
+ op.add(Builder::pruneRatio, JsonpDeserializer.numberDeserializer(), "prune_ratio");
+ op.add(Builder::pruneType, JsonpDeserializer.stringDeserializer(), "prune_type");
}
@Override
public int hashCode() {
int result = 17;
- result = 31 * result + this.id.hashCode();
- result = 31 * result + this.index.hashCode();
+ result = 31 * result + Objects.hashCode(this.pruneRatio);
+ result = 31 * result + Objects.hashCode(this.pruneType);
return result;
}
@@ -206,7 +218,7 @@ public int hashCode() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || this.getClass() != o.getClass()) return false;
- PinnedDoc other = (PinnedDoc) o;
- return this.id.equals(other.id) && this.index.equals(other.index);
+ SemanticSparseEncodingConfig other = (SemanticSparseEncodingConfig) o;
+ return Objects.equals(this.pruneRatio, other.pruneRatio) && Objects.equals(this.pruneType, other.pruneType);
}
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/DecayPlacement.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/DecayPlacement.java
index 1de9a9e14b..17646b90de 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/DecayPlacement.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/DecayPlacement.java
@@ -49,6 +49,7 @@
import org.opensearch.client.json.ObjectBuilderDeserializer;
import org.opensearch.client.json.ObjectDeserializer;
import org.opensearch.client.json.PlainJsonSerializable;
+import org.opensearch.client.util.ApiTypeHelper;
import org.opensearch.client.util.CopyableBuilder;
import org.opensearch.client.util.ObjectBuilder;
import org.opensearch.client.util.ObjectBuilderBase;
@@ -69,7 +70,7 @@ public class DecayPlacement implements PlainJsonSerializable, ToCopyableBuilder<
@Nullable
private final JsonData origin;
- @Nullable
+ @Nonnull
private final JsonData scale;
// ---------------------------------------------------------------------------------------------
@@ -78,7 +79,7 @@ private DecayPlacement(Builder builder) {
this.decay = builder.decay;
this.offset = builder.offset;
this.origin = builder.origin;
- this.scale = builder.scale;
+ this.scale = ApiTypeHelper.requireNonNull(builder.scale, this, "scale");
}
public static DecayPlacement of(Function> fn) {
@@ -110,9 +111,9 @@ public final JsonData origin() {
}
/**
- * API name: {@code scale}
+ * Required - API name: {@code scale}
*/
- @Nullable
+ @Nonnull
public final JsonData scale() {
return this.scale;
}
@@ -143,10 +144,8 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
this.origin.serialize(generator, mapper);
}
- if (this.scale != null) {
- generator.writeKey("scale");
- this.scale.serialize(generator, mapper);
- }
+ generator.writeKey("scale");
+ this.scale.serialize(generator, mapper);
}
// ---------------------------------------------------------------------------------------------
@@ -172,7 +171,6 @@ public static class Builder extends ObjectBuilderBase implements CopyableBuilder
private JsonData offset;
@Nullable
private JsonData origin;
- @Nullable
private JsonData scale;
public Builder() {}
@@ -225,10 +223,10 @@ public final Builder origin(@Nullable JsonData value) {
}
/**
- * API name: {@code scale}
+ * Required - API name: {@code scale}
*/
@Nonnull
- public final Builder scale(@Nullable JsonData value) {
+ public final Builder scale(JsonData value) {
this.scale = value;
return this;
}
@@ -270,7 +268,7 @@ public int hashCode() {
result = 31 * result + Objects.hashCode(this.decay);
result = 31 * result + Objects.hashCode(this.offset);
result = 31 * result + Objects.hashCode(this.origin);
- result = 31 * result + Objects.hashCode(this.scale);
+ result = 31 * result + this.scale.hashCode();
return result;
}
@@ -282,6 +280,6 @@ public boolean equals(Object o) {
return Objects.equals(this.decay, other.decay)
&& Objects.equals(this.offset, other.offset)
&& Objects.equals(this.origin, other.origin)
- && Objects.equals(this.scale, other.scale);
+ && this.scale.equals(other.scale);
}
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/FieldAndFormat.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/FieldAndFormat.java
index 29540f8862..0253f2d4a6 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/FieldAndFormat.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/FieldAndFormat.java
@@ -66,15 +66,11 @@ public class FieldAndFormat implements PlainJsonSerializable, ToCopyableBuilder<
@Nullable
private final String format;
- @Nullable
- private final Boolean includeUnmapped;
-
// ---------------------------------------------------------------------------------------------
private FieldAndFormat(Builder builder) {
this.field = ApiTypeHelper.requireNonNull(builder.field, this, "field");
this.format = builder.format;
- this.includeUnmapped = builder.includeUnmapped;
}
public static FieldAndFormat of(Function> fn) {
@@ -100,14 +96,6 @@ public final String format() {
return this.format;
}
- /**
- * API name: {@code include_unmapped}
- */
- @Nullable
- public final Boolean includeUnmapped() {
- return this.includeUnmapped;
- }
-
/**
* Serialize this object to JSON.
*/
@@ -126,11 +114,6 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
generator.writeKey("format");
generator.write(this.format);
}
-
- if (this.includeUnmapped != null) {
- generator.writeKey("include_unmapped");
- generator.write(this.includeUnmapped);
- }
}
// ---------------------------------------------------------------------------------------------
@@ -153,21 +136,17 @@ public static class Builder extends ObjectBuilderBase implements CopyableBuilder
private String field;
@Nullable
private String format;
- @Nullable
- private Boolean includeUnmapped;
public Builder() {}
private Builder(FieldAndFormat o) {
this.field = o.field;
this.format = o.format;
- this.includeUnmapped = o.includeUnmapped;
}
private Builder(Builder o) {
this.field = o.field;
this.format = o.format;
- this.includeUnmapped = o.includeUnmapped;
}
@Override
@@ -197,15 +176,6 @@ public final Builder format(@Nullable String value) {
return this;
}
- /**
- * API name: {@code include_unmapped}
- */
- @Nonnull
- public final Builder includeUnmapped(@Nullable Boolean value) {
- this.includeUnmapped = value;
- return this;
- }
-
/**
* Builds a {@link FieldAndFormat}.
*
@@ -233,7 +203,6 @@ public FieldAndFormat build() {
protected static void setupFieldAndFormatDeserializer(ObjectDeserializer op) {
op.add(Builder::field, JsonpDeserializer.stringDeserializer(), "field");
op.add(Builder::format, JsonpDeserializer.stringDeserializer(), "format");
- op.add(Builder::includeUnmapped, JsonpDeserializer.booleanDeserializer(), "include_unmapped");
op.shortcutProperty("field");
}
@@ -243,7 +212,6 @@ public int hashCode() {
int result = 17;
result = 31 * result + this.field.hashCode();
result = 31 * result + Objects.hashCode(this.format);
- result = 31 * result + Objects.hashCode(this.includeUnmapped);
return result;
}
@@ -252,8 +220,6 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || this.getClass() != o.getClass()) return false;
FieldAndFormat other = (FieldAndFormat) o;
- return this.field.equals(other.field)
- && Objects.equals(this.format, other.format)
- && Objects.equals(this.includeUnmapped, other.includeUnmapped);
+ return this.field.equals(other.field) && Objects.equals(this.format, other.format);
}
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/GeoDistanceQuery.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/GeoDistanceQuery.java
index dd9dd1ff59..e7f0227ebd 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/GeoDistanceQuery.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/GeoDistanceQuery.java
@@ -47,6 +47,7 @@
import org.opensearch.client.json.JsonpMapper;
import org.opensearch.client.json.ObjectBuilderDeserializer;
import org.opensearch.client.json.ObjectDeserializer;
+import org.opensearch.client.opensearch._types.DistanceUnit;
import org.opensearch.client.opensearch._types.GeoDistanceType;
import org.opensearch.client.opensearch._types.GeoLocation;
import org.opensearch.client.util.ApiTypeHelper;
@@ -75,6 +76,9 @@ public class GeoDistanceQuery extends QueryBase implements QueryVariant, ToCopya
@Nonnull
private final GeoLocation location;
+ @Nullable
+ private final DistanceUnit unit;
+
@Nullable
private final GeoValidationMethod validationMethod;
@@ -87,6 +91,7 @@ private GeoDistanceQuery(Builder builder) {
this.field = ApiTypeHelper.requireNonNull(builder.field, this, "field");
this.ignoreUnmapped = builder.ignoreUnmapped;
this.location = ApiTypeHelper.requireNonNull(builder.location, this, "location");
+ this.unit = builder.unit;
this.validationMethod = builder.validationMethod;
}
@@ -151,6 +156,17 @@ public final GeoLocation location() {
return this.location;
}
+ /**
+ * The unit of distance measurement.
+ *
+ * API name: {@code unit}
+ *
+ */
+ @Nullable
+ public final DistanceUnit unit() {
+ return this.unit;
+ }
+
/**
* API name: {@code validation_method}
*/
@@ -176,6 +192,11 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
generator.write(this.ignoreUnmapped);
}
+ if (this.unit != null) {
+ generator.writeKey("unit");
+ this.unit.serialize(generator, mapper);
+ }
+
if (this.validationMethod != null) {
generator.writeKey("validation_method");
this.validationMethod.serialize(generator, mapper);
@@ -207,6 +228,8 @@ public static class Builder extends QueryBase.AbstractBuilder implement
private Boolean ignoreUnmapped;
private GeoLocation location;
@Nullable
+ private DistanceUnit unit;
+ @Nullable
private GeoValidationMethod validationMethod;
public Builder() {}
@@ -218,6 +241,7 @@ private Builder(GeoDistanceQuery o) {
this.field = o.field;
this.ignoreUnmapped = o.ignoreUnmapped;
this.location = o.location;
+ this.unit = o.unit;
this.validationMethod = o.validationMethod;
}
@@ -228,6 +252,7 @@ private Builder(Builder o) {
this.field = o.field;
this.ignoreUnmapped = o.ignoreUnmapped;
this.location = o.location;
+ this.unit = o.unit;
this.validationMethod = o.validationMethod;
}
@@ -314,6 +339,18 @@ public final Builder location(Function
+ * API name: {@code unit}
+ *
+ */
+ @Nonnull
+ public final Builder unit(@Nullable DistanceUnit value) {
+ this.unit = value;
+ return this;
+ }
+
/**
* API name: {@code validation_method}
*/
@@ -352,6 +389,7 @@ protected static void setupGeoDistanceQueryDeserializer(ObjectDeserializer {
builder.field(name);
@@ -367,6 +405,7 @@ public int hashCode() {
result = 31 * result + this.field.hashCode();
result = 31 * result + Objects.hashCode(this.ignoreUnmapped);
result = 31 * result + this.location.hashCode();
+ result = 31 * result + Objects.hashCode(this.unit);
result = 31 * result + Objects.hashCode(this.validationMethod);
return result;
}
@@ -384,6 +423,7 @@ public boolean equals(Object o) {
&& this.field.equals(other.field)
&& Objects.equals(this.ignoreUnmapped, other.ignoreUnmapped)
&& this.location.equals(other.location)
+ && Objects.equals(this.unit, other.unit)
&& Objects.equals(this.validationMethod, other.validationMethod);
}
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/HybridQuery.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/HybridQuery.java
index 97fa40eccb..f540c00fea 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/HybridQuery.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/HybridQuery.java
@@ -59,6 +59,9 @@
@Generated("org.opensearch.client.codegen.CodeGenerator")
public class HybridQuery extends QueryBase implements QueryVariant, ToCopyableBuilder {
+ @Nullable
+ private final Query filter;
+
@Nullable
private final Integer paginationDepth;
@@ -69,6 +72,7 @@ public class HybridQuery extends QueryBase implements QueryVariant, ToCopyableBu
private HybridQuery(Builder builder) {
super(builder);
+ this.filter = builder.filter;
this.paginationDepth = builder.paginationDepth;
this.queries = ApiTypeHelper.unmodifiable(builder.queries);
}
@@ -85,6 +89,14 @@ public Query.Kind _queryKind() {
return Query.Kind.Hybrid;
}
+ /**
+ * API name: {@code filter}
+ */
+ @Nullable
+ public final Query filter() {
+ return this.filter;
+ }
+
/**
* API name: {@code pagination_depth}
*/
@@ -103,6 +115,11 @@ public final List queries() {
protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
super.serializeInternal(generator, mapper);
+ if (this.filter != null) {
+ generator.writeKey("filter");
+ this.filter.serialize(generator, mapper);
+ }
+
if (this.paginationDepth != null) {
generator.writeKey("pagination_depth");
generator.write(this.paginationDepth);
@@ -135,6 +152,8 @@ public static Builder builder() {
* Builder for {@link HybridQuery}.
*/
public static class Builder extends QueryBase.AbstractBuilder implements CopyableBuilder {
+ @Nullable
+ private Query filter;
@Nullable
private Integer paginationDepth;
@Nullable
@@ -144,12 +163,14 @@ public Builder() {}
private Builder(HybridQuery o) {
super(o);
+ this.filter = o.filter;
this.paginationDepth = o.paginationDepth;
this.queries = _listCopy(o.queries);
}
private Builder(Builder o) {
super(o);
+ this.filter = o.filter;
this.paginationDepth = o.paginationDepth;
this.queries = _listCopy(o.queries);
}
@@ -166,6 +187,23 @@ protected Builder self() {
return this;
}
+ /**
+ * API name: {@code filter}
+ */
+ @Nonnull
+ public final Builder filter(@Nullable Query value) {
+ this.filter = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code filter}
+ */
+ @Nonnull
+ public final Builder filter(Function> fn) {
+ return filter(fn.apply(new Query.Builder()).build());
+ }
+
/**
* API name: {@code pagination_depth}
*/
@@ -239,6 +277,7 @@ public HybridQuery build() {
protected static void setupHybridQueryDeserializer(ObjectDeserializer op) {
setupQueryBaseDeserializer(op);
+ op.add(Builder::filter, Query._DESERIALIZER, "filter");
op.add(Builder::paginationDepth, JsonpDeserializer.integerDeserializer(), "pagination_depth");
op.add(Builder::queries, JsonpDeserializer.arrayDeserializer(Query._DESERIALIZER), "queries");
}
@@ -246,6 +285,7 @@ protected static void setupHybridQueryDeserializer(ObjectDeserializer, QueryVariant, PlainJsonSerializable {
- /**
- * {@link PinnedQuery} variant kinds.
- */
- public enum Kind implements JsonEnum {
- Docs("docs"),
- Ids("ids");
-
- private final String jsonValue;
-
- Kind(String jsonValue) {
- this.jsonValue = jsonValue;
- }
-
- @Override
- public String jsonValue() {
- return jsonValue;
- }
- }
-
- /**
- * {@link Query} variant kind.
- */
- @Override
- public Query.Kind _queryKind() {
- return Query.Kind.Pinned;
- }
-
- private final Kind _kind;
- private final Object _value;
-
- @Override
- public final Kind _kind() {
- return _kind;
- }
-
- @Override
- public final Object _get() {
- return _value;
- }
-
- @Nonnull
- private final Query organic;
-
- private PinnedQuery(Builder builder) {
- super(builder);
- this.organic = ApiTypeHelper.requireNonNull(builder.organic, this, "organic");
- this._kind = ApiTypeHelper.requireNonNull(builder._kind, builder, "");
- this._value = ApiTypeHelper.requireNonNull(builder._value, builder, "");
- }
-
- public static PinnedQuery of(Function> fn) {
- return fn.apply(new Builder()).build();
- }
-
- /**
- * Required - API name: {@code organic}
- */
- @Nonnull
- public final Query organic() {
- return this.organic;
- }
-
- /**
- * Is this variant instance of kind {@code docs}?
- */
- public boolean isDocs() {
- return _kind == Kind.Docs;
- }
-
- /**
- * Get the {@code docs} variant value.
- *
- * @throws IllegalStateException if the current variant is not the {@code docs} kind.
- */
- public List docs() {
- return TaggedUnionUtils.get(this, Kind.Docs);
- }
-
- /**
- * Is this variant instance of kind {@code ids}?
- */
- public boolean isIds() {
- return _kind == Kind.Ids;
- }
-
- /**
- * Get the {@code ids} variant value.
- *
- * @throws IllegalStateException if the current variant is not the {@code ids} kind.
- */
- public List ids() {
- return TaggedUnionUtils.get(this, Kind.Ids);
- }
-
- @Override
- public void serialize(JsonGenerator generator, JsonpMapper mapper) {
- generator.writeStartObject();
- super.serializeInternal(generator, mapper);
- generator.writeKey("organic");
- this.organic.serialize(generator, mapper);
- generator.writeKey(_kind.jsonValue());
- if (_value instanceof JsonpSerializable) {
- ((JsonpSerializable) _value).serialize(generator, mapper);
- } else {
- switch (_kind) {
- case Docs:
- generator.writeStartArray();
- for (PinnedDoc item0 : ((List) this._value)) {
- item0.serialize(generator, mapper);
- }
- generator.writeEnd();
- break;
- case Ids:
- generator.writeStartArray();
- for (String item0 : ((List) this._value)) {
- generator.write(item0);
- }
- generator.writeEnd();
- break;
- }
- }
- generator.writeEnd();
- }
-
- @Nonnull
- public Builder toBuilder() {
- return new Builder(this);
- }
-
- @Nonnull
- public static Builder builder() {
- return new Builder();
- }
-
- public static class Builder extends QueryBase.AbstractBuilder {
- private Kind _kind;
- private Object _value;
- private Query organic;
-
- public Builder() {}
-
- private Builder(PinnedQuery o) {
- super(o);
- this.organic = o.organic;
- this._kind = o._kind;
- this._value = o._value;
- }
-
- @Override
- @Nonnull
- protected Builder self() {
- return this;
- }
-
- /**
- * Required - API name: {@code organic}
- */
- @Nonnull
- public final Builder organic(Query value) {
- this.organic = value;
- return this;
- }
-
- /**
- * Required - API name: {@code organic}
- */
- @Nonnull
- public final Builder organic(Function> fn) {
- return organic(fn.apply(new Query.Builder()).build());
- }
-
- public ContainerBuilder docs(List v) {
- this._kind = Kind.Docs;
- this._value = v;
- return new ContainerBuilder();
- }
-
- public ContainerBuilder ids(List v) {
- this._kind = Kind.Ids;
- this._value = v;
- return new ContainerBuilder();
- }
-
- protected PinnedQuery build() {
- _checkSingleUse();
- return new PinnedQuery(this);
- }
-
- public class ContainerBuilder implements ObjectBuilder {
- private ContainerBuilder() {}
-
- /**
- * Required - API name: {@code organic}
- */
- @Nonnull
- public final ContainerBuilder organic(Query value) {
- Builder.this.organic = value;
- return this;
- }
-
- /**
- * Required - API name: {@code organic}
- */
- @Nonnull
- public final ContainerBuilder organic(Function> fn) {
- return organic(fn.apply(new Query.Builder()).build());
- }
-
- @Override
- public PinnedQuery build() {
- return Builder.this.build();
- }
- }
- }
-
- protected static void setupPinnedQueryDeserializer(ObjectDeserializer op) {
- setupQueryBaseDeserializer(op);
- op.add(Builder::organic, Query._DESERIALIZER, "organic");
- op.add(Builder::docs, JsonpDeserializer.arrayDeserializer(PinnedDoc._DESERIALIZER), "docs");
- op.add(Builder::ids, JsonpDeserializer.arrayDeserializer(JsonpDeserializer.stringDeserializer()), "ids");
- }
-
- public static final JsonpDeserializer _DESERIALIZER = ObjectBuilderDeserializer.lazy(
- Builder::new,
- PinnedQuery::setupPinnedQueryDeserializer,
- Builder::build
- );
-
- @Override
- public int hashCode() {
- int result = super.hashCode();
- result = 31 * result + Objects.hashCode(this._kind);
- result = 31 * result + Objects.hashCode(this._value);
- result = 31 * result + this.organic.hashCode();
- return result;
- }
-
- @Override
- public boolean equals(Object o) {
- if (!super.equals(o)) {
- return false;
- }
- if (this == o) return true;
- if (o == null || this.getClass() != o.getClass()) return false;
- PinnedQuery other = (PinnedQuery) o;
- return Objects.equals(this._kind, other._kind) && Objects.equals(this._value, other._value) && this.organic.equals(other.organic);
- }
-}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedQueryVariant.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedQueryVariant.java
deleted file mode 100644
index 64724043e6..0000000000
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/PinnedQueryVariant.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-//----------------------------------------------------
-// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
-//----------------------------------------------------
-
-package org.opensearch.client.opensearch._types.query_dsl;
-
-import javax.annotation.Generated;
-
-/**
- * Base interface for {@link PinnedQuery} variants.
- */
-@Generated("org.opensearch.client.codegen.CodeGenerator")
-public interface PinnedQueryVariant {
- PinnedQuery.Kind _pinnedQueryKind();
-}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/Query.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/Query.java
index 296d9c7f02..6186db7e45 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/Query.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/Query.java
@@ -101,7 +101,6 @@ public enum Kind implements JsonEnum {
Neural("neural"),
ParentId("parent_id"),
Percolate("percolate"),
- Pinned("pinned"),
Prefix("prefix"),
QueryString("query_string"),
Range("range"),
@@ -702,22 +701,6 @@ public PercolateQuery percolate() {
return TaggedUnionUtils.get(this, Kind.Percolate);
}
- /**
- * Is this variant instance of kind {@code pinned}?
- */
- public boolean isPinned() {
- return _kind == Kind.Pinned;
- }
-
- /**
- * Get the {@code pinned} variant value.
- *
- * @throws IllegalStateException if the current variant is not the {@code pinned} kind.
- */
- public PinnedQuery pinned() {
- return TaggedUnionUtils.get(this, Kind.Pinned);
- }
-
/**
* Is this variant instance of kind {@code prefix}?
*/
@@ -1474,16 +1457,6 @@ public ObjectBuilder percolate(Function pinned(PinnedQuery v) {
- this._kind = Kind.Pinned;
- this._value = v;
- return this;
- }
-
- public ObjectBuilder pinned(Function> fn) {
- return this.pinned(fn.apply(new PinnedQuery.Builder()).build());
- }
-
public ObjectBuilder prefix(PrefixQuery v) {
this._kind = Kind.Prefix;
this._value = v;
@@ -1761,7 +1734,6 @@ protected static void setupQueryDeserializer(ObjectDeserializer op) {
op.add(Builder::neural, NeuralQuery._DESERIALIZER, "neural");
op.add(Builder::parentId, ParentIdQuery._DESERIALIZER, "parent_id");
op.add(Builder::percolate, PercolateQuery._DESERIALIZER, "percolate");
- op.add(Builder::pinned, PinnedQuery._DESERIALIZER, "pinned");
op.add(Builder::prefix, PrefixQuery._DESERIALIZER, "prefix");
op.add(Builder::queryString, QueryStringQuery._DESERIALIZER, "query_string");
op.add(Builder::range, RangeQuery._DESERIALIZER, "range");
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/QueryBuilders.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/QueryBuilders.java
index 0950108aeb..ae3c17b27d 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/QueryBuilders.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/QueryBuilders.java
@@ -281,13 +281,6 @@ public static PercolateQuery.Builder percolate() {
return new PercolateQuery.Builder();
}
- /**
- * Creates a builder for the {@link PinnedQuery pinned} {@code Query} variant.
- */
- public static PinnedQuery.Builder pinned() {
- return new PinnedQuery.Builder();
- }
-
/**
* Creates a builder for the {@link PrefixQuery prefix} {@code Query} variant.
*/
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/RangeQuery.java b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/RangeQuery.java
index e8e6cb6aa8..5394f9f086 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/RangeQuery.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/_types/query_dsl/RangeQuery.java
@@ -79,6 +79,12 @@ public class RangeQuery implements QueryVariant, PlainJsonSerializable, ToCopyab
@Nullable
private final JsonData gte;
+ @Nullable
+ private final Boolean includeLower;
+
+ @Nullable
+ private final Boolean includeUpper;
+
@Nullable
private final JsonData lt;
@@ -106,6 +112,8 @@ private RangeQuery(Builder builder) {
this.from = builder.from;
this.gt = builder.gt;
this.gte = builder.gte;
+ this.includeLower = builder.includeLower;
+ this.includeUpper = builder.includeUpper;
this.lt = builder.lt;
this.lte = builder.lte;
this.name = builder.name;
@@ -174,6 +182,22 @@ public final JsonData gte() {
return this.gte;
}
+ /**
+ * API name: {@code include_lower}
+ */
+ @Nullable
+ public final Boolean includeLower() {
+ return this.includeLower;
+ }
+
+ /**
+ * API name: {@code include_upper}
+ */
+ @Nullable
+ public final Boolean includeUpper() {
+ return this.includeUpper;
+ }
+
/**
* API name: {@code lt}
*/
@@ -259,6 +283,16 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
this.gte.serialize(generator, mapper);
}
+ if (this.includeLower != null) {
+ generator.writeKey("include_lower");
+ generator.write(this.includeLower);
+ }
+
+ if (this.includeUpper != null) {
+ generator.writeKey("include_upper");
+ generator.write(this.includeUpper);
+ }
+
if (this.lt != null) {
generator.writeKey("lt");
this.lt.serialize(generator, mapper);
@@ -320,6 +354,10 @@ public static class Builder extends ObjectBuilderBase implements CopyableBuilder
@Nullable
private JsonData gte;
@Nullable
+ private Boolean includeLower;
+ @Nullable
+ private Boolean includeUpper;
+ @Nullable
private JsonData lt;
@Nullable
private JsonData lte;
@@ -341,6 +379,8 @@ private Builder(RangeQuery o) {
this.from = o.from;
this.gt = o.gt;
this.gte = o.gte;
+ this.includeLower = o.includeLower;
+ this.includeUpper = o.includeUpper;
this.lt = o.lt;
this.lte = o.lte;
this.name = o.name;
@@ -356,6 +396,8 @@ private Builder(Builder o) {
this.from = o.from;
this.gt = o.gt;
this.gte = o.gte;
+ this.includeLower = o.includeLower;
+ this.includeUpper = o.includeUpper;
this.lt = o.lt;
this.lte = o.lte;
this.name = o.name;
@@ -424,6 +466,24 @@ public final Builder gte(@Nullable JsonData value) {
return this;
}
+ /**
+ * API name: {@code include_lower}
+ */
+ @Nonnull
+ public final Builder includeLower(@Nullable Boolean value) {
+ this.includeLower = value;
+ return this;
+ }
+
+ /**
+ * API name: {@code include_upper}
+ */
+ @Nonnull
+ public final Builder includeUpper(@Nullable Boolean value) {
+ this.includeUpper = value;
+ return this;
+ }
+
/**
* API name: {@code lt}
*/
@@ -508,6 +568,8 @@ protected static void setupRangeQueryDeserializer(ObjectDeserializer {
+public class TermsQuery implements QueryVariant, PlainJsonSerializable, ToCopyableBuilder {
+
+ @Nullable
+ private final Float boost;
@Nonnull
private final String field;
+ @Nullable
+ private final String name;
+
@Nonnull
private final TermsQueryField terms;
@@ -70,8 +78,9 @@ public class TermsQuery extends QueryBase implements QueryVariant, ToCopyableBui
// ---------------------------------------------------------------------------------------------
private TermsQuery(Builder builder) {
- super(builder);
+ this.boost = builder.boost;
this.field = ApiTypeHelper.requireNonNull(builder.field, this, "field");
+ this.name = builder.name;
this.terms = ApiTypeHelper.requireNonNull(builder.terms, this, "terms");
this.valueType = builder.valueType;
}
@@ -88,6 +97,14 @@ public Query.Kind _queryKind() {
return Query.Kind.Terms;
}
+ /**
+ * API name: {@code boost}
+ */
+ @Nullable
+ public final Float boost() {
+ return this.boost;
+ }
+
/**
* Required -
*/
@@ -96,6 +113,14 @@ public final String field() {
return this.field;
}
+ /**
+ * API name: {@code _name}
+ */
+ @Nullable
+ public final String name() {
+ return this.name;
+ }
+
/**
* Required -
*/
@@ -116,10 +141,29 @@ public final TermsQueryValueType valueType() {
return this.valueType;
}
+ /**
+ * Serialize this object to JSON.
+ */
+ @Override
+ public void serialize(JsonGenerator generator, JsonpMapper mapper) {
+ generator.writeStartObject();
+ serializeInternal(generator, mapper);
+ generator.writeEnd();
+ }
+
protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
- super.serializeInternal(generator, mapper);
generator.writeKey(this.field);
this.terms.serialize(generator, mapper);
+ if (this.boost != null) {
+ generator.writeKey("boost");
+ generator.write(this.boost);
+ }
+
+ if (this.name != null) {
+ generator.writeKey("_name");
+ generator.write(this.name);
+ }
+
if (this.valueType != null) {
generator.writeKey("value_type");
this.valueType.serialize(generator, mapper);
@@ -142,8 +186,12 @@ public static Builder builder() {
/**
* Builder for {@link TermsQuery}.
*/
- public static class Builder extends QueryBase.AbstractBuilder implements CopyableBuilder {
+ public static class Builder extends ObjectBuilderBase implements CopyableBuilder {
+ @Nullable
+ private Float boost;
private String field;
+ @Nullable
+ private String name;
private TermsQueryField terms;
@Nullable
private TermsQueryValueType valueType;
@@ -151,15 +199,17 @@ public static class Builder extends QueryBase.AbstractBuilder implement
public Builder() {}
private Builder(TermsQuery o) {
- super(o);
+ this.boost = o.boost;
this.field = o.field;
+ this.name = o.name;
this.terms = o.terms;
this.valueType = o.valueType;
}
private Builder(Builder o) {
- super(o);
+ this.boost = o.boost;
this.field = o.field;
+ this.name = o.name;
this.terms = o.terms;
this.valueType = o.valueType;
}
@@ -170,9 +220,12 @@ public Builder copy() {
return new Builder(this);
}
- @Override
+ /**
+ * API name: {@code boost}
+ */
@Nonnull
- protected Builder self() {
+ public final Builder boost(@Nullable Float value) {
+ this.boost = value;
return this;
}
@@ -185,6 +238,15 @@ public final Builder field(String value) {
return this;
}
+ /**
+ * API name: {@code _name}
+ */
+ @Nonnull
+ public final Builder name(@Nullable String value) {
+ this.name = value;
+ return this;
+ }
+
/**
* Required -
*/
@@ -240,7 +302,8 @@ public TermsQuery build() {
);
protected static void setupTermsQueryDeserializer(ObjectDeserializer op) {
- setupQueryBaseDeserializer(op);
+ op.add(Builder::boost, JsonpDeserializer.floatDeserializer(), "boost");
+ op.add(Builder::name, JsonpDeserializer.stringDeserializer(), "_name");
op.add(Builder::valueType, TermsQueryValueType._DESERIALIZER, "value_type");
op.setUnknownFieldHandler((builder, name, parser, mapper) -> {
builder.field(name);
@@ -250,8 +313,10 @@ protected static void setupTermsQueryDeserializer(ObjectDeserializer derived;
+
@Nullable
private final String df;
@@ -215,6 +219,9 @@ public final class SearchRequest extends RequestBase
@Nonnull
private final List searchAfter;
+ @Nullable
+ private final String searchPipeline;
+
@Nullable
private final SearchType searchType;
@@ -274,6 +281,7 @@ private SearchRequest(Builder builder) {
this.ccsMinimizeRoundtrips = builder.ccsMinimizeRoundtrips;
this.collapse = builder.collapse;
this.defaultOperator = builder.defaultOperator;
+ this.derived = ApiTypeHelper.unmodifiable(builder.derived);
this.df = builder.df;
this.docvalueFields = ApiTypeHelper.unmodifiable(builder.docvalueFields);
this.expandWildcards = ApiTypeHelper.unmodifiable(builder.expandWildcards);
@@ -305,6 +313,7 @@ private SearchRequest(Builder builder) {
this.scriptFields = ApiTypeHelper.unmodifiable(builder.scriptFields);
this.scroll = builder.scroll;
this.searchAfter = ApiTypeHelper.unmodifiable(builder.searchAfter);
+ this.searchPipeline = builder.searchPipeline;
this.searchType = builder.searchType;
this.seqNoPrimaryTerm = builder.seqNoPrimaryTerm;
this.size = builder.size;
@@ -441,6 +450,14 @@ public final Operator defaultOperator() {
return this.defaultOperator;
}
+ /**
+ * API name: {@code derived}
+ */
+ @Nonnull
+ public final Map derived() {
+ return this.derived;
+ }
+
/**
* Field to use as default where no field prefix is given in the query string. This parameter can only be used when the q query string
* parameter is specified.
@@ -554,8 +571,7 @@ public final Boolean ignoreUnavailable() {
}
/**
- * Indicates whether hit.matched_queries should be rendered as a map that includes the name of the matched query associated
- * with its score (true) or as an array containing the name of the matched queries (false)
+ * Whether to return scores with named queries. Default is false.
*
* API name: {@code include_named_queries_score}
*
@@ -796,6 +812,17 @@ public final List searchAfter() {
return this.searchAfter;
}
+ /**
+ * Customizable sequence of processing stages applied to search queries.
+ *
+ * API name: {@code search_pipeline}
+ *
+ */
+ @Nullable
+ public final String searchPipeline() {
+ return this.searchPipeline;
+ }
+
/**
* How distributed term frequencies are calculated for relevance scoring.
*
@@ -928,10 +955,7 @@ public final TrackHits trackTotalHits() {
}
/**
- * Enables or disables verbose mode for the search pipeline. When verbose mode is enabled, detailed information about each processor in
- * the search pipeline is included in the search response. This includes the processor name, execution status, input, output, and time
- * taken for processing. This parameter is primarily intended for debugging purposes, allowing users to track how data flows and
- * transforms through the search pipeline.
+ * Enables or disables verbose mode for the search pipeline.
*
* API name: {@code verbose_pipeline}
*
@@ -978,6 +1002,16 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
this.collapse.serialize(generator, mapper);
}
+ if (ApiTypeHelper.isDefined(this.derived)) {
+ generator.writeKey("derived");
+ generator.writeStartObject();
+ for (Map.Entry item0 : this.derived.entrySet()) {
+ generator.writeKey(item0.getKey());
+ item0.getValue().serialize(generator, mapper);
+ }
+ generator.writeEnd();
+ }
+
if (ApiTypeHelper.isDefined(this.docvalueFields)) {
generator.writeKey("docvalue_fields");
generator.writeStartArray();
@@ -1021,6 +1055,11 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
this.highlight.serialize(generator, mapper);
}
+ if (this.includeNamedQueriesScore != null) {
+ generator.writeKey("include_named_queries_score");
+ generator.write(this.includeNamedQueriesScore);
+ }
+
if (ApiTypeHelper.isDefined(this.indicesBoost)) {
generator.writeKey("indices_boost");
generator.writeStartArray();
@@ -1090,6 +1129,11 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
generator.writeEnd();
}
+ if (this.searchPipeline != null) {
+ generator.writeKey("search_pipeline");
+ generator.write(this.searchPipeline);
+ }
+
if (this.seqNoPrimaryTerm != null) {
generator.writeKey("seq_no_primary_term");
generator.write(this.seqNoPrimaryTerm);
@@ -1162,6 +1206,11 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
this.trackTotalHits.serialize(generator, mapper);
}
+ if (this.verbosePipeline != null) {
+ generator.writeKey("verbose_pipeline");
+ generator.write(this.verbosePipeline);
+ }
+
if (this.version != null) {
generator.writeKey("version");
generator.write(this.version);
@@ -1206,6 +1255,8 @@ public static class Builder extends RequestBase.AbstractBuilder impleme
@Nullable
private Operator defaultOperator;
@Nullable
+ private Map derived;
+ @Nullable
private String df;
@Nullable
private List docvalueFields;
@@ -1268,6 +1319,8 @@ public static class Builder extends RequestBase.AbstractBuilder impleme
@Nullable
private List searchAfter;
@Nullable
+ private String searchPipeline;
+ @Nullable
private SearchType searchType;
@Nullable
private Boolean seqNoPrimaryTerm;
@@ -1312,6 +1365,7 @@ private Builder(SearchRequest o) {
this.ccsMinimizeRoundtrips = o.ccsMinimizeRoundtrips;
this.collapse = o.collapse;
this.defaultOperator = o.defaultOperator;
+ this.derived = _mapCopy(o.derived);
this.df = o.df;
this.docvalueFields = _listCopy(o.docvalueFields);
this.expandWildcards = _listCopy(o.expandWildcards);
@@ -1343,6 +1397,7 @@ private Builder(SearchRequest o) {
this.scriptFields = _mapCopy(o.scriptFields);
this.scroll = o.scroll;
this.searchAfter = _listCopy(o.searchAfter);
+ this.searchPipeline = o.searchPipeline;
this.searchType = o.searchType;
this.seqNoPrimaryTerm = o.seqNoPrimaryTerm;
this.size = o.size;
@@ -1372,6 +1427,7 @@ private Builder(Builder o) {
this.ccsMinimizeRoundtrips = o.ccsMinimizeRoundtrips;
this.collapse = o.collapse;
this.defaultOperator = o.defaultOperator;
+ this.derived = _mapCopy(o.derived);
this.df = o.df;
this.docvalueFields = _listCopy(o.docvalueFields);
this.expandWildcards = _listCopy(o.expandWildcards);
@@ -1403,6 +1459,7 @@ private Builder(Builder o) {
this.scriptFields = _mapCopy(o.scriptFields);
this.scroll = o.scroll;
this.searchAfter = _listCopy(o.searchAfter);
+ this.searchPipeline = o.searchPipeline;
this.searchType = o.searchType;
this.seqNoPrimaryTerm = o.seqNoPrimaryTerm;
this.size = o.size;
@@ -1613,6 +1670,44 @@ public final Builder defaultOperator(@Nullable Operator value) {
return this;
}
+ /**
+ * API name: {@code derived}
+ *
+ *
+ * Adds all elements of map to derived.
+ *
+ */
+ @Nonnull
+ public final Builder derived(Map map) {
+ this.derived = _mapPutAll(this.derived, map);
+ return this;
+ }
+
+ /**
+ * API name: {@code derived}
+ *
+ *
+ * Adds an entry to derived.
+ *
+ */
+ @Nonnull
+ public final Builder derived(String key, DerivedField value) {
+ this.derived = _mapPut(this.derived, key, value);
+ return this;
+ }
+
+ /**
+ * API name: {@code derived}
+ *
+ *
+ * Adds a value to derived using a builder lambda.
+ *
+ */
+ @Nonnull
+ public final Builder derived(String key, Function> fn) {
+ return derived(key, fn.apply(new DerivedField.Builder()).build());
+ }
+
/**
* Field to use as default where no field prefix is given in the query string. This parameter can only be used when the q query
* string parameter is specified.
@@ -1859,8 +1954,7 @@ public final Builder ignoreUnavailable(@Nullable Boolean value) {
}
/**
- * Indicates whether hit.matched_queries should be rendered as a map that includes the name of the matched query
- * associated with its score (true) or as an array containing the name of the matched queries (false)
+ * Whether to return scores with named queries. Default is false.
*
* API name: {@code include_named_queries_score}
*
@@ -2320,6 +2414,18 @@ public final Builder searchAfter(Function
+ * API name: {@code search_pipeline}
+ *
+ */
+ @Nonnull
+ public final Builder searchPipeline(@Nullable String value) {
+ this.searchPipeline = value;
+ return this;
+ }
+
/**
* How distributed term frequencies are calculated for relevance scoring.
*
@@ -2565,10 +2671,7 @@ public final Builder trackTotalHits(Function
* API name: {@code verbose_pipeline}
*
@@ -2618,12 +2721,14 @@ public SearchRequest build() {
protected static void setupSearchRequestDeserializer(ObjectDeserializer op) {
op.add(Builder::aggregations, JsonpDeserializer.stringMapDeserializer(Aggregation._DESERIALIZER), "aggregations", "aggs");
op.add(Builder::collapse, FieldCollapse._DESERIALIZER, "collapse");
+ op.add(Builder::derived, JsonpDeserializer.stringMapDeserializer(DerivedField._DESERIALIZER), "derived");
op.add(Builder::docvalueFields, JsonpDeserializer.arrayDeserializer(FieldAndFormat._DESERIALIZER), "docvalue_fields");
op.add(Builder::explain, JsonpDeserializer.booleanDeserializer(), "explain");
op.add(Builder::ext, JsonpDeserializer.stringMapDeserializer(JsonData._DESERIALIZER), "ext");
op.add(Builder::fields, JsonpDeserializer.arrayDeserializer(FieldAndFormat._DESERIALIZER), "fields");
op.add(Builder::from, JsonpDeserializer.integerDeserializer(), "from");
op.add(Builder::highlight, Highlight._DESERIALIZER, "highlight");
+ op.add(Builder::includeNamedQueriesScore, JsonpDeserializer.booleanDeserializer(), "include_named_queries_score");
op.add(
Builder::indicesBoost,
JsonpDeserializer.arrayDeserializer(JsonpDeserializer.stringMapDeserializer(JsonpDeserializer.floatDeserializer())),
@@ -2637,6 +2742,7 @@ protected static void setupSearchRequestDeserializer(ObjectDeserializer params) {
if (this.ignoreUnavailable != null) {
params.put("ignore_unavailable", String.valueOf(this.ignoreUnavailable));
}
- if (this.includeNamedQueriesScore != null) {
- params.put("include_named_queries_score", String.valueOf(this.includeNamedQueriesScore));
- }
if (this.lenient != null) {
params.put("lenient", String.valueOf(this.lenient));
}
@@ -2730,9 +2834,6 @@ protected void applyQueryParameters(@Nonnull Map params) {
if (this.searchType != null) {
params.put("search_type", this.searchType.jsonValue());
}
- if (this.verbosePipeline != null) {
- params.put("verbose_pipeline", String.valueOf(this.verbosePipeline));
- }
}
/**
@@ -2795,6 +2896,7 @@ public int hashCode() {
result = 31 * result + Objects.hashCode(this.ccsMinimizeRoundtrips);
result = 31 * result + Objects.hashCode(this.collapse);
result = 31 * result + Objects.hashCode(this.defaultOperator);
+ result = 31 * result + Objects.hashCode(this.derived);
result = 31 * result + Objects.hashCode(this.df);
result = 31 * result + Objects.hashCode(this.docvalueFields);
result = 31 * result + Objects.hashCode(this.expandWildcards);
@@ -2826,6 +2928,7 @@ public int hashCode() {
result = 31 * result + Objects.hashCode(this.scriptFields);
result = 31 * result + Objects.hashCode(this.scroll);
result = 31 * result + Objects.hashCode(this.searchAfter);
+ result = 31 * result + Objects.hashCode(this.searchPipeline);
result = 31 * result + Objects.hashCode(this.searchType);
result = 31 * result + Objects.hashCode(this.seqNoPrimaryTerm);
result = 31 * result + Objects.hashCode(this.size);
@@ -2859,6 +2962,7 @@ public boolean equals(Object o) {
&& Objects.equals(this.ccsMinimizeRoundtrips, other.ccsMinimizeRoundtrips)
&& Objects.equals(this.collapse, other.collapse)
&& Objects.equals(this.defaultOperator, other.defaultOperator)
+ && Objects.equals(this.derived, other.derived)
&& Objects.equals(this.df, other.df)
&& Objects.equals(this.docvalueFields, other.docvalueFields)
&& Objects.equals(this.expandWildcards, other.expandWildcards)
@@ -2890,6 +2994,7 @@ public boolean equals(Object o) {
&& Objects.equals(this.scriptFields, other.scriptFields)
&& Objects.equals(this.scroll, other.scroll)
&& Objects.equals(this.searchAfter, other.searchAfter)
+ && Objects.equals(this.searchPipeline, other.searchPipeline)
&& Objects.equals(this.searchType, other.searchType)
&& Objects.equals(this.seqNoPrimaryTerm, other.seqNoPrimaryTerm)
&& Objects.equals(this.size, other.size)
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/core/explain/Explanation.java b/java-client/src/generated/java/org/opensearch/client/opensearch/core/explain/Explanation.java
index cf72619490..a9935e3be0 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/core/explain/Explanation.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/core/explain/Explanation.java
@@ -67,8 +67,7 @@ public class Explanation implements PlainJsonSerializable, ToCopyableBuilder details;
- @Nonnull
- private final Number value;
+ private final float value;
// ---------------------------------------------------------------------------------------------
@@ -101,8 +100,7 @@ public final List details() {
/**
* Required - API name: {@code value}
*/
- @Nonnull
- public final Number value() {
+ public final float value() {
return this.value;
}
@@ -130,7 +128,7 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
}
generator.writeKey("value");
- generator.write(this.value.doubleValue());
+ generator.write(this.value);
}
// ---------------------------------------------------------------------------------------------
@@ -153,7 +151,7 @@ public static class Builder extends ObjectBuilderBase implements CopyableBuilder
private String description;
@Nullable
private List details;
- private Number value;
+ private Float value;
public Builder() {}
@@ -226,7 +224,7 @@ public final Builder details(Function op) {
op.add(Builder::description, JsonpDeserializer.stringDeserializer(), "description");
op.add(Builder::details, JsonpDeserializer.arrayDeserializer(Explanation._DESERIALIZER), "details");
- op.add(Builder::value, JsonpDeserializer.numberDeserializer(), "value");
+ op.add(Builder::value, JsonpDeserializer.floatDeserializer(), "value");
}
@Override
@@ -266,7 +264,7 @@ public int hashCode() {
int result = 17;
result = 31 * result + this.description.hashCode();
result = 31 * result + Objects.hashCode(this.details);
- result = 31 * result + this.value.hashCode();
+ result = 31 * result + Float.hashCode(this.value);
return result;
}
@@ -275,6 +273,6 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || this.getClass() != o.getClass()) return false;
Explanation other = (Explanation) o;
- return this.description.equals(other.description) && Objects.equals(this.details, other.details) && this.value.equals(other.value);
+ return this.description.equals(other.description) && Objects.equals(this.details, other.details) && this.value == other.value;
}
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/Highlight.java b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/Highlight.java
index fded93dddc..19032c066b 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/Highlight.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/Highlight.java
@@ -37,7 +37,7 @@
package org.opensearch.client.opensearch.core.search;
import jakarta.json.stream.JsonGenerator;
-import java.util.Map;
+import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import javax.annotation.Generated;
@@ -63,7 +63,7 @@ public class Highlight extends HighlightBase implements ToCopyableBuilder fields;
+ private final List fields;
// ---------------------------------------------------------------------------------------------
@@ -89,7 +89,7 @@ public final HighlighterEncoder encoder() {
* Required - API name: {@code fields}
*/
@Nonnull
- public final Map fields() {
+ public final List fields() {
return this.fields;
}
@@ -101,10 +101,9 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
}
generator.writeKey("fields");
- generator.writeStartObject();
- for (Map.Entry item0 : this.fields.entrySet()) {
- generator.writeKey(item0.getKey());
- item0.getValue().serialize(generator, mapper);
+ generator.writeStartArray();
+ for (HighlightField item0 : this.fields) {
+ item0.serialize(generator, mapper);
}
generator.writeEnd();
}
@@ -128,20 +127,20 @@ public static Builder builder() {
public static class Builder extends HighlightBase.AbstractBuilder implements CopyableBuilder {
@Nullable
private HighlighterEncoder encoder;
- private Map fields;
+ private List fields;
public Builder() {}
private Builder(Highlight o) {
super(o);
this.encoder = o.encoder;
- this.fields = _mapCopy(o.fields);
+ this.fields = _listCopy(o.fields);
}
private Builder(Builder o) {
super(o);
this.encoder = o.encoder;
- this.fields = _mapCopy(o.fields);
+ this.fields = _listCopy(o.fields);
}
@Override
@@ -169,12 +168,12 @@ public final Builder encoder(@Nullable HighlighterEncoder value) {
* Required - API name: {@code fields}
*
*
- * Adds all elements of map to fields.
+ * Adds all elements of list to fields.
*
*/
@Nonnull
- public final Builder fields(Map map) {
- this.fields = _mapPutAll(this.fields, map);
+ public final Builder fields(List list) {
+ this.fields = _listAddAll(this.fields, list);
return this;
}
@@ -182,12 +181,12 @@ public final Builder fields(Map map) {
* Required - API name: {@code fields}
*
*
- * Adds an entry to fields.
+ * Adds one or more values to fields.
*
*/
@Nonnull
- public final Builder fields(String key, HighlightField value) {
- this.fields = _mapPut(this.fields, key, value);
+ public final Builder fields(HighlightField value, HighlightField... values) {
+ this.fields = _listAdd(this.fields, value, values);
return this;
}
@@ -199,8 +198,8 @@ public final Builder fields(String key, HighlightField value) {
*
*/
@Nonnull
- public final Builder fields(String key, Function> fn) {
- return fields(key, fn.apply(new HighlightField.Builder()).build());
+ public final Builder fields(Function> fn) {
+ return fields(fn.apply(new HighlightField.Builder()).build());
}
/**
@@ -230,7 +229,7 @@ public Highlight build() {
protected static void setupHighlightDeserializer(ObjectDeserializer op) {
setupHighlightBaseDeserializer(op);
op.add(Builder::encoder, HighlighterEncoder._DESERIALIZER, "encoder");
- op.add(Builder::fields, JsonpDeserializer.stringMapDeserializer(HighlightField._DESERIALIZER), "fields");
+ op.add(Builder::fields, JsonpDeserializer.arrayDeserializer(HighlightField._DESERIALIZER), "fields");
}
@Override
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HighlightField.java b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HighlightField.java
index 56c1febfa1..4bbbbca049 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HighlightField.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HighlightField.java
@@ -48,7 +48,6 @@
import org.opensearch.client.json.JsonpMapper;
import org.opensearch.client.json.ObjectBuilderDeserializer;
import org.opensearch.client.json.ObjectDeserializer;
-import org.opensearch.client.opensearch._types.analysis.Analyzer;
import org.opensearch.client.util.ApiTypeHelper;
import org.opensearch.client.util.CopyableBuilder;
import org.opensearch.client.util.ObjectBuilder;
@@ -60,8 +59,8 @@
@Generated("org.opensearch.client.codegen.CodeGenerator")
public class HighlightField extends HighlightBase implements ToCopyableBuilder {
- @Nullable
- private final Analyzer analyzer;
+ @Nonnull
+ private final String key;
@Nonnull
private final List matchedFields;
@@ -70,7 +69,7 @@ public class HighlightField extends HighlightBase implements ToCopyableBuilder matchedFields() {
}
protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
+ generator.writeStartObject(this.key);
super.serializeInternal(generator, mapper);
- if (this.analyzer != null) {
- generator.writeKey("analyzer");
- this.analyzer.serialize(generator, mapper);
- }
-
if (ApiTypeHelper.isDefined(this.matchedFields)) {
generator.writeKey("matched_fields");
generator.writeStartArray();
@@ -109,6 +104,7 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
}
generator.writeEnd();
}
+ generator.writeEnd();
}
// ---------------------------------------------------------------------------------------------
@@ -128,8 +124,7 @@ public static Builder builder() {
* Builder for {@link HighlightField}.
*/
public static class Builder extends HighlightBase.AbstractBuilder implements CopyableBuilder {
- @Nullable
- private Analyzer analyzer;
+ private String key;
@Nullable
private List matchedFields;
@@ -137,13 +132,13 @@ public Builder() {}
private Builder(HighlightField o) {
super(o);
- this.analyzer = o.analyzer;
+ this.key = o.key;
this.matchedFields = _listCopy(o.matchedFields);
}
private Builder(Builder o) {
super(o);
- this.analyzer = o.analyzer;
+ this.key = o.key;
this.matchedFields = _listCopy(o.matchedFields);
}
@@ -160,22 +155,14 @@ protected Builder self() {
}
/**
- * API name: {@code analyzer}
+ * Required - The target key
*/
@Nonnull
- public final Builder analyzer(@Nullable Analyzer value) {
- this.analyzer = value;
+ public final Builder key(String value) {
+ this.key = value;
return this;
}
- /**
- * API name: {@code analyzer}
- */
- @Nonnull
- public final Builder analyzer(Function> fn) {
- return analyzer(fn.apply(new Analyzer.Builder()).build());
- }
-
/**
* API name: {@code matched_fields}
*
@@ -228,14 +215,14 @@ public HighlightField build() {
protected static void setupHighlightFieldDeserializer(ObjectDeserializer op) {
setupHighlightBaseDeserializer(op);
- op.add(Builder::analyzer, Analyzer._DESERIALIZER, "analyzer");
op.add(Builder::matchedFields, JsonpDeserializer.arrayDeserializer(JsonpDeserializer.stringDeserializer()), "matched_fields");
+ op.setKey(Builder::key, JsonpDeserializer.stringDeserializer());
}
@Override
public int hashCode() {
int result = super.hashCode();
- result = 31 * result + Objects.hashCode(this.analyzer);
+ result = 31 * result + this.key.hashCode();
result = 31 * result + Objects.hashCode(this.matchedFields);
return result;
}
@@ -248,6 +235,6 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || this.getClass() != o.getClass()) return false;
HighlightField other = (HighlightField) o;
- return Objects.equals(this.analyzer, other.analyzer) && Objects.equals(this.matchedFields, other.matchedFields);
+ return this.key.equals(other.key) && Objects.equals(this.matchedFields, other.matchedFields);
}
}
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HighlighterTagsSchema.java b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HighlighterTagsSchema.java
index 0fd86d10db..8c4e48166e 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HighlighterTagsSchema.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HighlighterTagsSchema.java
@@ -45,6 +45,8 @@
@JsonpDeserializable
@Generated("org.opensearch.client.codegen.CodeGenerator")
public enum HighlighterTagsSchema implements JsonEnum {
+ Default("default"),
+
Styled("styled");
private final String jsonValue;
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/Hit.java b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/Hit.java
index 91cc460f3f..eb96d6b596 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/Hit.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/Hit.java
@@ -37,6 +37,7 @@
package org.opensearch.client.opensearch.core.search;
import jakarta.json.stream.JsonGenerator;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -92,6 +93,9 @@ public class Hit implements PlainJsonSerializable, ToCopyableBuilder<
@Nonnull
private final List matchedQueries;
+ @Nonnull
+ private final Map metaFields;
+
@Nullable
private final NestedIdentity nested;
@@ -137,6 +141,7 @@ private Hit(Builder builder) {
this.index = builder.index;
this.innerHits = ApiTypeHelper.unmodifiable(builder.innerHits);
this.matchedQueries = ApiTypeHelper.unmodifiable(builder.matchedQueries);
+ this.metaFields = ApiTypeHelper.unmodifiable(builder.metaFields);
this.nested = builder.nested;
this.node = builder.node;
this.primaryTerm = builder.primaryTerm;
@@ -226,6 +231,14 @@ public final List matchedQueries() {
return this.matchedQueries;
}
+ /**
+ * Contains metadata values for the documents.
+ */
+ @Nonnull
+ public final Map metaFields() {
+ return this.metaFields;
+ }
+
/**
* API name: {@code _nested}
*/
@@ -325,6 +338,10 @@ public void serialize(JsonGenerator generator, JsonpMapper mapper) {
}
protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
+ for (Map.Entry item0 : this.metaFields.entrySet()) {
+ generator.writeKey(item0.getKey());
+ item0.getValue().serialize(generator, mapper);
+ }
if (this.explanation != null) {
generator.writeKey("_explanation");
this.explanation.serialize(generator, mapper);
@@ -501,6 +518,8 @@ public static class Builder extends ObjectBuilderBase implements Copy
@Nullable
private List matchedQueries;
@Nullable
+ private Map metaFields;
+ @Nullable
private NestedIdentity nested;
@Nullable
private String node;
@@ -535,6 +554,7 @@ private Builder(Hit o) {
this.index = o.index;
this.innerHits = _mapCopy(o.innerHits);
this.matchedQueries = _listCopy(o.matchedQueries);
+ this.metaFields = _mapCopy(o.metaFields);
this.nested = o.nested;
this.node = o.node;
this.primaryTerm = o.primaryTerm;
@@ -558,6 +578,7 @@ private Builder(Builder o) {
this.index = o.index;
this.innerHits = _mapCopy(o.innerHits);
this.matchedQueries = _listCopy(o.matchedQueries);
+ this.metaFields = _mapCopy(o.metaFields);
this.nested = o.nested;
this.node = o.node;
this.primaryTerm = o.primaryTerm;
@@ -780,6 +801,32 @@ public final Builder matchedQueries(String value, String... values) {
return this;
}
+ /**
+ * Contains metadata values for the documents.
+ *
+ *
+ * Adds all elements of map to metaFields.
+ *
+ */
+ @Nonnull
+ public final Builder metaFields(Map map) {
+ this.metaFields = _mapPutAll(this.metaFields, map);
+ return this;
+ }
+
+ /**
+ * Contains metadata values for the documents.
+ *
+ *
+ * Adds an entry to metaFields.
+ *
+ */
+ @Nonnull
+ public final Builder metaFields(String key, JsonData value) {
+ this.metaFields = _mapPut(this.metaFields, key, value);
+ return this;
+ }
+
/**
* API name: {@code _nested}
*/
@@ -973,6 +1020,12 @@ protected static void setupHitDeserializer(
op.add(Builder::sort, JsonpDeserializer.arrayDeserializer(FieldValue._DESERIALIZER), "sort");
op.add(Builder::source, tDocumentDeserializer, "_source");
op.add(Builder::version, JsonpDeserializer.longDeserializer(), "_version");
+ op.setUnknownFieldHandler((builder, name, parser, mapper) -> {
+ if (builder.metaFields == null) {
+ builder.metaFields = new HashMap<>();
+ }
+ builder.metaFields.put(name, JsonData._DESERIALIZER.deserialize(parser, mapper));
+ });
}
@Override
@@ -987,6 +1040,7 @@ public int hashCode() {
result = 31 * result + Objects.hashCode(this.index);
result = 31 * result + Objects.hashCode(this.innerHits);
result = 31 * result + Objects.hashCode(this.matchedQueries);
+ result = 31 * result + Objects.hashCode(this.metaFields);
result = 31 * result + Objects.hashCode(this.nested);
result = 31 * result + Objects.hashCode(this.node);
result = 31 * result + Objects.hashCode(this.primaryTerm);
@@ -1014,6 +1068,7 @@ public boolean equals(Object o) {
&& Objects.equals(this.index, other.index)
&& Objects.equals(this.innerHits, other.innerHits)
&& Objects.equals(this.matchedQueries, other.matchedQueries)
+ && Objects.equals(this.metaFields, other.metaFields)
&& Objects.equals(this.nested, other.nested)
&& Objects.equals(this.node, other.node)
&& Objects.equals(this.primaryTerm, other.primaryTerm)
diff --git a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HitsMetadata.java b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HitsMetadata.java
index b87fc64e86..906e302df3 100644
--- a/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HitsMetadata.java
+++ b/java-client/src/generated/java/org/opensearch/client/opensearch/core/search/HitsMetadata.java
@@ -57,10 +57,13 @@
// typedef: core.search.HitsMetadata
@Generated("org.opensearch.client.codegen.CodeGenerator")
-public class HitsMetadata implements PlainJsonSerializable, ToCopyableBuilder, HitsMetadata> {
+public class HitsMetadata
+ implements
+ PlainJsonSerializable,
+ ToCopyableBuilder, HitsMetadata> {
@Nonnull
- private final List> hits;
+ private final List> hits;
@Nullable
private final Float maxScore;
@@ -70,13 +73,15 @@ public class HitsMetadata implements PlainJsonSerializable, ToCopyableBuilder
// ---------------------------------------------------------------------------------------------
- private HitsMetadata(Builder builder) {
+ private HitsMetadata(Builder builder) {
this.hits = ApiTypeHelper.unmodifiableRequired(builder.hits, this, "hits");
this.maxScore = builder.maxScore;
this.total = builder.total;
}
- public static HitsMetadata of(Function, ObjectBuilder>> fn) {
+ public static HitsMetadata of(
+ Function, ObjectBuilder>> fn
+ ) {
return fn.apply(new Builder<>()).build();
}
@@ -84,7 +89,7 @@ public static HitsMetadata of(Function, ObjectBui
* Required - API name: {@code hits}
*/
@Nonnull
- public final List> hits() {
+ public final List> hits() {
return this.hits;
}
@@ -120,7 +125,7 @@ public void serialize(JsonGenerator generator, JsonpMapper mapper) {
protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
generator.writeKey("hits");
generator.writeStartArray();
- for (Hit item0 : this.hits) {
+ for (Hit item0 : this.hits) {
item0.serialize(generator, mapper);
}
generator.writeEnd();
@@ -140,20 +145,22 @@ protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
@Override
@Nonnull
- public Builder toBuilder() {
+ public Builder toBuilder() {
return new Builder<>(this);
}
@Nonnull
- public static Builder builder() {
+ public static Builder builder() {
return new Builder<>();
}
/**
* Builder for {@link HitsMetadata}.
*/
- public static class Builder extends ObjectBuilderBase implements CopyableBuilder, HitsMetadata> {
- private List> hits;
+ public static class Builder extends ObjectBuilderBase
+ implements
+ CopyableBuilder, HitsMetadata> {
+ private List> hits;
@Nullable
private Float maxScore;
@Nullable
@@ -161,13 +168,13 @@ public static class Builder extends ObjectBuilderBase implements CopyableBuil
public Builder() {}
- private Builder(HitsMetadata o) {
+ private Builder(HitsMetadata o) {
this.hits = _listCopy(o.hits);
this.maxScore = o.maxScore;
this.total = o.total;
}
- private Builder(Builder o) {
+ private Builder(Builder o) {
this.hits = _listCopy(o.hits);
this.maxScore = o.maxScore;
this.total = o.total;
@@ -175,7 +182,7 @@ private Builder(Builder o) {
@Override
@Nonnull
- public Builder copy() {
+ public Builder copy() {
return new Builder<>(this);
}
@@ -187,7 +194,7 @@ public Builder copy() {
*
*/
@Nonnull
- public final Builder hits(List> list) {
+ public final Builder hits(List> list) {
this.hits = _listAddAll(this.hits, list);
return this;
}
@@ -200,7 +207,7 @@ public final Builder hits(List> list) {
*
*/
@Nonnull
- public final Builder hits(Hit value, Hit... values) {
+ public final Builder hits(Hit value, Hit... values) {
this.hits = _listAdd(this.hits, value, values);
return this;
}
@@ -213,15 +220,15 @@ public final Builder hits(Hit value, Hit... values) {
*
*/
@Nonnull
- public final Builder hits(Function, ObjectBuilder>> fn) {
- return hits(fn.apply(new Hit.Builder()).build());
+ public final Builder hits(Function, ObjectBuilder>> fn) {
+ return hits(fn.apply(new Hit.Builder()).build());
}
/**
* API name: {@code max_score}
*/
@Nonnull
- public final Builder maxScore(@Nullable Float value) {
+ public final Builder maxScore(@Nullable Float value) {
this.maxScore = value;
return this;
}
@@ -233,7 +240,7 @@ public final Builder maxScore(@Nullable Float value) {
*
*/
@Nonnull
- public final Builder total(@Nullable TotalHits value) {
+ public final Builder total(@Nullable TotalHits value) {
this.total = value;
return this;
}
@@ -245,7 +252,7 @@ public final Builder total(@Nullable TotalHits value) {
*
*/
@Nonnull
- public final Builder total(Function> fn) {
+ public final Builder total(Function> fn) {
return total(fn.apply(new TotalHits.Builder()).build());
}
@@ -256,7 +263,7 @@ public final Builder total(Function build() {
+ public HitsMetadata build() {
_checkSingleUse();
return new HitsMetadata<>(this);
@@ -268,18 +275,20 @@ public HitsMetadata build() {
/**
* Create a JSON deserializer for HitsMetadata.
*/
- public static